github.com/npm/cli.git
author    claudiahdz <cghr1990@gmail.com>  2020-03-31 23:09:27 +0300
committer isaacs <i@izs.me>  2020-05-08 04:12:58 +0300
commit    e97662e25f92840f7e8a683f8e2cda2f5874d945 (patch)
tree      121568f55a85593e09e6e0e98b7c3b9bc09b086f
parent    29be1c9d1845d36c6eb2ef9f2c0ec2132066bb0f (diff)
chore: rm libcipm
-rw-r--r--node_modules/agent-base/.travis.yml24
-rw-r--r--node_modules/agent-base/History.md113
-rw-r--r--node_modules/agent-base/README.md145
-rw-r--r--node_modules/agent-base/index.d.ts43
-rw-r--r--node_modules/agent-base/index.js170
-rw-r--r--node_modules/agent-base/package.json68
-rw-r--r--node_modules/agent-base/patch-core.js51
-rw-r--r--node_modules/agent-base/test/ssl-cert-snakeoil.key15
-rw-r--r--node_modules/agent-base/test/ssl-cert-snakeoil.pem12
-rw-r--r--node_modules/agent-base/test/test.js728
-rw-r--r--node_modules/agentkeepalive/History.md170
-rw-r--r--node_modules/agentkeepalive/README.md248
-rw-r--r--node_modules/agentkeepalive/browser.js5
-rw-r--r--node_modules/agentkeepalive/index.d.ts43
-rw-r--r--node_modules/agentkeepalive/index.js4
-rw-r--r--node_modules/agentkeepalive/lib/_http_agent.js416
-rw-r--r--node_modules/agentkeepalive/lib/agent.js133
-rw-r--r--node_modules/agentkeepalive/lib/https_agent.js42
-rw-r--r--node_modules/agentkeepalive/package.json83
-rw-r--r--node_modules/es6-promise/CHANGELOG.md155
-rw-r--r--node_modules/es6-promise/LICENSE19
-rw-r--r--node_modules/es6-promise/README.md97
-rw-r--r--node_modules/es6-promise/auto.js4
-rw-r--r--node_modules/es6-promise/dist/es6-promise.auto.js1176
-rw-r--r--node_modules/es6-promise/dist/es6-promise.auto.map1
-rw-r--r--node_modules/es6-promise/dist/es6-promise.auto.min.js1
-rw-r--r--node_modules/es6-promise/dist/es6-promise.auto.min.map1
-rw-r--r--node_modules/es6-promise/dist/es6-promise.js1174
-rw-r--r--node_modules/es6-promise/dist/es6-promise.map1
-rw-r--r--node_modules/es6-promise/dist/es6-promise.min.js1
-rw-r--r--node_modules/es6-promise/dist/es6-promise.min.map1
-rw-r--r--node_modules/es6-promise/es6-promise.d.ts85
-rw-r--r--node_modules/es6-promise/lib/es6-promise.auto.js3
-rw-r--r--node_modules/es6-promise/lib/es6-promise.js7
-rw-r--r--node_modules/es6-promise/lib/es6-promise/-internal.js243
-rw-r--r--node_modules/es6-promise/lib/es6-promise/asap.js119
-rw-r--r--node_modules/es6-promise/lib/es6-promise/enumerator.js124
-rw-r--r--node_modules/es6-promise/lib/es6-promise/polyfill.js35
-rw-r--r--node_modules/es6-promise/lib/es6-promise/promise.js431
-rw-r--r--node_modules/es6-promise/lib/es6-promise/promise/all.js52
-rw-r--r--node_modules/es6-promise/lib/es6-promise/promise/race.js84
-rw-r--r--node_modules/es6-promise/lib/es6-promise/promise/reject.js46
-rw-r--r--node_modules/es6-promise/lib/es6-promise/promise/resolve.js48
-rw-r--r--node_modules/es6-promise/lib/es6-promise/then.js32
-rw-r--r--node_modules/es6-promise/lib/es6-promise/utils.js21
-rw-r--r--node_modules/es6-promise/package.json106
-rw-r--r--node_modules/es6-promisify/README.md89
-rw-r--r--node_modules/es6-promisify/dist/promise.js73
-rw-r--r--node_modules/es6-promisify/dist/promisify.js85
-rw-r--r--node_modules/es6-promisify/package.json72
-rw-r--r--node_modules/genfun/CHANGELOG.md53
-rw-r--r--node_modules/genfun/LICENSE21
-rw-r--r--node_modules/genfun/README.md223
-rw-r--r--node_modules/genfun/lib/genfun.js296
-rw-r--r--node_modules/genfun/lib/method.js82
-rw-r--r--node_modules/genfun/lib/role.js17
-rw-r--r--node_modules/genfun/lib/util.js37
-rw-r--r--node_modules/genfun/package.json79
-rw-r--r--node_modules/http-cache-semantics/README.md177
-rw-r--r--node_modules/http-cache-semantics/node4/index.js559
-rw-r--r--node_modules/http-cache-semantics/package.json60
-rw-r--r--node_modules/http-proxy-agent/.travis.yml22
-rw-r--r--node_modules/http-proxy-agent/History.md101
-rw-r--r--node_modules/http-proxy-agent/README.md74
-rw-r--r--node_modules/http-proxy-agent/index.js111
-rw-r--r--node_modules/http-proxy-agent/package.json67
-rw-r--r--node_modules/http-proxy-agent/test/ssl-cert-snakeoil.key15
-rw-r--r--node_modules/http-proxy-agent/test/ssl-cert-snakeoil.pem12
-rw-r--r--node_modules/http-proxy-agent/test/test.js303
-rw-r--r--node_modules/https-proxy-agent/.editorconfig37
-rw-r--r--node_modules/https-proxy-agent/.eslintrc.js86
-rw-r--r--node_modules/https-proxy-agent/History.md124
-rw-r--r--node_modules/https-proxy-agent/README.md137
-rw-r--r--node_modules/https-proxy-agent/index.d.ts22
-rw-r--r--node_modules/https-proxy-agent/index.js241
-rw-r--r--node_modules/https-proxy-agent/package.json66
-rw-r--r--node_modules/libcipm/CHANGELOG.md508
-rw-r--r--node_modules/libcipm/LICENSE.md19
-rw-r--r--node_modules/libcipm/README.md37
-rw-r--r--node_modules/libcipm/index.js429
-rw-r--r--node_modules/libcipm/lib/config/npm-config.js84
-rw-r--r--node_modules/libcipm/lib/extract.js67
-rw-r--r--node_modules/libcipm/lib/silentlog.js13
-rw-r--r--node_modules/libcipm/lib/worker.js16
l---------node_modules/libcipm/node_modules/.bin/which1
-rw-r--r--node_modules/libcipm/node_modules/cacache/CHANGELOG.md657
-rw-r--r--node_modules/libcipm/node_modules/cacache/LICENSE.md16
-rw-r--r--node_modules/libcipm/node_modules/cacache/README.es.md628
-rw-r--r--node_modules/libcipm/node_modules/cacache/README.md641
-rw-r--r--node_modules/libcipm/node_modules/cacache/en.js3
-rw-r--r--node_modules/libcipm/node_modules/cacache/es.js3
-rw-r--r--node_modules/libcipm/node_modules/cacache/get.js247
-rw-r--r--node_modules/libcipm/node_modules/cacache/index.js3
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/content/path.js26
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/content/read.js195
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/content/rm.js21
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/content/write.js164
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/entry-index.js288
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/memoization.js69
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/util/fix-owner.js128
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/util/hash-to-segments.js11
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/util/move-file.js51
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/util/tmp.js37
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/util/y.js25
-rw-r--r--node_modules/libcipm/node_modules/cacache/lib/verify.js227
-rw-r--r--node_modules/libcipm/node_modules/cacache/locales/en.js47
-rw-r--r--node_modules/libcipm/node_modules/cacache/locales/en.json7
-rw-r--r--node_modules/libcipm/node_modules/cacache/locales/es.js49
-rw-r--r--node_modules/libcipm/node_modules/cacache/locales/es.json6
-rw-r--r--node_modules/libcipm/node_modules/cacache/ls.js6
-rw-r--r--node_modules/libcipm/node_modules/cacache/package.json126
-rw-r--r--node_modules/libcipm/node_modules/cacache/put.js86
-rw-r--r--node_modules/libcipm/node_modules/cacache/rm.js28
-rw-r--r--node_modules/libcipm/node_modules/cacache/verify.js3
-rw-r--r--node_modules/libcipm/node_modules/fs-minipass/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/fs-minipass/README.md70
-rw-r--r--node_modules/libcipm/node_modules/fs-minipass/index.js387
-rw-r--r--node_modules/libcipm/node_modules/fs-minipass/package.json65
-rw-r--r--node_modules/libcipm/node_modules/minipass/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/minipass/README.md606
-rw-r--r--node_modules/libcipm/node_modules/minipass/index.js537
-rw-r--r--node_modules/libcipm/node_modules/minipass/node_modules/yallist/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/minipass/node_modules/yallist/README.md204
-rw-r--r--node_modules/libcipm/node_modules/minipass/node_modules/yallist/iterator.js8
-rw-r--r--node_modules/libcipm/node_modules/minipass/node_modules/yallist/package.json62
-rw-r--r--node_modules/libcipm/node_modules/minipass/node_modules/yallist/yallist.js426
-rw-r--r--node_modules/libcipm/node_modules/minipass/package.json73
-rw-r--r--node_modules/libcipm/node_modules/minizlib/LICENSE26
-rw-r--r--node_modules/libcipm/node_modules/minizlib/README.md53
-rw-r--r--node_modules/libcipm/node_modules/minizlib/constants.js115
-rw-r--r--node_modules/libcipm/node_modules/minizlib/index.js320
-rw-r--r--node_modules/libcipm/node_modules/minizlib/package.json71
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/CHANGELOG.md26
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/README.md83
l---------node_modules/libcipm/node_modules/npm-package-arg/node_modules/.bin/semver1
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md115
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE13
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md133
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js79
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js156
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js125
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json69
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/CHANGELOG.md39
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/README.md412
-rwxr-xr-xnode_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/bin/semver160
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/package.json60
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/range.bnf16
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/semver.js1483
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/npa.js301
-rw-r--r--node_modules/libcipm/node_modules/npm-package-arg/package.json73
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/CHANGELOG.md167
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/LICENSE.md16
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/README.md84
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/index.js136
l---------node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/.bin/semver1
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/CHANGELOG.md39
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/README.md412
-rwxr-xr-xnode_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/bin/semver160
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/package.json60
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/range.bnf16
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/semver.js1483
-rw-r--r--node_modules/libcipm/node_modules/npm-pick-manifest/package.json82
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/CHANGELOG.md250
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/LICENSE.md16
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/README.md636
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/auth.js57
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/check-response.js109
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/config.js98
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/errors.js79
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/index.js203
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/LICENSE21
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/README.md586
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.d.ts187
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.js64
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/package.json62
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/package.json96
-rw-r--r--node_modules/libcipm/node_modules/npm-registry-fetch/silentlog.js14
-rw-r--r--node_modules/libcipm/node_modules/pacote/CHANGELOG.md1417
-rw-r--r--node_modules/libcipm/node_modules/pacote/LICENSE21
-rw-r--r--node_modules/libcipm/node_modules/pacote/README.md288
-rw-r--r--node_modules/libcipm/node_modules/pacote/extract.js99
-rw-r--r--node_modules/libcipm/node_modules/pacote/index.js10
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/extract-stream.js89
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetch.js82
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/alias.js24
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/directory.js89
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/file.js78
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/git.js178
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/hosted.js3
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/range.js3
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/index.js32
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/manifest.js81
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/packument.js92
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/tarball.js102
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/remote.js34
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/tag.js3
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/fetchers/version.js3
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/finalize-manifest.js248
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/cache-key.js6
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/finished.js17
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/git.js292
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/opt-check.js48
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/pack-dir.js44
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/proclog.js23
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/util/read-json.js15
-rw-r--r--node_modules/libcipm/node_modules/pacote/lib/with-tarball-stream.js135
-rw-r--r--node_modules/libcipm/node_modules/pacote/manifest.js38
l---------node_modules/libcipm/node_modules/pacote/node_modules/.bin/semver1
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/README.md68
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/index.js289
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/package.json74
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/semver/CHANGELOG.md39
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/semver/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/semver/README.md412
-rwxr-xr-xnode_modules/libcipm/node_modules/pacote/node_modules/semver/bin/semver160
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/semver/package.json60
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/semver/range.bnf16
-rw-r--r--node_modules/libcipm/node_modules/pacote/node_modules/semver/semver.js1483
-rw-r--r--node_modules/libcipm/node_modules/pacote/package.json121
-rw-r--r--node_modules/libcipm/node_modules/pacote/packument.js29
-rw-r--r--node_modules/libcipm/node_modules/pacote/prefetch.js64
-rw-r--r--node_modules/libcipm/node_modules/pacote/tarball.js67
-rw-r--r--node_modules/libcipm/node_modules/ssri/CHANGELOG.md286
-rw-r--r--node_modules/libcipm/node_modules/ssri/LICENSE.md16
-rw-r--r--node_modules/libcipm/node_modules/ssri/README.md488
-rw-r--r--node_modules/libcipm/node_modules/ssri/index.js395
-rw-r--r--node_modules/libcipm/node_modules/ssri/package.json90
-rw-r--r--node_modules/libcipm/node_modules/tar/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/tar/README.md954
-rw-r--r--node_modules/libcipm/node_modules/tar/index.js18
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/buffer.js11
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/create.js105
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/extract.js112
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/header.js289
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/high-level-opt.js29
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/large-numbers.js97
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/list.js130
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/mkdir.js206
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/mode-fix.js14
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/pack.js404
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/parse.js428
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/pax.js146
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/read-entry.js98
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/replace.js220
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/types.js44
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/unpack.js621
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/update.js36
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/warn-mixin.js14
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/winchars.js23
-rw-r--r--node_modules/libcipm/node_modules/tar/lib/write-entry.js422
-rw-r--r--node_modules/libcipm/node_modules/tar/node_modules/yallist/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/tar/node_modules/yallist/README.md204
-rw-r--r--node_modules/libcipm/node_modules/tar/node_modules/yallist/iterator.js8
-rw-r--r--node_modules/libcipm/node_modules/tar/node_modules/yallist/package.json62
-rw-r--r--node_modules/libcipm/node_modules/tar/node_modules/yallist/yallist.js426
-rw-r--r--node_modules/libcipm/node_modules/tar/package.json82
-rw-r--r--node_modules/libcipm/node_modules/which/CHANGELOG.md152
-rw-r--r--node_modules/libcipm/node_modules/which/LICENSE15
-rw-r--r--node_modules/libcipm/node_modules/which/README.md51
-rwxr-xr-xnode_modules/libcipm/node_modules/which/bin/which52
-rw-r--r--node_modules/libcipm/node_modules/which/package.json65
-rw-r--r--node_modules/libcipm/node_modules/which/which.js135
-rw-r--r--node_modules/libcipm/package.json100
-rw-r--r--node_modules/make-fetch-happen/CHANGELOG.md587
-rw-r--r--node_modules/make-fetch-happen/LICENSE16
-rw-r--r--node_modules/make-fetch-happen/README.md404
-rw-r--r--node_modules/make-fetch-happen/agent.js171
-rw-r--r--node_modules/make-fetch-happen/cache.js245
-rw-r--r--node_modules/make-fetch-happen/index.js482
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md657
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md16
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/README.es.md628
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/README.md641
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/en.js3
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/es.js3
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/get.js247
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/index.js3
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js26
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js195
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js21
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js164
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js288
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js69
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js128
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js11
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js51
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js37
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js25
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js227
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/locales/en.js47
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/locales/en.json7
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/locales/es.js49
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/locales/es.json6
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/ls.js6
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/package.json126
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/put.js86
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/rm.js28
-rw-r--r--node_modules/make-fetch-happen/node_modules/cacache/verify.js3
-rw-r--r--node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md286
-rw-r--r--node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md16
-rw-r--r--node_modules/make-fetch-happen/node_modules/ssri/README.md488
-rw-r--r--node_modules/make-fetch-happen/node_modules/ssri/index.js395
-rw-r--r--node_modules/make-fetch-happen/node_modules/ssri/package.json89
-rw-r--r--node_modules/make-fetch-happen/package.json97
-rw-r--r--node_modules/make-fetch-happen/warning.js24
-rw-r--r--node_modules/node-fetch-npm/CHANGELOG.md252
-rw-r--r--node_modules/node-fetch-npm/LICENSE.md22
-rw-r--r--node_modules/node-fetch-npm/README.md398
-rw-r--r--node_modules/node-fetch-npm/package.json102
-rw-r--r--node_modules/node-fetch-npm/src/blob.js109
-rw-r--r--node_modules/node-fetch-npm/src/body.js411
-rw-r--r--node_modules/node-fetch-npm/src/common.js92
-rw-r--r--node_modules/node-fetch-npm/src/fetch-error.js35
-rw-r--r--node_modules/node-fetch-npm/src/headers.js296
-rw-r--r--node_modules/node-fetch-npm/src/index.js214
-rw-r--r--node_modules/node-fetch-npm/src/request.js174
-rw-r--r--node_modules/node-fetch-npm/src/response.js71
-rw-r--r--node_modules/npm-logical-tree/CHANGELOG.md46
-rw-r--r--node_modules/npm-logical-tree/LICENSE.md16
-rw-r--r--node_modules/npm-logical-tree/README.md147
-rw-r--r--node_modules/npm-logical-tree/index.js192
-rw-r--r--node_modules/npm-logical-tree/package.json83
-rw-r--r--node_modules/protoduck/CHANGELOG.md66
-rw-r--r--node_modules/protoduck/LICENSE21
-rw-r--r--node_modules/protoduck/README.md346
-rw-r--r--node_modules/protoduck/index.js349
-rw-r--r--node_modules/protoduck/package.json88
-rw-r--r--node_modules/socks-proxy-agent/.travis.yml21
-rw-r--r--node_modules/socks-proxy-agent/History.md96
-rw-r--r--node_modules/socks-proxy-agent/README.md133
-rw-r--r--node_modules/socks-proxy-agent/index.js145
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/.travis.yml23
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/History.md113
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/README.md145
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/index.js170
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/package.json65
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js37
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.key15
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.pem12
-rw-r--r--node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js697
-rw-r--r--node_modules/socks-proxy-agent/package.json71
-rw-r--r--node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key15
-rw-r--r--node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem12
-rw-r--r--node_modules/socks-proxy-agent/test/test.js144
-rw-r--r--node_modules/socks-proxy-agent/yarn.lock354
-rw-r--r--package-lock.json384
-rw-r--r--package.json1
351 files changed, 4 insertions, 53346 deletions
diff --git a/node_modules/agent-base/.travis.yml b/node_modules/agent-base/.travis.yml
deleted file mode 100644
index 76200951f..000000000
--- a/node_modules/agent-base/.travis.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-sudo: false
-
-language: node_js
-
-node_js:
- - "4"
- - "5"
- - "6"
- - "7"
- - "8"
- - "9"
- - "10"
-
-install:
- - PATH="`npm bin`:`npm bin -g`:$PATH"
- # Install dependencies and build
- - npm install
-
-script:
- # Output useful info for debugging
- - node --version
- - npm --version
- # Run tests
- - npm test
diff --git a/node_modules/agent-base/History.md b/node_modules/agent-base/History.md
deleted file mode 100644
index 80c88dc40..000000000
--- a/node_modules/agent-base/History.md
+++ /dev/null
@@ -1,113 +0,0 @@
-
-4.2.0 / 2018-01-15
-==================
-
- * Add support for returning an `http.Agent` instance
- * Optimize promisifying logic
- * Set `timeout` to null for proper cleanup
- * Remove Node.js <= 0.11.3 special-casing from test case
-
-4.1.2 / 2017-11-20
-==================
-
- * test Node 9 on Travis
- * ensure that `https.get()` uses the patched `https.request()`
-
-4.1.1 / 2017-07-20
-==================
-
- * Correct `https.request()` with a String (#9)
-
-4.1.0 / 2017-06-26
-==================
-
- * mix in Agent options into Request options
- * throw when nothing is returned from agent-base callback
- * do not modify the options object for https requests
-
-4.0.1 / 2017-06-13
-==================
-
- * add `this` context tests and fixes
-
-4.0.0 / 2017-06-06
-==================
-
- * drop support for Node.js < 4
- * drop old versions of Node.js from Travis-CI
- * specify Node.js >= 4.0.0 in `engines.node`
- * remove more old code
- * remove "extend" dependency
- * remove "semver" dependency
- * make the Promise logic a bit cleaner
- * add async function pseudo-example to README
- * use direct return in README example
-
-3.0.0 / 2017-06-02
-==================
-
- * drop support for Node.js v0.8 and v0.10
- * add support for async, Promises, and direct return
- * add a couple `options` test cases
- * implement a `"timeout"` option
- * rename main file to `index.js`
- * test Node 8 on Travis
-
-2.1.1 / 2017-05-30
-==================
-
- * Revert [`fe2162e`](https://github.com/TooTallNate/node-agent-base/commit/fe2162e0ba18123f5b301cba4de1e9dd74e437cd) and [`270bdc9`](https://github.com/TooTallNate/node-agent-base/commit/270bdc92eb8e3bd0444d1e5266e8e9390aeb3095) (fixes #7)
-
-2.1.0 / 2017-05-26
-==================
-
- * unref is not supported for node < 0.9.1 (@pi0)
- * add tests to dangling socket (@pi0)
- * check unref() is supported (@pi0)
- * fix dangling sockets problem (@pi0)
- * add basic "ws" module tests
- * make `Agent` be subclassable
- * turn `addRequest()` into a named function
- * test: Node.js v4 likes to call `cork` on the stream (#3, @tomhughes)
- * travis: test node v4, v5, v6 and v7
-
-2.0.1 / 2015-09-10
-==================
-
- * package: update "semver" to v5.0.1 for WebPack (#1, @vhpoet)
-
-2.0.0 / 2015-07-10
-==================
-
- * refactor to patch Node.js core for more consistent `opts` values
- * ensure that HTTP(s) default port numbers are always given
- * test: use ssl-cert-snakeoil SSL certs
- * test: add tests for arbitrary options
- * README: add API section
- * README: make the Agent HTTP/HTTPS generic in the example
- * README: use SVG for Travis-CI badge
-
-1.0.2 / 2015-06-27
-==================
-
- * agent: set `req._hadError` to true after emitting "error"
- * package: update "mocha" to v2
- * test: add artificial HTTP GET request test
- * test: add artificial data events test
- * test: fix artifical GET response test on node > v0.11.3
- * test: use a real timeout for the async error test
-
-1.0.1 / 2013-09-09
-==================
-
- * Fix passing an "error" object to the callback function on the first tick
-
-1.0.0 / 2013-09-09
-==================
-
- * New API: now you pass a callback function directly
-
-0.0.1 / 2013-07-09
-==================
-
- * Initial release
diff --git a/node_modules/agent-base/README.md b/node_modules/agent-base/README.md
deleted file mode 100644
index dbeceab8a..000000000
--- a/node_modules/agent-base/README.md
+++ /dev/null
@@ -1,145 +0,0 @@
-agent-base
-==========
-### Turn a function into an [`http.Agent`][http.Agent] instance
-[![Build Status](https://travis-ci.org/TooTallNate/node-agent-base.svg?branch=master)](https://travis-ci.org/TooTallNate/node-agent-base)
-
-This module provides an `http.Agent` generator. That is, you pass it an async
-callback function, and it returns a new `http.Agent` instance that will invoke the
-given callback function when sending outbound HTTP requests.
-
-#### Some subclasses:
-
-Here's some more interesting uses of `agent-base`.
-Send a pull request to list yours!
-
- * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints
- * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints
- * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS
- * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS (v4a) proxy `http.Agent` implementation for HTTP and HTTPS
-
-
-Installation
-------------
-
-Install with `npm`:
-
-``` bash
-$ npm install agent-base
-```
-
-
-Example
--------
-
-Here's a minimal example that creates a new `net.Socket` connection to the server
-for every HTTP request (i.e. the equivalent of `agent: false` option):
-
-```js
-var net = require('net');
-var tls = require('tls');
-var url = require('url');
-var http = require('http');
-var agent = require('agent-base');
-
-var endpoint = 'http://nodejs.org/api/';
-var parsed = url.parse(endpoint);
-
-// This is the important part!
-parsed.agent = agent(function (req, opts) {
- var socket;
- // `secureEndpoint` is true when using the https module
- if (opts.secureEndpoint) {
- socket = tls.connect(opts);
- } else {
- socket = net.connect(opts);
- }
- return socket;
-});
-
-// Everything else works just like normal...
-http.get(parsed, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-Returning a Promise or using an `async` function is also supported:
-
-```js
-agent(async function (req, opts) {
- await sleep(1000);
- // etc…
-});
-```
-
-Return another `http.Agent` instance to "pass through" the responsibility
-for that HTTP request to that agent:
-
-```js
-agent(function (req, opts) {
- return opts.secureEndpoint ? https.globalAgent : http.globalAgent;
-});
-```
-
-
-API
----
-
-## Agent(Function callback[, Object options]) → [http.Agent][]
-
-Creates a base `http.Agent` that will execute the callback function `callback`
-for every HTTP request that it is used as the `agent` for. The callback function
-is responsible for creating a `stream.Duplex` instance of some kind that will be
-used as the underlying socket in the HTTP request.
-
-The `options` object accepts the following properties:
-
- * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional).
-
-The callback function should have the following signature:
-
-### callback(http.ClientRequest req, Object options, Function cb) → undefined
-
-The ClientRequest `req` can be accessed to read request headers and
-and the path, etc. The `options` object contains the options passed
-to the `http.request()`/`https.request()` function call, and is formatted
-to be directly passed to `net.connect()`/`tls.connect()`, or however
-else you want a Socket to be created. Pass the created socket to
-the callback function `cb` once created, and the HTTP request will
-continue to proceed.
-
-If the `https` module is used to invoke the HTTP request, then the
-`secureEndpoint` property on `options` _will be set to `true`_.
-
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent
-[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent
-[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent
-[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent
-[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent
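
The API section of the README above describes the three-argument callback form only in prose, while its example uses the direct-return form. A minimal sketch of the callback form, assuming agent-base@4 is installed; the host and the 5000 ms timeout are placeholders, not values taken from the diff:

```js
// Three-argument callback form per the README's API section: the socket is
// handed to `cb` once created, and agent-base promisifies callbacks that
// declare three parameters.
var net = require('net');
var http = require('http');
var agent = require('agent-base');

var myAgent = agent(function (req, opts, cb) {
  // create the underlying socket and pass it back to agent-base
  cb(null, net.connect({ host: opts.host, port: opts.port }));
}, { timeout: 5000 });

http.get({ host: 'example.com', path: '/', agent: myAgent }, function (res) {
  console.log(res.statusCode);
});
```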
diff --git a/node_modules/agent-base/index.d.ts b/node_modules/agent-base/index.d.ts
deleted file mode 100644
index ff6788bdc..000000000
--- a/node_modules/agent-base/index.d.ts
+++ /dev/null
@@ -1,43 +0,0 @@
-// Type definitions for agent-base 4.2.1
-// Project: https://github.com/TooTallNate/node-agent-base
-// Definitions by: Christopher Quadflieg <https://github.com/Shinigami92>
-
-/// <reference types="node" />
-import { EventEmitter } from 'events';
-
-declare namespace Agent {
- export type AgentCallback = (
- req?: any,
- opts?: {
- secureEndpoint: boolean;
- }
- ) => void;
-
- export interface AgentOptions {
- timeout?: number;
- host?: string;
- port?: number;
- [key: string]: any;
- }
-
- export interface Agent extends EventEmitter {
- _promisifiedCallback: boolean;
- timeout: number | null;
- options?: AgentOptions;
- callback: AgentCallback;
- addRequest: (req?: any, opts?: any) => void;
- freeSocket: (socket: any, opts: any) => void;
- }
-}
-
-/**
- * Base `http.Agent` implementation.
- * No pooling/keep-alive is implemented by default.
- */
-declare function Agent(opts?: Agent.AgentOptions): Agent.Agent;
-declare function Agent(
- callback: Agent.AgentCallback,
- opts?: Agent.AgentOptions
-): Agent.Agent;
-
-export = Agent;
diff --git a/node_modules/agent-base/index.js b/node_modules/agent-base/index.js
deleted file mode 100644
index 0ee6b2969..000000000
--- a/node_modules/agent-base/index.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict';
-require('./patch-core');
-const inherits = require('util').inherits;
-const promisify = require('es6-promisify');
-const EventEmitter = require('events').EventEmitter;
-
-module.exports = Agent;
-
-function isAgent(v) {
- return v && typeof v.addRequest === 'function';
-}
-
-/**
- * Base `http.Agent` implementation.
- * No pooling/keep-alive is implemented by default.
- *
- * @param {Function} callback
- * @api public
- */
-function Agent(callback, _opts) {
- if (!(this instanceof Agent)) {
- return new Agent(callback, _opts);
- }
-
- EventEmitter.call(this);
-
- // The callback gets promisified if it has 3 parameters
- // (i.e. it has a callback function) lazily
- this._promisifiedCallback = false;
-
- let opts = _opts;
- if ('function' === typeof callback) {
- this.callback = callback;
- } else if (callback) {
- opts = callback;
- }
-
- // timeout for the socket to be returned from the callback
- this.timeout = (opts && opts.timeout) || null;
-
- this.options = opts;
-}
-inherits(Agent, EventEmitter);
-
-/**
- * Override this function in your subclass!
- */
-Agent.prototype.callback = function callback(req, opts) {
- throw new Error(
- '"agent-base" has no default implementation, you must subclass and override `callback()`'
- );
-};
-
-/**
- * Called by node-core's "_http_client.js" module when creating
- * a new HTTP request with this Agent instance.
- *
- * @api public
- */
-Agent.prototype.addRequest = function addRequest(req, _opts) {
- const ownOpts = Object.assign({}, _opts);
-
- // Set default `host` for HTTP to localhost
- if (null == ownOpts.host) {
- ownOpts.host = 'localhost';
- }
-
- // Set default `port` for HTTP if none was explicitly specified
- if (null == ownOpts.port) {
- ownOpts.port = ownOpts.secureEndpoint ? 443 : 80;
- }
-
- const opts = Object.assign({}, this.options, ownOpts);
-
- if (opts.host && opts.path) {
- // If both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete opts.path;
- }
-
- delete opts.agent;
- delete opts.hostname;
- delete opts._defaultAgent;
- delete opts.defaultPort;
- delete opts.createConnection;
-
- // Hint to use "Connection: close"
- // XXX: non-documented `http` module API :(
- req._last = true;
- req.shouldKeepAlive = false;
-
- // Create the `stream.Duplex` instance
- let timeout;
- let timedOut = false;
- const timeoutMs = this.timeout;
- const freeSocket = this.freeSocket;
-
- function onerror(err) {
- if (req._hadError) return;
- req.emit('error', err);
- // For Safety. Some additional errors might fire later on
- // and we need to make sure we don't double-fire the error event.
- req._hadError = true;
- }
-
- function ontimeout() {
- timeout = null;
- timedOut = true;
- const err = new Error(
- 'A "socket" was not created for HTTP request before ' + timeoutMs + 'ms'
- );
- err.code = 'ETIMEOUT';
- onerror(err);
- }
-
- function callbackError(err) {
- if (timedOut) return;
- if (timeout != null) {
- clearTimeout(timeout);
- timeout = null;
- }
- onerror(err);
- }
-
- function onsocket(socket) {
- if (timedOut) return;
- if (timeout != null) {
- clearTimeout(timeout);
- timeout = null;
- }
- if (isAgent(socket)) {
- // `socket` is actually an http.Agent instance, so relinquish
- // responsibility for this `req` to the Agent from here on
- socket.addRequest(req, opts);
- } else if (socket) {
- function onfree() {
- freeSocket(socket, opts);
- }
- socket.on('free', onfree);
- req.onSocket(socket);
- } else {
- const err = new Error(
- 'no Duplex stream was returned to agent-base for `' + req.method + ' ' + req.path + '`'
- );
- onerror(err);
- }
- }
-
- if (!this._promisifiedCallback && this.callback.length >= 3) {
- // Legacy callback function - convert to a Promise
- this.callback = promisify(this.callback, this);
- this._promisifiedCallback = true;
- }
-
- if (timeoutMs > 0) {
- timeout = setTimeout(ontimeout, timeoutMs);
- }
-
- try {
- Promise.resolve(this.callback(req, opts)).then(onsocket, callbackError);
- } catch (err) {
- Promise.reject(err).catch(callbackError);
- }
-};
-
-Agent.prototype.freeSocket = function freeSocket(socket, opts) {
- // TODO reuse sockets
- socket.destroy();
-};
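
addRequest() above only starts a timer when a positive `timeout` was configured, and fails the request with code ETIMEOUT if no socket has been produced by then. A minimal sketch of that path, mirroring the "timeout" test further down; agent-base@4 is assumed, and the 100 ms value and 127.0.0.1 host are arbitrary:

```js
// ETIMEOUT path: the callback never produces a socket, so the timer set up
// from the `timeout` option fires and the request emits an error whose
// code is 'ETIMEOUT'.
var http = require('http');
var agent = require('agent-base');

var stalled = agent(function (req, opts, fn) {
  // intentionally never call fn(), so no socket is ever returned
}, { timeout: 100 });

var req = http.get({ host: '127.0.0.1', path: '/', agent: stalled });
req.on('error', function (err) {
  console.log(err.code); // 'ETIMEOUT'
});
```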
diff --git a/node_modules/agent-base/package.json b/node_modules/agent-base/package.json
deleted file mode 100644
index 70da68723..000000000
--- a/node_modules/agent-base/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
- "_from": "agent-base@4",
- "_id": "agent-base@4.3.0",
- "_inBundle": false,
- "_integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==",
- "_location": "/agent-base",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "agent-base@4",
- "name": "agent-base",
- "escapedName": "agent-base",
- "rawSpec": "4",
- "saveSpec": null,
- "fetchSpec": "4"
- },
- "_requiredBy": [
- "/http-proxy-agent",
- "/https-proxy-agent"
- ],
- "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz",
- "_shasum": "8165f01c436009bccad0b1d122f05ed770efc6ee",
- "_spec": "agent-base@4",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/http-proxy-agent",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://n8.io/"
- },
- "bugs": {
- "url": "https://github.com/TooTallNate/node-agent-base/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "es6-promisify": "^5.0.0"
- },
- "deprecated": false,
- "description": "Turn a function into an `http.Agent` instance",
- "devDependencies": {
- "@types/es6-promisify": "^5.0.0",
- "@types/node": "^10.5.3",
- "mocha": "^3.4.2",
- "ws": "^3.0.0"
- },
- "engines": {
- "node": ">= 4.0.0"
- },
- "homepage": "https://github.com/TooTallNate/node-agent-base#readme",
- "keywords": [
- "http",
- "agent",
- "base",
- "barebones",
- "https"
- ],
- "license": "MIT",
- "main": "./index.js",
- "name": "agent-base",
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/node-agent-base.git"
- },
- "scripts": {
- "test": "mocha --reporter spec"
- },
- "version": "4.3.0"
-}
diff --git a/node_modules/agent-base/patch-core.js b/node_modules/agent-base/patch-core.js
deleted file mode 100644
index 21cbbb675..000000000
--- a/node_modules/agent-base/patch-core.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict';
-const url = require('url');
-const https = require('https');
-
-/**
- * This currently needs to be applied to all Node.js versions
- * in order to determine if the `req` is an HTTP or HTTPS request.
- *
- * There is currently no PR attempting to move this property upstream.
- */
-const patchMarker = "__agent_base_https_request_patched__";
-if (!https.request[patchMarker]) {
- https.request = (function(request) {
- return function(_options, cb) {
- let options;
- if (typeof _options === 'string') {
- options = url.parse(_options);
- } else {
- options = Object.assign({}, _options);
- }
- if (null == options.port) {
- options.port = 443;
- }
- options.secureEndpoint = true;
- return request.call(https, options, cb);
- };
- })(https.request);
- https.request[patchMarker] = true;
-}
-
-/**
- * This is needed for Node.js >= 9.0.0 to make sure `https.get()` uses the
- * patched `https.request()`.
- *
- * Ref: https://github.com/nodejs/node/commit/5118f31
- */
-https.get = function (_url, _options, cb) {
- let options;
- if (typeof _url === 'string' && _options && typeof _options !== 'function') {
- options = Object.assign({}, url.parse(_url), _options);
- } else if (!_options && !cb) {
- options = _url;
- } else if (!cb) {
- options = _url;
- cb = _options;
- }
-
- const req = https.request(options, cb);
- req.end();
- return req;
-};
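
The patch above is what lets an agent callback distinguish HTTP from HTTPS: the wrapped https.request() stamps `secureEndpoint` onto the options and defaults the port to 443 before they reach addRequest(). A minimal sketch of observing that, assuming agent-base@4 is installed and using a placeholder host; the request is handed back to the stock agent:

```js
// Effect of patch-core.js: once agent-base is loaded, options seen by the
// agent callback for https requests carry `secureEndpoint: true` and a
// default port of 443. Returning another http.Agent passes the request
// through to it.
var https = require('https');
var agent = require('agent-base');

var probe = agent(function (req, opts) {
  console.log(opts.secureEndpoint); // true, set by the patched https.request()
  console.log(opts.port);           // 443, defaulted when no port is given
  return https.globalAgent;         // pass through to the default agent
});

https.get({ host: 'example.com', path: '/', agent: probe });
```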
diff --git a/node_modules/agent-base/test/ssl-cert-snakeoil.key b/node_modules/agent-base/test/ssl-cert-snakeoil.key
deleted file mode 100644
index fd1250122..000000000
--- a/node_modules/agent-base/test/ssl-cert-snakeoil.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQCzURxIqzer0ACAbX/lHdsn4Gd9PLKrf7EeDYfIdV0HZKPD8WDr
-bBx2/fBu0OW2sjnzv/SVZbJ0DAuPE/p0+eT0qb2qC10iz9iTD7ribd7gxhirVb8y
-b3fBjXsxc8V8p4Ny1LcvNSqCjwUbJqdRogfoJeTiqPM58z5sNzuv5iq7iwIDAQAB
-AoGAPMQy4olrP0UotlzlJ36bowLP70ffgHCwU+/f4NWs5fF78c3du0oSx1w820Dd
-Z7E0JF8bgnlJJTxjumPZz0RUCugrEHBKJmzEz3cxF5E3+7NvteZcjKn9D67RrM5x
-1/uSZ9cqKE9cYvY4fSuHx18diyZ4axR/wB1Pea2utjjDM+ECQQDb9ZbmmaWMiRpQ
-5Up+loxP7BZNPsEVsm+DVJmEFbaFgGfncWBqSIqnPNjMwTwj0OigTwCAEGPkfRVW
-T0pbYWCxAkEA0LK7SCTwzyDmhASUalk0x+3uCAA6ryFdwJf/wd8TRAvVOmkTEldX
-uJ7ldLvfrONYO3v56uKTU/SoNdZYzKtO+wJAX2KM4ctXYy5BXztPpr2acz4qHa1N
-Bh+vBAC34fOYhyQ76r3b1btHhWZ5jbFuZwm9F2erC94Ps5IaoqcX07DSwQJAPKGw
-h2U0EPkd/3zVIZCJJQya+vgWFIs9EZcXVtvYXQyTBkVApTN66MhBIYjzkub5205J
-bVQmOV37AKklY1DhwQJAA1wos0cYxro02edzatxd0DIR2r4qqOqLkw6BhYHhq6HJ
-ZvIcQkHqdSXzdETFc01I1znDGGIrJHcnvKWgBPoEUg==
------END RSA PRIVATE KEY-----
diff --git a/node_modules/agent-base/test/ssl-cert-snakeoil.pem b/node_modules/agent-base/test/ssl-cert-snakeoil.pem
deleted file mode 100644
index b115a5e91..000000000
--- a/node_modules/agent-base/test/ssl-cert-snakeoil.pem
+++ /dev/null
@@ -1,12 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIB1TCCAT4CCQDV5mPlzm9+izANBgkqhkiG9w0BAQUFADAvMS0wKwYDVQQDEyQ3
-NTI3YmQ3Ny1hYjNlLTQ3NGItYWNlNy1lZWQ2MDUzOTMxZTcwHhcNMTUwNzA2MjI0
-NTA3WhcNMjUwNzAzMjI0NTA3WjAvMS0wKwYDVQQDEyQ3NTI3YmQ3Ny1hYjNlLTQ3
-NGItYWNlNy1lZWQ2MDUzOTMxZTcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
-ALNRHEirN6vQAIBtf+Ud2yfgZ308sqt/sR4Nh8h1XQdko8PxYOtsHHb98G7Q5bay
-OfO/9JVlsnQMC48T+nT55PSpvaoLXSLP2JMPuuJt3uDGGKtVvzJvd8GNezFzxXyn
-g3LUty81KoKPBRsmp1GiB+gl5OKo8znzPmw3O6/mKruLAgMBAAEwDQYJKoZIhvcN
-AQEFBQADgYEACzoHUF8UV2Z6541Q2wKEA0UFUzmUjf/E1XwBO+1P15ZZ64uw34B4
-1RwMPtAo9RY/PmICTWtNxWGxkzwb2JtDWtnxVER/lF8k2XcXPE76fxTHJF/BKk9J
-QU8OTD1dd9gHCBviQB9TqntRZ5X7axjtuWjb2umY+owBYzAHZkp1HKI=
------END CERTIFICATE-----
diff --git a/node_modules/agent-base/test/test.js b/node_modules/agent-base/test/test.js
deleted file mode 100644
index 0f372c076..000000000
--- a/node_modules/agent-base/test/test.js
+++ /dev/null
@@ -1,728 +0,0 @@
-/**
- * Module dependencies.
- */
-
-var fs = require('fs');
-var url = require('url');
-var net = require('net');
-var tls = require('tls');
-var http = require('http');
-var https = require('https');
-var WebSocket = require('ws');
-var assert = require('assert');
-var events = require('events');
-var inherits = require('util').inherits;
-var Agent = require('../');
-
-var PassthroughAgent = Agent(function(req, opts) {
- return opts.secureEndpoint ? https.globalAgent : http.globalAgent;
-});
-
-describe('Agent', function() {
- describe('subclass', function() {
- it('should be subclassable', function(done) {
- function MyAgent() {
- Agent.call(this);
- }
- inherits(MyAgent, Agent);
-
- MyAgent.prototype.callback = function(req, opts, fn) {
- assert.equal(req.path, '/foo');
- assert.equal(req.getHeader('host'), '127.0.0.1:1234');
- assert.equal(opts.secureEndpoint, true);
- done();
- };
-
- var info = url.parse('https://127.0.0.1:1234/foo');
- info.agent = new MyAgent();
- https.get(info);
- });
- });
- describe('options', function() {
- it('should support an options Object as first argument', function() {
- var agent = new Agent({ timeout: 1000 });
- assert.equal(1000, agent.timeout);
- });
- it('should support an options Object as second argument', function() {
- var agent = new Agent(function() {}, { timeout: 1000 });
- assert.equal(1000, agent.timeout);
- });
- it('should be mixed in with HTTP request options', function(done) {
- var agent = new Agent({
- host: 'my-proxy.com',
- port: 3128,
- foo: 'bar'
- });
- agent.callback = function(req, opts, fn) {
- assert.equal('bar', opts.foo);
- assert.equal('a', opts.b);
-
- // `host` and `port` are special-cases, and should always be
- // overwritten in the request `opts` inside the agent-base callback
- assert.equal('localhost', opts.host);
- assert.equal(80, opts.port);
- done();
- };
- var opts = {
- b: 'a',
- agent: agent
- };
- http.get(opts);
- });
- });
- describe('`this` context', function() {
- it('should be the Agent instance', function(done) {
- var called = false;
- var agent = new Agent();
- agent.callback = function() {
- called = true;
- assert.equal(this, agent);
- };
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert(/no Duplex stream was returned/.test(err.message));
- done();
- });
- });
- it('should be the Agent instance with callback signature', function(done) {
- var called = false;
- var agent = new Agent();
- agent.callback = function(req, opts, fn) {
- called = true;
- assert.equal(this, agent);
- fn();
- };
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert(/no Duplex stream was returned/.test(err.message));
- done();
- });
- });
- });
- describe('"error" event', function() {
- it('should be invoked on `http.ClientRequest` instance if `callback()` has not been defined', function(
- done
- ) {
- var agent = new Agent();
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert.equal(
- '"agent-base" has no default implementation, you must subclass and override `callback()`',
- err.message
- );
- done();
- });
- });
- it('should be invoked on `http.ClientRequest` instance if Error passed to callback function on the first tick', function(
- done
- ) {
- var agent = new Agent(function(req, opts, fn) {
- fn(new Error('is this caught?'));
- });
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert.equal('is this caught?', err.message);
- done();
- });
- });
- it('should be invoked on `http.ClientRequest` instance if Error passed to callback function after the first tick', function(
- done
- ) {
- var agent = new Agent(function(req, opts, fn) {
- setTimeout(function() {
- fn(new Error('is this caught?'));
- }, 10);
- });
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert.equal('is this caught?', err.message);
- done();
- });
- });
- });
- describe('artificial "streams"', function() {
- it('should send a GET request', function(done) {
- var stream = new events.EventEmitter();
-
- // needed for the `http` module to call .write() on the stream
- stream.writable = true;
-
- stream.write = function(str) {
- assert(0 == str.indexOf('GET / HTTP/1.1'));
- done();
- };
-
- // needed for `http` module in Node.js 4
- stream.cork = function() {};
-
- var opts = {
- method: 'GET',
- host: '127.0.0.1',
- path: '/',
- port: 80,
- agent: new Agent(function(req, opts, fn) {
- fn(null, stream);
- })
- };
- var req = http.request(opts);
- req.end();
- });
- it('should receive a GET response', function(done) {
- var stream = new events.EventEmitter();
- var opts = {
- method: 'GET',
- host: '127.0.0.1',
- path: '/',
- port: 80,
- agent: new Agent(function(req, opts, fn) {
- fn(null, stream);
- })
- };
- var req = http.request(opts, function(res) {
- assert.equal('1.0', res.httpVersion);
- assert.equal(200, res.statusCode);
- assert.equal('bar', res.headers.foo);
- assert.deepEqual(['1', '2'], res.headers['set-cookie']);
- done();
- });
-
- // have to wait for the "socket" event since `http.ClientRequest`
- // doesn't *actually* attach the listeners to the "stream" until
- // this happens
- req.once('socket', function() {
- var buf = Buffer.from(
- 'HTTP/1.0 200\r\n' +
- 'Foo: bar\r\n' +
- 'Set-Cookie: 1\r\n' +
- 'Set-Cookie: 2\r\n\r\n'
- );
- stream.emit('data', buf);
- });
-
- req.end();
- });
- });
-});
-
-describe('"http" module', function() {
- var server;
- var port;
-
- // setup test HTTP server
- before(function(done) {
- server = http.createServer();
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should work for basic HTTP requests', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should support direct return in `connect()`', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts) {
- called = true;
- return net.connect(opts);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should support returning a Promise in `connect()`', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts) {
- return new Promise(function(resolve, reject) {
- called = true;
- resolve(net.connect(opts));
- });
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should set the `Connection: close` response header', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Url', req.url);
- assert.equal('close', req.headers.connection);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/bar');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('/bar', res.headers['x-url']);
- assert.equal('close', res.headers.connection);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should pass through options from `http.request()`', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal('google.com', opts.host);
- assert.equal('bar', opts.foo);
- done();
- });
-
- http.get({
- host: 'google.com',
- foo: 'bar',
- agent: agent
- });
- });
-
- it('should default to port 80', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal(80, opts.port);
- done();
- });
-
- // (probably) not hitting a real HTTP server here,
- // so no need to add a httpServer request listener
- http.get({
- host: '127.0.0.1',
- path: '/foo',
- agent: agent
- });
- });
-
- it('should support the "timeout" option', function(done) {
- // ensure we timeout after the "error" event had a chance to trigger
- this.timeout(1000);
- this.slow(800);
-
- var agent = new Agent(
- function(req, opts, fn) {
- // this function will time out
- },
- { timeout: 100 }
- );
-
- var opts = url.parse('http://nodejs.org');
- opts.agent = agent;
-
- var req = http.get(opts);
- req.once('error', function(err) {
- assert.equal('ETIMEOUT', err.code);
- req.abort();
- done();
- });
- });
-
- it('should free sockets after use', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- res.socket.emit('free');
- assert.equal(true, res.socket.destroyed);
- assert(gotReq);
- done();
- });
- });
-
-
- describe('PassthroughAgent', function() {
- it('should pass through to `http.globalAgent`', function(done) {
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = PassthroughAgent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- done();
- });
- });
- });
-});
-
-describe('"https" module', function() {
- var server;
- var port;
-
- // setup test HTTPS server
- before(function(done) {
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- server = https.createServer(options);
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTPS server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should not modify the passed in Options object', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- assert.equal(true, opts.secureEndpoint);
- assert.equal(443, opts.port);
- assert.equal('localhost', opts.host);
- });
- var opts = { agent: agent };
- var req = https.request(opts);
- assert.equal(true, called);
- assert.equal(false, 'secureEndpoint' in opts);
- assert.equal(false, 'port' in opts);
- done();
- });
-
- it('should work with a String URL', function(done) {
- var endpoint = 'https://127.0.0.1:' + port;
- var req = https.get(endpoint);
-
- // it's gonna error out since `rejectUnauthorized` is not being passed in
- req.on('error', function(err) {
- assert.equal(err.code, 'DEPTH_ZERO_SELF_SIGNED_CERT');
- done();
- });
- });
-
- it('should work for basic HTTPS requests', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- assert(opts.secureEndpoint);
- var socket = tls.connect(opts);
- fn(null, socket);
- });
-
- // add HTTPS server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('https://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- info.rejectUnauthorized = false;
- https.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should pass through options from `https.request()`', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal('google.com', opts.host);
- assert.equal('bar', opts.foo);
- done();
- });
-
- https.get({
- host: 'google.com',
- foo: 'bar',
- agent: agent
- });
- });
-
- it('should support the 3-argument `https.get()`', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal('google.com', opts.host);
- assert.equal('/q', opts.pathname || opts.path);
- assert.equal('881', opts.port);
- assert.equal('bar', opts.foo);
- done();
- });
-
- https.get(
- 'https://google.com:881/q',
- {
- host: 'google.com',
- foo: 'bar',
- agent: agent
- }
- );
- });
-
- it('should default to port 443', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal(true, opts.secureEndpoint);
- assert.equal(false, opts.rejectUnauthorized);
- assert.equal(443, opts.port);
- done();
- });
-
- // (probably) not hitting a real HTTPS server here,
- // so no need to add an HTTPS server "request" listener
- https.get({
- host: '127.0.0.1',
- path: '/foo',
- agent: agent,
- rejectUnauthorized: false
- });
- });
-
- it('should not re-patch https.request', () => {
- var patchModulePath = "../patch-core";
- var patchedRequest = https.request;
-
- delete require.cache[require.resolve(patchModulePath)];
- require(patchModulePath);
-
- assert.equal(patchedRequest, https.request);
- assert.equal(true, https.request.__agent_base_https_request_patched__);
- });
-
- describe('PassthroughAgent', function() {
- it('should pass through to `https.globalAgent`', function(done) {
- // add HTTPS server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('https://127.0.0.1:' + port + '/foo');
- info.agent = PassthroughAgent;
- info.rejectUnauthorized = false;
- https.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- done();
- });
- });
- });
-});
-
-describe('"ws" server', function() {
- var wss;
- var server;
- var port;
-
- // setup test HTTP server
- before(function(done) {
- server = http.createServer();
- wss = new WebSocket.Server({ server: server });
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should work for basic WebSocket connections', function(done) {
- function onconnection(ws) {
- ws.on('message', function(data) {
- assert.equal('ping', data);
- ws.send('pong');
- });
- }
- wss.on('connection', onconnection);
-
- var agent = new Agent(function(req, opts, fn) {
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- var client = new WebSocket('ws://127.0.0.1:' + port + '/', {
- agent: agent
- });
-
- client.on('open', function() {
- client.send('ping');
- });
-
- client.on('message', function(data) {
- assert.equal('pong', data);
- client.close();
- wss.removeListener('connection', onconnection);
- done();
- });
- });
-});
-
-describe('"wss" server', function() {
- var wss;
- var server;
- var port;
-
- // setup test HTTPS server
- before(function(done) {
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- server = https.createServer(options);
- wss = new WebSocket.Server({ server: server });
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTPS server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should work for secure WebSocket connections', function(done) {
- function onconnection(ws) {
- ws.on('message', function(data) {
- assert.equal('ping', data);
- ws.send('pong');
- });
- }
- wss.on('connection', onconnection);
-
- var agent = new Agent(function(req, opts, fn) {
- var socket = tls.connect(opts);
- fn(null, socket);
- });
-
- var client = new WebSocket('wss://127.0.0.1:' + port + '/', {
- agent: agent,
- rejectUnauthorized: false
- });
-
- client.on('open', function() {
- client.send('ping');
- });
-
- client.on('message', function(data) {
- assert.equal('pong', data);
- client.close();
- wss.removeListener('connection', onconnection);
- done();
- });
- });
-});
diff --git a/node_modules/agentkeepalive/History.md b/node_modules/agentkeepalive/History.md
deleted file mode 100644
index d5d14d8b4..000000000
--- a/node_modules/agentkeepalive/History.md
+++ /dev/null
@@ -1,170 +0,0 @@
-
-3.5.2 / 2018-10-19
-==================
-
-**fixes**
- * [[`5751fc1`](http://github.com/node-modules/agentkeepalive/commit/5751fc1180ed6544602c681ffbd08ca66a0cb12c)] - fix: sockLen being miscalculated when removing sockets (#60) (Ehden Sinai <<cixel@users.noreply.github.com>>)
-
-3.5.1 / 2018-07-31
-==================
-
-**fixes**
- * [[`495f1ab`](http://github.com/node-modules/agentkeepalive/commit/495f1ab625d43945d72f68096b97db723d4f0657)] - fix: add the lost npm files (#66) (Henry Zhuang <<zhuanghengfei@gmail.com>>)
-
-3.5.0 / 2018-07-31
-==================
-
-**features**
- * [[`16f5aea`](http://github.com/node-modules/agentkeepalive/commit/16f5aeadfda57f1c602652f1472a63cc83cd05bf)] - feat: add typing define. (#65) (Henry Zhuang <<zhuanghengfei@gmail.com>>)
-
-**others**
- * [[`28fa062`](http://github.com/node-modules/agentkeepalive/commit/28fa06246fb5103f88ebeeb8563757a9078b8157)] - docs: add "per host" to description of maxFreeSockets (tony-gutierrez <<tony.gutierrez@bluefletch.com>>)
- * [[`7df2577`](http://github.com/node-modules/agentkeepalive/commit/7df25774f00a1031ca4daad2878a17e0539072a2)] - test: run test on node 10 (#63) (fengmk2 <<fengmk2@gmail.com>>)
-
-3.4.1 / 2018-03-08
-==================
-
-**fixes**
- * [[`4d3a3b1`](http://github.com/node-modules/agentkeepalive/commit/4d3a3b1f7b16595febbbd39eeed72b2663549014)] - fix: Handle ipv6 addresses in host-header correctly with TLS (#53) (Mattias Holmlund <<u376@m1.holmlund.se>>)
-
-**others**
- * [[`55a7a5c`](http://github.com/node-modules/agentkeepalive/commit/55a7a5cd33e97f9a8370083dcb041c5552f10ac9)] - test: stop timer after test end (fengmk2 <<fengmk2@gmail.com>>)
-
-3.4.0 / 2018-02-27
-==================
-
-**features**
- * [[`bc7cadb`](http://github.com/node-modules/agentkeepalive/commit/bc7cadb30ecd2071e2b341ac53ae1a2b8155c43d)] - feat: use socket custom freeSocketKeepAliveTimeout first (#59) (fengmk2 <<fengmk2@gmail.com>>)
-
-**others**
- * [[`138eda8`](http://github.com/node-modules/agentkeepalive/commit/138eda81e10b632aaa87bea0cb66d8667124c4e8)] - doc: fix `keepAliveMsecs` params description (#55) (Hongcai Deng <<admin@dhchouse.com>>)
-
-3.3.0 / 2017-06-20
-==================
-
- * feat: add statusChanged getter (#51)
- * chore: format License
-
-3.2.0 / 2017-06-10
-==================
-
- * feat: add expiring active sockets
- * test: add node 8 (#49)
-
-3.1.0 / 2017-02-20
-==================
-
- * feat: timeout support humanize ms (#48)
-
-3.0.0 / 2016-12-20
-==================
-
- * fix: emit agent socket close event
- * test: add remove excess calls to removeSocket
- * test: use egg-ci
- * test: refactor test with eslint rules
- * feat: merge _http_agent.js from 7.2.1
-
-2.2.0 / 2016-06-26
-==================
-
- * feat: Add browser shim (noop) for isomorphic use. (#39)
- * chore: add security check badge
-
-2.1.1 / 2016-04-06
-==================
-
- * https: fix ssl socket leak when keepalive is used
- * chore: remove circle ci image
-
-2.1.0 / 2016-04-02
-==================
-
- * fix: opened sockets number overflow maxSockets
-
-2.0.5 / 2016-03-16
-==================
-
- * fix: pick _evictSession to httpsAgent
-
-2.0.4 / 2016-03-13
-==================
-
- * test: add Circle ci
- * test: add appveyor ci build
- * refactor: make sure only one error listener
- * chore: use codecov
- * fix: handle idle socket error
- * test: run on more node versions
-
-2.0.3 / 2015-08-03
-==================
-
- * fix: add default error handler to avoid Unhandled error event throw
-
-2.0.2 / 2015-04-25
-==================
-
- * fix: remove socket from freeSockets on 'timeout' (@pmalouin)
-
-2.0.1 / 2015-04-19
-==================
-
- * fix: add timeoutSocketCount to getCurrentStatus()
- * feat(getCurrentStatus): add getCurrentStatus
-
-2.0.0 / 2015-04-01
-==================
-
- * fix: socket.destroyed always be undefined on 0.10.x
- * Make it compatible with node v0.10.x (@lattmann)
-
-1.2.1 / 2015-03-23
-==================
-
- * patch from iojs: don't overwrite servername option
- * patch commits from joyent/node
- * add max sockets test case
- * add nagle algorithm delayed link
-
-1.2.0 / 2014-09-02
-==================
-
- * allow set keepAliveTimeout = 0
- * support timeout on working socket. fixed #6
-
-1.1.0 / 2014-08-28
-==================
-
- * add some socket counter for deep monitor
-
-1.0.0 / 2014-08-13
-==================
-
- * update _http_agent; only support node 0.11.0+
-
-0.2.2 / 2013-11-19
-==================
-
- * support node 0.8 and node 0.10
-
-0.2.1 / 2013-11-08
-==================
-
- * fix bug where sockets do not time out and hang forever; must use 0.2.x on node 0.11
-
-0.2.0 / 2013-11-06
-==================
-
- * use keepalive agent on node 0.11+ impl
-
-0.1.5 / 2013-06-24
-==================
-
- * support coveralls
- * add node 0.10 test
- * add 0.8.22 original https.js
- * add original http.js module to diff
- * update jscover
- * mv pem to fixtures
- * add https agent usage
diff --git a/node_modules/agentkeepalive/README.md b/node_modules/agentkeepalive/README.md
deleted file mode 100644
index 823145821..000000000
--- a/node_modules/agentkeepalive/README.md
+++ /dev/null
@@ -1,248 +0,0 @@
-# agentkeepalive
-
-[![NPM version][npm-image]][npm-url]
-[![build status][travis-image]][travis-url]
-[![Appveyor status][appveyor-image]][appveyor-url]
-[![Test coverage][codecov-image]][codecov-url]
-[![David deps][david-image]][david-url]
-[![Known Vulnerabilities][snyk-image]][snyk-url]
-[![npm download][download-image]][download-url]
-
-[npm-image]: https://img.shields.io/npm/v/agentkeepalive.svg?style=flat
-[npm-url]: https://npmjs.org/package/agentkeepalive
-[travis-image]: https://img.shields.io/travis/node-modules/agentkeepalive.svg?style=flat
-[travis-url]: https://travis-ci.org/node-modules/agentkeepalive
-[appveyor-image]: https://ci.appveyor.com/api/projects/status/k7ct4s47di6m5uy2?svg=true
-[appveyor-url]: https://ci.appveyor.com/project/fengmk2/agentkeepalive
-[codecov-image]: https://codecov.io/gh/node-modules/agentkeepalive/branch/master/graph/badge.svg
-[codecov-url]: https://codecov.io/gh/node-modules/agentkeepalive
-[david-image]: https://img.shields.io/david/node-modules/agentkeepalive.svg?style=flat
-[david-url]: https://david-dm.org/node-modules/agentkeepalive
-[snyk-image]: https://snyk.io/test/npm/agentkeepalive/badge.svg?style=flat-square
-[snyk-url]: https://snyk.io/test/npm/agentkeepalive
-[download-image]: https://img.shields.io/npm/dm/agentkeepalive.svg?style=flat-square
-[download-url]: https://npmjs.org/package/agentkeepalive
-
-Node.js's missing `keep alive` `http.Agent`. Supports `http` and `https`.
-
-## What's different from original `http.Agent`?
-
-- `keepAlive=true` by default
-- Disable Nagle's algorithm: `socket.setNoDelay(true)`
-- Add free socket timeout: avoid leaking sockets that go inactive for a long time in the free-sockets queue.
-- Add active socket timeout: avoid leaking sockets that go inactive for a long time in the active-sockets queue.
-
-## Install
-
-```bash
-$ npm install agentkeepalive --save
-```
-
-## new Agent([options])
-
-* `options` {Object} Set of configurable options to set on the agent.
- Can have the following fields:
- * `keepAlive` {Boolean} Keep sockets around in a pool to be used by
- other requests in the future. Default = `true`.
- * `keepAliveMsecs` {Number} When using the keepAlive option, specifies the initial delay
-   for TCP Keep-Alive packets. Ignored when the keepAlive option is false or undefined.
-   Default = `1000`.
- * `freeSocketKeepAliveTimeout`: {Number} Sets free sockets to time out
-   after `freeSocketKeepAliveTimeout` milliseconds of inactivity.
-   Default is `15000`. Only relevant if `keepAlive` is set to `true`.
- * `timeout`: {Number} Sets working sockets to time out
-   after `timeout` milliseconds of inactivity.
-   Default is `freeSocketKeepAliveTimeout * 2`.
- * `maxSockets` {Number} Maximum number of sockets to allow per
- host. Default = `Infinity`.
- * `maxFreeSockets` {Number} Maximum number of sockets (per host) to leave open
- in a free state. Only relevant if `keepAlive` is set to `true`.
- Default = `256`.
- * `socketActiveTTL` {Number} Sets the socket's active time to live, so it is retired even if it's in use.
-   If not set, the behaviour is unchanged (the socket is released only when free).
-   Default = `null`.
-
-## Usage
-
-```js
-const http = require('http');
-const Agent = require('agentkeepalive');
-
-const keepaliveAgent = new Agent({
- maxSockets: 100,
- maxFreeSockets: 10,
- timeout: 60000,
- freeSocketKeepAliveTimeout: 30000, // free socket keepalive for 30 seconds
-});
-
-const options = {
- host: 'cnodejs.org',
- port: 80,
- path: '/',
- method: 'GET',
- agent: keepaliveAgent,
-};
-
-const req = http.request(options, res => {
- console.log('STATUS: ' + res.statusCode);
- console.log('HEADERS: ' + JSON.stringify(res.headers));
- res.setEncoding('utf8');
- res.on('data', function (chunk) {
- console.log('BODY: ' + chunk);
- });
-});
-req.on('error', e => {
- console.log('problem with request: ' + e.message);
-});
-req.end();
-
-setTimeout(() => {
- if (keepaliveAgent.statusChanged) {
- console.log('[%s] agent status changed: %j', Date(), keepaliveAgent.getCurrentStatus());
- }
-}, 2000);
-
-```
-
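-A minimal variant of the snippet above (the `boundedAgent` name and the numeric values are only illustrative) showing the `socketActiveTTL` option from the list above, which retires a socket after a fixed lifetime even while it is in use:
-
-```js
-const Agent = require('agentkeepalive');
-
-const boundedAgent = new Agent({
-  maxSockets: 100,
-  freeSocketKeepAliveTimeout: 30000, // free sockets time out after 30s of inactivity
-  timeout: 60000,                    // working sockets time out after 60s of inactivity
-  socketActiveTTL: 5 * 60 * 1000,    // retire any socket after 5 minutes, even if busy
-});
-```
-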
-### `getter agent.statusChanged`
-
-Returns whether any of the counters have changed since the last checkpoint; a read that detects a change also starts a new checkpoint.
-
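-For example, a small sketch (reusing the `keepaliveAgent` from the usage snippet above) that polls periodically and only logs when the counters actually moved between checkpoints:
-
-```js
-setInterval(() => {
-  // statusChanged stays false while the agent is idle, so a quiet agent logs nothing.
-  if (keepaliveAgent.statusChanged) {
-    console.log('[%s] agent status changed: %j', Date(), keepaliveAgent.getCurrentStatus());
-  }
-}, 2000);
-```
-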
-### `agent.getCurrentStatus()`
-
-`agent.getCurrentStatus()` returns an object describing the current status of this agent:
-
-```js
-{
-  createSocketCount: 10,
-  createSocketErrorCount: 0,
-  closeSocketCount: 5,
-  errorSocketCount: 0,
-  timeoutSocketCount: 0,
-  requestCount: 5,
-  freeSockets: { 'localhost:57479:': 3 },
-  sockets: { 'localhost:57479:': 5 },
-  requests: {}
-}
-```
-
-### Support `https`
-
-```js
-const https = require('https');
-const HttpsAgent = require('agentkeepalive').HttpsAgent;
-
-const keepaliveAgent = new HttpsAgent();
-// https://www.google.com/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8
-const options = {
- host: 'www.google.com',
- port: 443,
- path: '/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8',
- method: 'GET',
- agent: keepaliveAgent,
-};
-
-const req = https.request(options, res => {
- console.log('STATUS: ' + res.statusCode);
- console.log('HEADERS: ' + JSON.stringify(res.headers));
- res.setEncoding('utf8');
- res.on('data', chunk => {
- console.log('BODY: ' + chunk);
- });
-});
-
-req.on('error', e => {
- console.log('problem with request: ' + e.message);
-});
-req.end();
-
-setTimeout(() => {
- console.log('agent status: %j', keepaliveAgent.getCurrentStatus());
-}, 2000);
-```
-
-## [Benchmark](https://github.com/node-modules/agentkeepalive/tree/master/benchmark)
-
-run the benchmark:
-
-```bash
-cd benchmark
-sh start.sh
-```
-
-Intel(R) Core(TM)2 Duo CPU P8600 @ 2.40GHz
-
-node@v0.8.9
-
-50 maxSockets, 60 concurrent, 1000 requests per concurrent, 5ms delay
-
-Keep alive agent (30 seconds):
-
-```
-Transactions: 60000 hits
-Availability: 100.00 %
-Elapsed time: 29.70 secs
-Data transferred: 14.88 MB
-Response time: 0.03 secs
-Transaction rate: 2020.20 trans/sec
-Throughput: 0.50 MB/sec
-Concurrency: 59.84
-Successful transactions: 60000
-Failed transactions: 0
-Longest transaction: 0.15
-Shortest transaction: 0.01
-```
-
-Normal agent:
-
-```
-Transactions: 60000 hits
-Availability: 100.00 %
-Elapsed time: 46.53 secs
-Data transferred: 14.88 MB
-Response time: 0.05 secs
-Transaction rate: 1289.49 trans/sec
-Throughput: 0.32 MB/sec
-Concurrency: 59.81
-Successful transactions: 60000
-Failed transactions: 0
-Longest transaction: 0.45
-Shortest transaction: 0.00
-```
-
-Socket created:
-
-```
-[proxy.js:120000] keepalive, 50 created, 60000 requestFinished, 1200 req/socket, 0 requests, 0 sockets, 0 unusedSockets, 50 timeout
-{" <10ms":662," <15ms":17825," <20ms":20552," <30ms":17646," <40ms":2315," <50ms":567," <100ms":377," <150ms":56," <200ms":0," >=200ms+":0}
-----------------------------------------------------------------
-[proxy.js:120000] normal , 53866 created, 84260 requestFinished, 1.56 req/socket, 0 requests, 0 sockets
-{" <10ms":75," <15ms":1112," <20ms":10947," <30ms":32130," <40ms":8228," <50ms":3002," <100ms":4274," <150ms":181," <200ms":18," >=200ms+":33}
-```
-
-## License
-
-```
-(The MIT License)
-
-Copyright(c) node-modules and other contributors.
-Copyright(c) 2012 - 2015 fengmk2 <fengmk2@gmail.com>
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-```
diff --git a/node_modules/agentkeepalive/browser.js b/node_modules/agentkeepalive/browser.js
deleted file mode 100644
index 29c9398aa..000000000
--- a/node_modules/agentkeepalive/browser.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = noop;
-module.exports.HttpsAgent = noop;
-
-// Noop function for the browser since native APIs don't use agents.
-function noop () {}
diff --git a/node_modules/agentkeepalive/index.d.ts b/node_modules/agentkeepalive/index.d.ts
deleted file mode 100644
index c11636f7c..000000000
--- a/node_modules/agentkeepalive/index.d.ts
+++ /dev/null
@@ -1,43 +0,0 @@
-declare module "agentkeepalive" {
- import * as http from 'http';
- import * as https from 'https';
-
- interface AgentStatus {
- createSocketCount: number,
- createSocketErrorCount: number,
- closeSocketCount: number,
- errorSocketCount: number,
- timeoutSocketCount: number,
- requestCount: number,
- freeSockets: object,
- sockets: object,
- requests: object,
- }
-
- interface HttpOptions extends http.AgentOptions {
- freeSocketKeepAliveTimeout?: number;
- timeout?: number;
- socketActiveTTL?: number;
- }
-
- interface HttpsOptions extends https.AgentOptions {
- freeSocketKeepAliveTimeout?: number;
- timeout?: number;
- socketActiveTTL?: number;
- }
-
- class internal extends http.Agent {
- constructor(opts?: HttpOptions);
- readonly statusChanged: boolean;
- createSocket(req: http.IncomingMessage, options: http.RequestOptions, cb: Function): void;
- getCurrentStatus(): AgentStatus;
- }
-
- namespace internal {
- export class HttpsAgent extends internal {
- constructor(opts?: HttpsOptions);
- }
- }
-
- export = internal;
-}
diff --git a/node_modules/agentkeepalive/index.js b/node_modules/agentkeepalive/index.js
deleted file mode 100644
index 6138131a9..000000000
--- a/node_modules/agentkeepalive/index.js
+++ /dev/null
@@ -1,4 +0,0 @@
-'use strict';
-
-module.exports = require('./lib/agent');
-module.exports.HttpsAgent = require('./lib/https_agent');
diff --git a/node_modules/agentkeepalive/lib/_http_agent.js b/node_modules/agentkeepalive/lib/_http_agent.js
deleted file mode 100644
index c324b7f87..000000000
--- a/node_modules/agentkeepalive/lib/_http_agent.js
+++ /dev/null
@@ -1,416 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// patch from https://github.com/nodejs/node/blob/v7.2.1/lib/_http_agent.js
-
-'use strict';
-
-const net = require('net');
-const util = require('util');
-const EventEmitter = require('events');
-const debug = util.debuglog('http');
-
-// New Agent code.
-
-// The largest departure from the previous implementation is that
-// an Agent instance holds connections for a variable number of host:ports.
-// Surprisingly, this is still API compatible as far as third parties are
-// concerned. The only code that really notices the difference is the
-// request object.
-
-// Another departure is that all code related to HTTP parsing is in
-// ClientRequest.onSocket(). The Agent is now *strictly*
-// concerned with managing a connection pool.
-
-function Agent(options) {
- if (!(this instanceof Agent))
- return new Agent(options);
-
- EventEmitter.call(this);
-
- var self = this;
-
- self.defaultPort = 80;
- self.protocol = 'http:';
-
- self.options = util._extend({}, options);
-
- // don't confuse net and make it think that we're connecting to a pipe
- self.options.path = null;
- self.requests = {};
- self.sockets = {};
- self.freeSockets = {};
- self.keepAliveMsecs = self.options.keepAliveMsecs || 1000;
- self.keepAlive = self.options.keepAlive || false;
- self.maxSockets = self.options.maxSockets || Agent.defaultMaxSockets;
- self.maxFreeSockets = self.options.maxFreeSockets || 256;
-
- // [patch start]
- // free keep-alive socket timeout. By default free sockets do not have a timeout.
- self.freeSocketKeepAliveTimeout = self.options.freeSocketKeepAliveTimeout || 0;
- // working socket timeout. By default working sockets do not have a timeout.
- self.timeout = self.options.timeout || 0;
- // the socket active time to live, even if it's in use
- this.socketActiveTTL = this.options.socketActiveTTL || null;
- // [patch end]
-
- self.on('free', function(socket, options) {
- var name = self.getName(options);
- debug('agent.on(free)', name);
-
- if (socket.writable &&
- self.requests[name] && self.requests[name].length) {
- // [patch start]
- debug('continue handle next request');
- // [patch end]
- self.requests[name].shift().onSocket(socket);
- if (self.requests[name].length === 0) {
- // don't leak
- delete self.requests[name];
- }
- } else {
- // If there are no pending requests, then put it in
- // the freeSockets pool, but only if we're allowed to do so.
- var req = socket._httpMessage;
- if (req &&
- req.shouldKeepAlive &&
- socket.writable &&
- self.keepAlive) {
- var freeSockets = self.freeSockets[name];
- var freeLen = freeSockets ? freeSockets.length : 0;
- var count = freeLen;
- if (self.sockets[name])
- count += self.sockets[name].length;
-
- if (count > self.maxSockets || freeLen >= self.maxFreeSockets) {
- socket.destroy();
- } else {
- freeSockets = freeSockets || [];
- self.freeSockets[name] = freeSockets;
- socket.setKeepAlive(true, self.keepAliveMsecs);
- socket.unref();
- socket._httpMessage = null;
- self.removeSocket(socket, options);
- freeSockets.push(socket);
-
- // [patch start]
- // Add a default error handler to avoid Unhandled 'error' event throw on idle socket
- // https://github.com/node-modules/agentkeepalive/issues/25
- // https://github.com/nodejs/node/pull/4482 (fixed in >= 4.4.0 and >= 5.4.0)
- if (socket.listeners('error').length === 0) {
- socket.once('error', freeSocketErrorListener);
- }
- // set free keepalive timer
- // try to use socket custom freeSocketKeepAliveTimeout first
- const freeSocketKeepAliveTimeout = socket.freeSocketKeepAliveTimeout || self.freeSocketKeepAliveTimeout;
- socket.setTimeout(freeSocketKeepAliveTimeout);
- debug(`push to free socket queue and wait for ${freeSocketKeepAliveTimeout}ms`);
- // [patch end]
- }
- } else {
- socket.destroy();
- }
- }
- });
-}
-
-util.inherits(Agent, EventEmitter);
-exports.Agent = Agent;
-
-// [patch start]
-function freeSocketErrorListener(err) {
- var socket = this;
- debug('SOCKET ERROR on FREE socket:', err.message, err.stack);
- socket.destroy();
- socket.emit('agentRemove');
-}
-// [patch end]
-
-Agent.defaultMaxSockets = Infinity;
-
-Agent.prototype.createConnection = net.createConnection;
-
-// Get the key for a given set of request options
-Agent.prototype.getName = function getName(options) {
- var name = options.host || 'localhost';
-
- name += ':';
- if (options.port)
- name += options.port;
-
- name += ':';
- if (options.localAddress)
- name += options.localAddress;
-
- // Pacify parallel/test-http-agent-getname by only appending
- // the ':' when options.family is set.
- if (options.family === 4 || options.family === 6)
- name += ':' + options.family;
-
- return name;
-};
-
-// [patch start]
-function handleSocketCreation(req) {
- return function(err, newSocket) {
- if (err) {
- process.nextTick(function() {
- req.emit('error', err);
- });
- return;
- }
- req.onSocket(newSocket);
- }
-}
-// [patch end]
-
-Agent.prototype.addRequest = function addRequest(req, options, port/*legacy*/,
- localAddress/*legacy*/) {
- // Legacy API: addRequest(req, host, port, localAddress)
- if (typeof options === 'string') {
- options = {
- host: options,
- port,
- localAddress
- };
- }
-
- options = util._extend({}, options);
- options = util._extend(options, this.options);
-
- if (!options.servername)
- options.servername = calculateServerName(options, req);
-
- var name = this.getName(options);
- if (!this.sockets[name]) {
- this.sockets[name] = [];
- }
-
- var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0;
- var sockLen = freeLen + this.sockets[name].length;
-
- if (freeLen) {
- // we have a free socket, so use that.
- var socket = this.freeSockets[name].shift();
- debug('have free socket');
-
- // [patch start]
- // remove free socket error event handler
- socket.removeListener('error', freeSocketErrorListener);
- // restart the default timer
- socket.setTimeout(this.timeout);
-
- if (this.socketActiveTTL && Date.now() - socket.createdTime > this.socketActiveTTL) {
- debug(`socket ${socket.createdTime} expired`);
- socket.destroy();
- return this.createSocket(req, options, handleSocketCreation(req));
- }
- // [patch end]
-
- // don't leak
- if (!this.freeSockets[name].length)
- delete this.freeSockets[name];
-
- socket.ref();
- req.onSocket(socket);
- this.sockets[name].push(socket);
- } else if (sockLen < this.maxSockets) {
- debug('call onSocket', sockLen, freeLen);
- // If we are under maxSockets create a new one.
- // [patch start]
- this.createSocket(req, options, handleSocketCreation(req));
- // [patch end]
- } else {
- debug('wait for socket');
- // We are over limit so we'll add it to the queue.
- if (!this.requests[name]) {
- this.requests[name] = [];
- }
- this.requests[name].push(req);
- }
-};
-
-Agent.prototype.createSocket = function createSocket(req, options, cb) {
- var self = this;
- options = util._extend({}, options);
- options = util._extend(options, self.options);
-
- if (!options.servername)
- options.servername = calculateServerName(options, req);
-
- var name = self.getName(options);
- options._agentKey = name;
-
- debug('createConnection', name, options);
- options.encoding = null;
- var called = false;
- const newSocket = self.createConnection(options, oncreate);
- // [patch start]
- if (newSocket) {
- oncreate(null, Object.assign(newSocket, { createdTime: Date.now() }));
- }
- // [patch end]
- function oncreate(err, s) {
- if (called)
- return;
- called = true;
- if (err)
- return cb(err);
- if (!self.sockets[name]) {
- self.sockets[name] = [];
- }
- self.sockets[name].push(s);
- debug('sockets', name, self.sockets[name].length);
-
- function onFree() {
- self.emit('free', s, options);
- }
- s.on('free', onFree);
-
- function onClose(err) {
- debug('CLIENT socket onClose');
- // This is the only place where sockets get removed from the Agent.
- // If you want to remove a socket from the pool, just close it.
- // All socket errors end in a close event anyway.
- self.removeSocket(s, options);
-
- // [patch start]
- self.emit('close');
- // [patch end]
- }
- s.on('close', onClose);
-
- // [patch start]
- // start socket timeout handler
- function onTimeout() {
- debug('CLIENT socket onTimeout');
- s.destroy();
- // Remove it from freeSockets immediately to prevent new requests from being sent through this socket.
- self.removeSocket(s, options);
- self.emit('timeout');
- }
- s.on('timeout', onTimeout);
- // set the default timer
- s.setTimeout(self.timeout);
- // [patch end]
-
- function onRemove() {
- // We need this function for cases like HTTP 'upgrade'
- // (defined by WebSockets) where we need to remove a socket from the
- // pool because it'll be locked up indefinitely
- debug('CLIENT socket onRemove');
- self.removeSocket(s, options);
- s.removeListener('close', onClose);
- s.removeListener('free', onFree);
- s.removeListener('agentRemove', onRemove);
-
- // [patch start]
- // remove socket timeout handler
- s.setTimeout(0, onTimeout);
- // [patch end]
- }
- s.on('agentRemove', onRemove);
- cb(null, s);
- }
-};
-
-function calculateServerName(options, req) {
- let servername = options.host;
- const hostHeader = req.getHeader('host');
- if (hostHeader) {
- // abc => abc
- // abc:123 => abc
- // [::1] => ::1
- // [::1]:123 => ::1
- if (hostHeader.startsWith('[')) {
- const index = hostHeader.indexOf(']');
- if (index === -1) {
- // Leading '[', but no ']'. Need to do something...
- servername = hostHeader;
- } else {
- servername = hostHeader.substr(1, index - 1);
- }
- } else {
- servername = hostHeader.split(':', 1)[0];
- }
- }
- return servername;
-}
-
-Agent.prototype.removeSocket = function removeSocket(s, options) {
- var name = this.getName(options);
- debug('removeSocket', name, 'writable:', s.writable);
- var sets = [this.sockets];
-
- // If the socket was destroyed, remove it from the free buffers too.
- if (!s.writable)
- sets.push(this.freeSockets);
-
- for (var sk = 0; sk < sets.length; sk++) {
- var sockets = sets[sk];
-
- if (sockets[name]) {
- var index = sockets[name].indexOf(s);
- if (index !== -1) {
- sockets[name].splice(index, 1);
- // Don't leak
- if (sockets[name].length === 0)
- delete sockets[name];
- }
- }
- }
-
- // [patch start]
- var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0;
- var sockLen = freeLen + (this.sockets[name] ? this.sockets[name].length : 0);
- // [patch end]
-
- if (this.requests[name] && this.requests[name].length && sockLen < this.maxSockets) {
- debug('removeSocket, have a request, make a socket');
- var req = this.requests[name][0];
- // If we have pending requests and a socket gets closed make a new one
- this.createSocket(req, options, function(err, newSocket) {
- if (err) {
- process.nextTick(function() {
- req.emit('error', err);
- });
- return;
- }
- newSocket.emit('free');
- });
- }
-};
-
-Agent.prototype.destroy = function destroy() {
- var sets = [this.freeSockets, this.sockets];
- for (var s = 0; s < sets.length; s++) {
- var set = sets[s];
- var keys = Object.keys(set);
- for (var v = 0; v < keys.length; v++) {
- var setName = set[keys[v]];
- for (var n = 0; n < setName.length; n++) {
- setName[n].destroy();
- }
- }
- }
-};
-
-exports.globalAgent = new Agent();
diff --git a/node_modules/agentkeepalive/lib/agent.js b/node_modules/agentkeepalive/lib/agent.js
deleted file mode 100644
index a51ad597b..000000000
--- a/node_modules/agentkeepalive/lib/agent.js
+++ /dev/null
@@ -1,133 +0,0 @@
-/**
- * refer:
- * * @atimb "Real keep-alive HTTP agent": https://gist.github.com/2963672
- * * https://github.com/joyent/node/blob/master/lib/http.js
- * * https://github.com/joyent/node/blob/master/lib/https.js
- * * https://github.com/joyent/node/blob/master/lib/_http_agent.js
- */
-
-'use strict';
-
-const OriginalAgent = require('./_http_agent').Agent;
-const ms = require('humanize-ms');
-
-class Agent extends OriginalAgent {
- constructor(options) {
- options = options || {};
- options.keepAlive = options.keepAlive !== false;
- // default is keep-alive and 15s free socket timeout
- if (options.freeSocketKeepAliveTimeout === undefined) {
- options.freeSocketKeepAliveTimeout = 15000;
- }
- // Legacy API: keepAliveTimeout should be renamed to `freeSocketKeepAliveTimeout`
- if (options.keepAliveTimeout) {
- options.freeSocketKeepAliveTimeout = options.keepAliveTimeout;
- }
- options.freeSocketKeepAliveTimeout = ms(options.freeSocketKeepAliveTimeout);
-
- // Sets the socket to time out after `timeout` milliseconds of inactivity on the socket.
- // By default it is double the free socket keepalive timeout.
- if (options.timeout === undefined) {
- options.timeout = options.freeSocketKeepAliveTimeout * 2;
- // make sure socket default inactivity timeout >= 30s
- if (options.timeout < 30000) {
- options.timeout = 30000;
- }
- }
- options.timeout = ms(options.timeout);
-
- super(options);
-
- this.createSocketCount = 0;
- this.createSocketCountLastCheck = 0;
-
- this.createSocketErrorCount = 0;
- this.createSocketErrorCountLastCheck = 0;
-
- this.closeSocketCount = 0;
- this.closeSocketCountLastCheck = 0;
-
- // socket error event count
- this.errorSocketCount = 0;
- this.errorSocketCountLastCheck = 0;
-
- this.requestCount = 0;
- this.requestCountLastCheck = 0;
-
- this.timeoutSocketCount = 0;
- this.timeoutSocketCountLastCheck = 0;
-
- this.on('free', s => {
- this.requestCount++;
- // last enter free queue timestamp
- s.lastFreeTime = Date.now();
- });
- this.on('timeout', () => {
- this.timeoutSocketCount++;
- });
- this.on('close', () => {
- this.closeSocketCount++;
- });
- this.on('error', () => {
- this.errorSocketCount++;
- });
- }
-
- createSocket(req, options, cb) {
- super.createSocket(req, options, (err, socket) => {
- if (err) {
- this.createSocketErrorCount++;
- return cb(err);
- }
- if (this.keepAlive) {
- // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/
- // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html
- socket.setNoDelay(true);
- }
- this.createSocketCount++;
- cb(null, socket);
- });
- }
-
- get statusChanged() {
- const changed = this.createSocketCount !== this.createSocketCountLastCheck ||
- this.createSocketErrorCount !== this.createSocketErrorCountLastCheck ||
- this.closeSocketCount !== this.closeSocketCountLastCheck ||
- this.errorSocketCount !== this.errorSocketCountLastCheck ||
- this.timeoutSocketCount !== this.timeoutSocketCountLastCheck ||
- this.requestCount !== this.requestCountLastCheck;
- if (changed) {
- this.createSocketCountLastCheck = this.createSocketCount;
- this.createSocketErrorCountLastCheck = this.createSocketErrorCount;
- this.closeSocketCountLastCheck = this.closeSocketCount;
- this.errorSocketCountLastCheck = this.errorSocketCount;
- this.timeoutSocketCountLastCheck = this.timeoutSocketCount;
- this.requestCountLastCheck = this.requestCount;
- }
- return changed;
- }
-
- getCurrentStatus() {
- return {
- createSocketCount: this.createSocketCount,
- createSocketErrorCount: this.createSocketErrorCount,
- closeSocketCount: this.closeSocketCount,
- errorSocketCount: this.errorSocketCount,
- timeoutSocketCount: this.timeoutSocketCount,
- requestCount: this.requestCount,
- freeSockets: inspect(this.freeSockets),
- sockets: inspect(this.sockets),
- requests: inspect(this.requests),
- };
- }
-}
-
-module.exports = Agent;
-
-function inspect(obj) {
- const res = {};
- for (const key in obj) {
- res[key] = obj[key].length;
- }
- return res;
-}
diff --git a/node_modules/agentkeepalive/lib/https_agent.js b/node_modules/agentkeepalive/lib/https_agent.js
deleted file mode 100644
index e6d58a3df..000000000
--- a/node_modules/agentkeepalive/lib/https_agent.js
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Https Agent based on the custom http agent
- */
-
-'use strict';
-
-const https = require('https');
-const HttpAgent = require('./agent');
-const OriginalHttpsAgent = https.Agent;
-
-class HttpsAgent extends HttpAgent {
- constructor(options) {
- super(options);
-
- this.defaultPort = 443;
- this.protocol = 'https:';
- this.maxCachedSessions = this.options.maxCachedSessions;
- if (this.maxCachedSessions === undefined) {
- this.maxCachedSessions = 100;
- }
-
- this._sessionCache = {
- map: {},
- list: [],
- };
- }
-}
-
-[
- 'createConnection',
- 'getName',
- '_getSession',
- '_cacheSession',
- // https://github.com/nodejs/node/pull/4982
- '_evictSession',
-].forEach(function(method) {
- if (typeof OriginalHttpsAgent.prototype[method] === 'function') {
- HttpsAgent.prototype[method] = OriginalHttpsAgent.prototype[method];
- }
-});
-
-module.exports = HttpsAgent;
diff --git a/node_modules/agentkeepalive/package.json b/node_modules/agentkeepalive/package.json
deleted file mode 100644
index ba6470dba..000000000
--- a/node_modules/agentkeepalive/package.json
+++ /dev/null
@@ -1,83 +0,0 @@
-{
- "_from": "agentkeepalive@^3.4.1",
- "_id": "agentkeepalive@3.5.2",
- "_inBundle": false,
- "_integrity": "sha512-e0L/HNe6qkQ7H19kTlRRqUibEAwDK5AFk6y3PtMsuut2VAH6+Q4xZml1tNDJD7kSAyqmbG/K08K5WEJYtUrSlQ==",
- "_location": "/agentkeepalive",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "agentkeepalive@^3.4.1",
- "name": "agentkeepalive",
- "escapedName": "agentkeepalive",
- "rawSpec": "^3.4.1",
- "saveSpec": null,
- "fetchSpec": "^3.4.1"
- },
- "_requiredBy": [
- "/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.5.2.tgz",
- "_shasum": "a113924dd3fa24a0bc3b78108c450c2abee00f67",
- "_spec": "agentkeepalive@^3.4.1",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/make-fetch-happen",
- "author": {
- "name": "fengmk2",
- "email": "fengmk2@gmail.com",
- "url": "https://fengmk2.com"
- },
- "browser": "browser.js",
- "bugs": {
- "url": "https://github.com/node-modules/agentkeepalive/issues"
- },
- "bundleDependencies": false,
- "ci": {
- "version": "4, 6, 8, 10"
- },
- "dependencies": {
- "humanize-ms": "^1.2.1"
- },
- "deprecated": false,
- "description": "Missing keepalive http.Agent",
- "devDependencies": {
- "autod": "^3.0.1",
- "egg-bin": "^1.11.1",
- "egg-ci": "^1.8.0",
- "eslint": "^4.19.1",
- "eslint-config-egg": "^6.0.0",
- "pedding": "^1.1.0"
- },
- "engines": {
- "node": ">= 4.0.0"
- },
- "files": [
- "index.js",
- "index.d.ts",
- "browser.js",
- "lib"
- ],
- "homepage": "https://github.com/node-modules/agentkeepalive#readme",
- "keywords": [
- "http",
- "https",
- "agent",
- "keepalive",
- "agentkeepalive"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "agentkeepalive",
- "repository": {
- "type": "git",
- "url": "git://github.com/node-modules/agentkeepalive.git"
- },
- "scripts": {
- "autod": "autod",
- "ci": "npm run lint && npm run cov",
- "cov": "egg-bin cov",
- "lint": "eslint lib test index.js",
- "test": "egg-bin test"
- },
- "version": "3.5.2"
-}
diff --git a/node_modules/es6-promise/CHANGELOG.md b/node_modules/es6-promise/CHANGELOG.md
deleted file mode 100644
index d630cc0dc..000000000
--- a/node_modules/es6-promise/CHANGELOG.md
+++ /dev/null
@@ -1,155 +0,0 @@
-# Master
-
-# 4.2.5
-
-* remove old try/catch performance hacks, modern runtimes do not require these tricks
-
-# 4.2.4
-
-* [Fixes #305] Confuse webpack
-
-# 4.2.3
-
-* Cleanup testem related build configuration
-* Use `prepublishOnly` instead of `prepublish` (thanks @rhysd)
-* Add Node.js 9, 8 to testing matrix
-* drop now unused s3 deployment files
-* internal cleanup (thanks to @bekzod, @mariusschulz)
-* Fixup Changelog
-
-# 4.2.2
-
-* Ensure PROMISE_ID works correctly
-* internal cleanup (thanks yo @mariusschulz)
-
-# 4.2.1
-
-* drop bower support
-
-# 4.2.0
-
-* drop `dist` from git repo
-* add `Promise.prototype.finally`
-* update various build related dependencies
-* add CDN links
-
-# 4.1.0
-
-* [BUGFIX] Fix memory leak [#269]
-* [BUGFIX] Auto Bundles within an AMD Environment [#263]
-
-# 4.0.5
-
-* fix require('es6-promise/auto') for Node < 4
-
-# 4.0.4
-
-* fix asap when using https://github.com/Kinvey/titanium-sdk
-
-# 4.0.3
-
-* fix Readme links
-
-# 4.0.2
-
-* fix require('es6-promise/auto');
-
-# 4.0.0
-
-* no longer polyfill automatically; if needed, one can still invoke
-  `require('es6-promise/auto')` directly.
-
-# 3.3.1
-
-* fix links in readme
-
-# 3.3.0
-
-* support polyfill on WebMAF (PlayStation env)
-* fix tampering-related bug: global `constructor` was referenced by mistake
-* provide TS Typings
-* increase compatibility with sinon.useFakeTimers()
-* update build tools (use rollup)
-* directly export promise
-
-# 3.2.2
-
-* IE8: use isArray
-* update build dependencies
-
-# 3.2.1
-
-* fix race tampering issue
-* use eslint
-* fix Promise.all tampering
-* remove unused code
-* fix issues with NWJS/electron
-
-# 3.2.0
-
-* improve tamper resistance of Promise.all, Promise.race and
-  Promise.prototype.then (note: this isn't complete, but it addresses an exception
-  when used with core-js; follow-up work will address it entirely)
-* remove spec incompatible then chaining fast-path
-* add eslint
-* update build deps
-
-# 3.1.2
-
-* fix node detection issues with NWJS/electron
-
-# 3.1.0
-
-* improve performance of Promise.all when it encounters a non-promise input object
-* then/resolve tamper protection
-* reduce AST size of promise constructor, to facilitate more inlining
-* Update README.md with details about PhantomJS requirement for running tests
-* Mangle and compress the minified version
-
-# 3.0.2
-
-* correctly bump both bower and package.json versions
-
-# 3.0.1
-
-* no longer include dist/test in npm releases
-
-# 3.0.0
-
-* use nextTick() instead of setImmediate() to schedule microtasks with node 0.10. Later versions of
-  Node are not affected as they were already using nextTick(). Note that using nextTick() might
-  trigger a deprecation warning on 0.10 as described at https://github.com/cujojs/when/issues/410.
-  The reason why nextTick() is preferred is that setImmediate() would schedule a macrotask
-  instead of a microtask and might result in different scheduling.
-  If needed, you can revert to the former behavior as follows:
-
- var Promise = require('es6-promise').Promise;
- Promise._setScheduler(setImmediate);
-
-# 2.3.0
-
-* #121: Ability to override the internal asap implementation
-* #120: Use an ascii character for an apostrophe, for source maps
-
-# 2.2.0
-
-* #116: Expose asap() and a way to override the scheduling mechanism on Promise
-* Lock to v0.2.3 of ember-cli
-
-# 2.1.1
-
-* Fix #100 via #105: tell browserify to ignore vertx require
-* Fix #101 via #102: "follow thenable state, not own state"
-
-# 2.1.0
-
-* #59: Automatic polyfill. No need to invoke `ES6Promise.polyfill()` anymore.
-* ... (see the commit log)
-
-# 2.0.0
-
-* re-sync with RSVP. Many large performance improvements and bugfixes.
-
-# 1.0.0
-
-* first subset of RSVP
diff --git a/node_modules/es6-promise/LICENSE b/node_modules/es6-promise/LICENSE
deleted file mode 100644
index 954ec5992..000000000
--- a/node_modules/es6-promise/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/es6-promise/README.md b/node_modules/es6-promise/README.md
deleted file mode 100644
index 951db75fa..000000000
--- a/node_modules/es6-promise/README.md
+++ /dev/null
@@ -1,97 +0,0 @@
-# ES6-Promise (subset of [rsvp.js](https://github.com/tildeio/rsvp.js)) [![Build Status](https://travis-ci.org/stefanpenner/es6-promise.svg?branch=master)](https://travis-ci.org/stefanpenner/es6-promise)
-
-This is a polyfill of the [ES6 Promise](http://www.ecma-international.org/ecma-262/6.0/#sec-promise-constructor). The implementation is a subset of [rsvp.js](https://github.com/tildeio/rsvp.js) extracted by @jakearchibald. If you want extra features and more debugging options, check out the [full library](https://github.com/tildeio/rsvp.js).
-
-For API details and how to use promises, see the <a href="http://www.html5rocks.com/en/tutorials/es6/promises/">JavaScript Promises HTML5Rocks article</a>.
-
-## Downloads
-
-* [es6-promise 27.86 KB (7.33 KB gzipped)](https://cdn.jsdelivr.net/npm/es6-promise/dist/es6-promise.js)
-* [es6-promise-auto 27.78 KB (7.3 KB gzipped)](https://cdn.jsdelivr.net/npm/es6-promise/dist/es6-promise.auto.js) - Automatically provides/replaces `Promise` if missing or broken.
-* [es6-promise-min 6.17 KB (2.4 KB gzipped)](https://cdn.jsdelivr.net/npm/es6-promise/dist/es6-promise.min.js)
-* [es6-promise-auto-min 6.19 KB (2.4 KB gzipped)](https://cdn.jsdelivr.net/npm/es6-promise/dist/es6-promise.auto.min.js) - Minified version of `es6-promise-auto` above.
-
-## CDN
-
-To use via a CDN, include this in your HTML:
-
-```html
-<!-- Automatically provides/replaces `Promise` if missing or broken. -->
-<script src="https://cdn.jsdelivr.net/npm/es6-promise@4/dist/es6-promise.js"></script>
-<script src="https://cdn.jsdelivr.net/npm/es6-promise@4/dist/es6-promise.auto.js"></script>
-
-<!-- Minified version of `es6-promise-auto` below. -->
-<script src="https://cdn.jsdelivr.net/npm/es6-promise@4/dist/es6-promise.min.js"></script>
-<script src="https://cdn.jsdelivr.net/npm/es6-promise@4/dist/es6-promise.auto.min.js"></script>
-
-```
-
-## Node.js
-
-To install:
-
-```sh
-yarn add es6-promise
-```
-
-or
-
-```sh
-npm install es6-promise
-```
-
-To use:
-
-```js
-var Promise = require('es6-promise').Promise;
-```
-
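-A short sketch of using the imported constructor just like the native one (the values are only illustrative):
-
-```js
-var Promise = require('es6-promise').Promise;
-
-// Combine two already-settled promises and log the results.
-Promise.all([Promise.resolve(1), Promise.resolve(2)]).then(function (values) {
-  console.log(values); // [1, 2]
-});
-```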
-
-## Usage in IE<9
-
-`catch` and `finally` are reserved keywords in IE<9, meaning
-`promise.catch(func)` or `promise.finally(func)` throw a syntax error. To work
-around this, you can use a string to access the property as shown in the
-following example.
-
-However, most minifiers will automatically fix this for you, making the
-resulting code safe for old browsers and production:
-
-```js
-promise['catch'](function(err) {
- // ...
-});
-```
-
-```js
-promise['finally'](function() {
- // ...
-});
-```
-
-## Auto-polyfill
-
-To polyfill the global environment (either in Node or in the browser via CommonJS) use the following code snippet:
-
-```js
-require('es6-promise').polyfill();
-```
-
-Alternatively
-
-```js
-require('es6-promise/auto');
-```
-
-Notice that we don't assign the result of `polyfill()` to any variable. The `polyfill()` method patches the global environment (in this case, the global `Promise`) when called.
-
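-A minimal sketch of the effect: after the call, code can rely on the global `Promise` without importing anything else (it is only replaced when missing or broken).
-
-```js
-require('es6-promise').polyfill();
-
-// From here on the global Promise exists.
-Promise.resolve(42).then(function (value) {
-  console.log(value); // 42
-});
-```
-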
-## Building & Testing
-
-You will need to have PhantomJS installed globally in order to run the tests.
-
-`npm install -g phantomjs`
-
-* `npm run build` to build
-* `npm test` to run tests
-* `npm start` to run a build watcher, and webserver to test
-* `npm run test:server` for a testem test runner and watching builder
diff --git a/node_modules/es6-promise/auto.js b/node_modules/es6-promise/auto.js
deleted file mode 100644
index 92bbf36e5..000000000
--- a/node_modules/es6-promise/auto.js
+++ /dev/null
@@ -1,4 +0,0 @@
-// This file can be required in Browserify and Node.js for automatic polyfill
-// To use it: require('es6-promise/auto');
-'use strict';
-module.exports = require('./').polyfill();
diff --git a/node_modules/es6-promise/dist/es6-promise.auto.js b/node_modules/es6-promise/dist/es6-promise.auto.js
deleted file mode 100644
index 7ad1de569..000000000
--- a/node_modules/es6-promise/dist/es6-promise.auto.js
+++ /dev/null
@@ -1,1176 +0,0 @@
-/*!
- * @overview es6-promise - a tiny implementation of Promises/A+.
- * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)
- * @license Licensed under MIT license
- * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE
- * @version v4.2.8+1e68dce6
- */
-
-(function (global, factory) {
- typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
- typeof define === 'function' && define.amd ? define(factory) :
- (global.ES6Promise = factory());
-}(this, (function () { 'use strict';
-
-function objectOrFunction(x) {
- var type = typeof x;
- return x !== null && (type === 'object' || type === 'function');
-}
-
-function isFunction(x) {
- return typeof x === 'function';
-}
-
-
-
-var _isArray = void 0;
-if (Array.isArray) {
- _isArray = Array.isArray;
-} else {
- _isArray = function (x) {
- return Object.prototype.toString.call(x) === '[object Array]';
- };
-}
-
-var isArray = _isArray;
-
-var len = 0;
-var vertxNext = void 0;
-var customSchedulerFn = void 0;
-
-var asap = function asap(callback, arg) {
- queue[len] = callback;
- queue[len + 1] = arg;
- len += 2;
- if (len === 2) {
- // If len is 2, that means that we need to schedule an async flush.
- // If additional callbacks are queued before the queue is flushed, they
- // will be processed by this flush that we are scheduling.
- if (customSchedulerFn) {
- customSchedulerFn(flush);
- } else {
- scheduleFlush();
- }
- }
-};
-
-function setScheduler(scheduleFn) {
- customSchedulerFn = scheduleFn;
-}
-
-function setAsap(asapFn) {
- asap = asapFn;
-}
-
-var browserWindow = typeof window !== 'undefined' ? window : undefined;
-var browserGlobal = browserWindow || {};
-var BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
-var isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
-
-// test for web worker but not in IE10
-var isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';
-
-// node
-function useNextTick() {
- // node version 0.10.x displays a deprecation warning when nextTick is used recursively
- // see https://github.com/cujojs/when/issues/410 for details
- return function () {
- return process.nextTick(flush);
- };
-}
-
-// vertx
-function useVertxTimer() {
- if (typeof vertxNext !== 'undefined') {
- return function () {
- vertxNext(flush);
- };
- }
-
- return useSetTimeout();
-}
-
-function useMutationObserver() {
- var iterations = 0;
- var observer = new BrowserMutationObserver(flush);
- var node = document.createTextNode('');
- observer.observe(node, { characterData: true });
-
- return function () {
- node.data = iterations = ++iterations % 2;
- };
-}
-
-// web worker
-function useMessageChannel() {
- var channel = new MessageChannel();
- channel.port1.onmessage = flush;
- return function () {
- return channel.port2.postMessage(0);
- };
-}
-
-function useSetTimeout() {
- // Store setTimeout reference so es6-promise will be unaffected by
- // other code modifying setTimeout (like sinon.useFakeTimers())
- var globalSetTimeout = setTimeout;
- return function () {
- return globalSetTimeout(flush, 1);
- };
-}
-
-var queue = new Array(1000);
-function flush() {
- for (var i = 0; i < len; i += 2) {
- var callback = queue[i];
- var arg = queue[i + 1];
-
- callback(arg);
-
- queue[i] = undefined;
- queue[i + 1] = undefined;
- }
-
- len = 0;
-}
-
-function attemptVertx() {
- try {
- var vertx = Function('return this')().require('vertx');
- vertxNext = vertx.runOnLoop || vertx.runOnContext;
- return useVertxTimer();
- } catch (e) {
- return useSetTimeout();
- }
-}
-
-var scheduleFlush = void 0;
-// Decide what async method to use to trigger processing of queued callbacks:
-if (isNode) {
- scheduleFlush = useNextTick();
-} else if (BrowserMutationObserver) {
- scheduleFlush = useMutationObserver();
-} else if (isWorker) {
- scheduleFlush = useMessageChannel();
-} else if (browserWindow === undefined && typeof require === 'function') {
- scheduleFlush = attemptVertx();
-} else {
- scheduleFlush = useSetTimeout();
-}
-
-function then(onFulfillment, onRejection) {
- var parent = this;
-
- var child = new this.constructor(noop);
-
- if (child[PROMISE_ID] === undefined) {
- makePromise(child);
- }
-
- var _state = parent._state;
-
-
- if (_state) {
- var callback = arguments[_state - 1];
- asap(function () {
- return invokeCallback(_state, child, callback, parent._result);
- });
- } else {
- subscribe(parent, child, onFulfillment, onRejection);
- }
-
- return child;
-}
-
-/**
- `Promise.resolve` returns a promise that will become resolved with the
- passed `value`. It is shorthand for the following:
-
- ```javascript
- let promise = new Promise(function(resolve, reject){
- resolve(1);
- });
-
- promise.then(function(value){
- // value === 1
- });
- ```
-
- Instead of writing the above, your code now simply becomes the following:
-
- ```javascript
- let promise = Promise.resolve(1);
-
- promise.then(function(value){
- // value === 1
- });
- ```
-
- @method resolve
- @static
- @param {Any} value value that the returned promise will be resolved with
- Useful for tooling.
- @return {Promise} a promise that will become fulfilled with the given
- `value`
-*/
-function resolve$1(object) {
- /*jshint validthis:true */
- var Constructor = this;
-
- if (object && typeof object === 'object' && object.constructor === Constructor) {
- return object;
- }
-
- var promise = new Constructor(noop);
- resolve(promise, object);
- return promise;
-}
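As the constructor check in `resolve$1` above shows, `resolve` returns promises created by the same constructor unchanged instead of wrapping them again. A small sketch of that pass-through (not from the removed file):

```js
var p = Promise.resolve(1);

// The same instance comes back; no extra wrapper promise is created.
console.log(Promise.resolve(p) === p); // true
```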
-
-var PROMISE_ID = Math.random().toString(36).substring(2);
-
-function noop() {}
-
-var PENDING = void 0;
-var FULFILLED = 1;
-var REJECTED = 2;
-
-function selfFulfillment() {
- return new TypeError("You cannot resolve a promise with itself");
-}
-
-function cannotReturnOwn() {
- return new TypeError('A promises callback cannot return that same promise.');
-}
-
-function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) {
- try {
- then$$1.call(value, fulfillmentHandler, rejectionHandler);
- } catch (e) {
- return e;
- }
-}
-
-function handleForeignThenable(promise, thenable, then$$1) {
- asap(function (promise) {
- var sealed = false;
- var error = tryThen(then$$1, thenable, function (value) {
- if (sealed) {
- return;
- }
- sealed = true;
- if (thenable !== value) {
- resolve(promise, value);
- } else {
- fulfill(promise, value);
- }
- }, function (reason) {
- if (sealed) {
- return;
- }
- sealed = true;
-
- reject(promise, reason);
- }, 'Settle: ' + (promise._label || ' unknown promise'));
-
- if (!sealed && error) {
- sealed = true;
- reject(promise, error);
- }
- }, promise);
-}
-
-function handleOwnThenable(promise, thenable) {
- if (thenable._state === FULFILLED) {
- fulfill(promise, thenable._result);
- } else if (thenable._state === REJECTED) {
- reject(promise, thenable._result);
- } else {
- subscribe(thenable, undefined, function (value) {
- return resolve(promise, value);
- }, function (reason) {
- return reject(promise, reason);
- });
- }
-}
-
-function handleMaybeThenable(promise, maybeThenable, then$$1) {
- if (maybeThenable.constructor === promise.constructor && then$$1 === then && maybeThenable.constructor.resolve === resolve$1) {
- handleOwnThenable(promise, maybeThenable);
- } else {
- if (then$$1 === undefined) {
- fulfill(promise, maybeThenable);
- } else if (isFunction(then$$1)) {
- handleForeignThenable(promise, maybeThenable, then$$1);
- } else {
- fulfill(promise, maybeThenable);
- }
- }
-}
-
-function resolve(promise, value) {
- if (promise === value) {
- reject(promise, selfFulfillment());
- } else if (objectOrFunction(value)) {
- var then$$1 = void 0;
- try {
- then$$1 = value.then;
- } catch (error) {
- reject(promise, error);
- return;
- }
- handleMaybeThenable(promise, value, then$$1);
- } else {
- fulfill(promise, value);
- }
-}
-
-function publishRejection(promise) {
- if (promise._onerror) {
- promise._onerror(promise._result);
- }
-
- publish(promise);
-}
-
-function fulfill(promise, value) {
- if (promise._state !== PENDING) {
- return;
- }
-
- promise._result = value;
- promise._state = FULFILLED;
-
- if (promise._subscribers.length !== 0) {
- asap(publish, promise);
- }
-}
-
-function reject(promise, reason) {
- if (promise._state !== PENDING) {
- return;
- }
- promise._state = REJECTED;
- promise._result = reason;
-
- asap(publishRejection, promise);
-}
-
-function subscribe(parent, child, onFulfillment, onRejection) {
- var _subscribers = parent._subscribers;
- var length = _subscribers.length;
-
-
- parent._onerror = null;
-
- _subscribers[length] = child;
- _subscribers[length + FULFILLED] = onFulfillment;
- _subscribers[length + REJECTED] = onRejection;
-
- if (length === 0 && parent._state) {
- asap(publish, parent);
- }
-}
-
-function publish(promise) {
- var subscribers = promise._subscribers;
- var settled = promise._state;
-
- if (subscribers.length === 0) {
- return;
- }
-
- var child = void 0,
- callback = void 0,
- detail = promise._result;
-
- for (var i = 0; i < subscribers.length; i += 3) {
- child = subscribers[i];
- callback = subscribers[i + settled];
-
- if (child) {
- invokeCallback(settled, child, callback, detail);
- } else {
- callback(detail);
- }
- }
-
- promise._subscribers.length = 0;
-}
-
-function invokeCallback(settled, promise, callback, detail) {
- var hasCallback = isFunction(callback),
- value = void 0,
- error = void 0,
- succeeded = true;
-
- if (hasCallback) {
- try {
- value = callback(detail);
- } catch (e) {
- succeeded = false;
- error = e;
- }
-
- if (promise === value) {
- reject(promise, cannotReturnOwn());
- return;
- }
- } else {
- value = detail;
- }
-
- if (promise._state !== PENDING) {
- // noop
- } else if (hasCallback && succeeded) {
- resolve(promise, value);
- } else if (succeeded === false) {
- reject(promise, error);
- } else if (settled === FULFILLED) {
- fulfill(promise, value);
- } else if (settled === REJECTED) {
- reject(promise, value);
- }
-}
-
-function initializePromise(promise, resolver) {
- try {
- resolver(function resolvePromise(value) {
- resolve(promise, value);
- }, function rejectPromise(reason) {
- reject(promise, reason);
- });
- } catch (e) {
- reject(promise, e);
- }
-}
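`initializePromise` above wraps the resolver call in try/catch, so an exception thrown synchronously inside the executor rejects the promise rather than escaping. A brief sketch (not part of the removed file):

```js
new Promise(function () {
  throw new Error('boom'); // caught by initializePromise and turned into a rejection
}).catch(function (reason) {
  console.log(reason.message); // 'boom'
});
```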
-
-var id = 0;
-function nextId() {
- return id++;
-}
-
-function makePromise(promise) {
- promise[PROMISE_ID] = id++;
- promise._state = undefined;
- promise._result = undefined;
- promise._subscribers = [];
-}
-
-function validationError() {
- return new Error('Array Methods must be provided an Array');
-}
-
-var Enumerator = function () {
- function Enumerator(Constructor, input) {
- this._instanceConstructor = Constructor;
- this.promise = new Constructor(noop);
-
- if (!this.promise[PROMISE_ID]) {
- makePromise(this.promise);
- }
-
- if (isArray(input)) {
- this.length = input.length;
- this._remaining = input.length;
-
- this._result = new Array(this.length);
-
- if (this.length === 0) {
- fulfill(this.promise, this._result);
- } else {
- this.length = this.length || 0;
- this._enumerate(input);
- if (this._remaining === 0) {
- fulfill(this.promise, this._result);
- }
- }
- } else {
- reject(this.promise, validationError());
- }
- }
-
- Enumerator.prototype._enumerate = function _enumerate(input) {
- for (var i = 0; this._state === PENDING && i < input.length; i++) {
- this._eachEntry(input[i], i);
- }
- };
-
- Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {
- var c = this._instanceConstructor;
- var resolve$$1 = c.resolve;
-
-
- if (resolve$$1 === resolve$1) {
- var _then = void 0;
- var error = void 0;
- var didError = false;
- try {
- _then = entry.then;
- } catch (e) {
- didError = true;
- error = e;
- }
-
- if (_then === then && entry._state !== PENDING) {
- this._settledAt(entry._state, i, entry._result);
- } else if (typeof _then !== 'function') {
- this._remaining--;
- this._result[i] = entry;
- } else if (c === Promise$2) {
- var promise = new c(noop);
- if (didError) {
- reject(promise, error);
- } else {
- handleMaybeThenable(promise, entry, _then);
- }
- this._willSettleAt(promise, i);
- } else {
- this._willSettleAt(new c(function (resolve$$1) {
- return resolve$$1(entry);
- }), i);
- }
- } else {
- this._willSettleAt(resolve$$1(entry), i);
- }
- };
-
- Enumerator.prototype._settledAt = function _settledAt(state, i, value) {
- var promise = this.promise;
-
-
- if (promise._state === PENDING) {
- this._remaining--;
-
- if (state === REJECTED) {
- reject(promise, value);
- } else {
- this._result[i] = value;
- }
- }
-
- if (this._remaining === 0) {
- fulfill(promise, this._result);
- }
- };
-
- Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {
- var enumerator = this;
-
- subscribe(promise, undefined, function (value) {
- return enumerator._settledAt(FULFILLED, i, value);
- }, function (reason) {
- return enumerator._settledAt(REJECTED, i, reason);
- });
- };
-
- return Enumerator;
-}();
-
-/**
- `Promise.all` accepts an array of promises, and returns a new promise which
- is fulfilled with an array of fulfillment values for the passed promises, or
- rejected with the reason of the first passed promise to be rejected. It casts all
- elements of the passed iterable to promises as it runs this algorithm.
-
- Example:
-
- ```javascript
- let promise1 = resolve(1);
- let promise2 = resolve(2);
- let promise3 = resolve(3);
- let promises = [ promise1, promise2, promise3 ];
-
- Promise.all(promises).then(function(array){
- // The array here would be [ 1, 2, 3 ];
- });
- ```
-
- If any of the `promises` given to `all` are rejected, the first promise
- that is rejected will be given as an argument to the returned promise's
- rejection handler. For example:
-
- ```javascript
- let promise1 = resolve(1);
- let promise2 = reject(new Error("2"));
- let promise3 = reject(new Error("3"));
- let promises = [ promise1, promise2, promise3 ];
-
- Promise.all(promises).then(function(array){
- // Code here never runs because there are rejected promises!
- }, function(error) {
- // error.message === "2"
- });
- ```
-
- @method all
- @static
- @param {Array} entries array of promises
- @param {String} label optional string for labeling the promise.
- Useful for tooling.
- @return {Promise} promise that is fulfilled when all `promises` have been
- fulfilled, or rejected if any of them become rejected.
- @static
-*/
-function all(entries) {
- return new Enumerator(this, entries).promise;
-}
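Because `Enumerator.prototype._eachEntry` above stores non-thenable entries directly into the result array, `all` accepts a mix of plain values and promises. A short sketch (not part of the removed file):

```js
Promise.all([1, Promise.resolve(2), 3]).then(function (values) {
  console.log(values); // [1, 2, 3]
});
```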
-
-/**
- `Promise.race` returns a new promise which is settled in the same way as the
- first passed promise to settle.
-
- Example:
-
- ```javascript
- let promise1 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 1');
- }, 200);
- });
-
- let promise2 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 2');
- }, 100);
- });
-
- Promise.race([promise1, promise2]).then(function(result){
- // result === 'promise 2' because it was resolved before promise1
- // was resolved.
- });
- ```
-
- `Promise.race` is deterministic in that only the state of the first
- settled promise matters. For example, even if the other promises given in
- the `promises` array argument eventually fulfill, the returned promise
- still becomes rejected if the first promise to settle is rejected:
-
- ```javascript
- let promise1 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 1');
- }, 200);
- });
-
- let promise2 = new Promise(function(resolve, reject){
- setTimeout(function(){
- reject(new Error('promise 2'));
- }, 100);
- });
-
- Promise.race([promise1, promise2]).then(function(result){
- // Code here never runs
- }, function(reason){
- // reason.message === 'promise 2' because promise 2 became rejected before
- // promise 1 became fulfilled
- });
- ```
-
- An example real-world use case is implementing timeouts:
-
- ```javascript
- Promise.race([ajax('foo.json'), timeout(5000)])
- ```
-
- @method race
- @static
- @param {Array} promises array of promises to observe
- Useful for tooling.
- @return {Promise} a promise which settles in the same way as the first passed
- promise to settle.
-*/
-function race(entries) {
- /*jshint validthis:true */
- var Constructor = this;
-
- if (!isArray(entries)) {
- return new Constructor(function (_, reject) {
- return reject(new TypeError('You must pass an array to race.'));
- });
- } else {
- return new Constructor(function (resolve, reject) {
- var length = entries.length;
- for (var i = 0; i < length; i++) {
- Constructor.resolve(entries[i]).then(resolve, reject);
- }
- });
- }
-}
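The `race` doc comment above references a `timeout(5000)` helper without defining it. A hypothetical sketch of such a helper (the name `timeout` and the error message are assumptions, not part of the removed file):

```js
// Rejects after `ms` milliseconds so a race against a slow operation still settles.
function timeout(ms) {
  return new Promise(function (resolve, reject) {
    setTimeout(function () {
      reject(new Error('timed out after ' + ms + 'ms'));
    }, ms);
  });
}

// Promise.race([ajax('foo.json'), timeout(5000)]) then rejects after 5 seconds
// unless ajax() settles first.
```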
-
-/**
- `Promise.reject` returns a promise rejected with the passed `reason`.
- It is shorthand for the following:
-
- ```javascript
- let promise = new Promise(function(resolve, reject){
- reject(new Error('WHOOPS'));
- });
-
- promise.then(function(value){
- // Code here doesn't run because the promise is rejected!
- }, function(reason){
- // reason.message === 'WHOOPS'
- });
- ```
-
- Instead of writing the above, your code now simply becomes the following:
-
- ```javascript
- let promise = Promise.reject(new Error('WHOOPS'));
-
- promise.then(function(value){
- // Code here doesn't run because the promise is rejected!
- }, function(reason){
- // reason.message === 'WHOOPS'
- });
- ```
-
- @method reject
- @static
- @param {Any} reason value that the returned promise will be rejected with.
- Useful for tooling.
- @return {Promise} a promise rejected with the given `reason`.
-*/
-function reject$1(reason) {
- /*jshint validthis:true */
- var Constructor = this;
- var promise = new Constructor(noop);
- reject(promise, reason);
- return promise;
-}
-
-function needsResolver() {
- throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
-}
-
-function needsNew() {
- throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
-}
-
-/**
- Promise objects represent the eventual result of an asynchronous operation. The
- primary way of interacting with a promise is through its `then` method, which
- registers callbacks to receive either a promise's eventual value or the reason
- why the promise cannot be fulfilled.
-
- Terminology
- -----------
-
- - `promise` is an object or function with a `then` method whose behavior conforms to this specification.
- - `thenable` is an object or function that defines a `then` method.
- - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).
- - `exception` is a value that is thrown using the throw statement.
- - `reason` is a value that indicates why a promise was rejected.
- - `settled` the final resting state of a promise, fulfilled or rejected.
-
- A promise can be in one of three states: pending, fulfilled, or rejected.
-
- Promises that are fulfilled have a fulfillment value and are in the fulfilled
- state. Promises that are rejected have a rejection reason and are in the
- rejected state. A fulfillment value is never a thenable.
-
- Promises can also be said to *resolve* a value. If this value is also a
- promise, then the original promise's settled state will match the value's
- settled state. So a promise that *resolves* a promise that rejects will
- itself reject, and a promise that *resolves* a promise that fulfills will
- itself fulfill.
-
-
- Basic Usage:
- ------------
-
- ```js
- let promise = new Promise(function(resolve, reject) {
- // on success
- resolve(value);
-
- // on failure
- reject(reason);
- });
-
- promise.then(function(value) {
- // on fulfillment
- }, function(reason) {
- // on rejection
- });
- ```
-
- Advanced Usage:
- ---------------
-
- Promises shine when abstracting away asynchronous interactions such as
- `XMLHttpRequest`s.
-
- ```js
- function getJSON(url) {
- return new Promise(function(resolve, reject){
- let xhr = new XMLHttpRequest();
-
- xhr.open('GET', url);
- xhr.onreadystatechange = handler;
- xhr.responseType = 'json';
- xhr.setRequestHeader('Accept', 'application/json');
- xhr.send();
-
- function handler() {
- if (this.readyState === this.DONE) {
- if (this.status === 200) {
- resolve(this.response);
- } else {
- reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));
- }
- }
- };
- });
- }
-
- getJSON('/posts.json').then(function(json) {
- // on fulfillment
- }, function(reason) {
- // on rejection
- });
- ```
-
- Unlike callbacks, promises are great composable primitives.
-
- ```js
- Promise.all([
- getJSON('/posts'),
- getJSON('/comments')
- ]).then(function(values){
- values[0] // => postsJSON
- values[1] // => commentsJSON
-
- return values;
- });
- ```
-
- @class Promise
- @param {Function} resolver
- Useful for tooling.
- @constructor
-*/
-
-var Promise$2 = function () {
- function Promise(resolver) {
- this[PROMISE_ID] = nextId();
- this._result = this._state = undefined;
- this._subscribers = [];
-
- if (noop !== resolver) {
- typeof resolver !== 'function' && needsResolver();
- this instanceof Promise ? initializePromise(this, resolver) : needsNew();
- }
- }
-
- /**
- The primary way of interacting with a promise is through its `then` method,
- which registers callbacks to receive either a promise's eventual value or the
- reason why the promise cannot be fulfilled.
- ```js
- findUser().then(function(user){
- // user is available
- }, function(reason){
- // user is unavailable, and you are given the reason why
- });
- ```
- Chaining
- --------
- The return value of `then` is itself a promise. This second, 'downstream'
- promise is resolved with the return value of the first promise's fulfillment
- or rejection handler, or rejected if the handler throws an exception.
- ```js
- findUser().then(function (user) {
- return user.name;
- }, function (reason) {
- return 'default name';
- }).then(function (userName) {
- // If `findUser` fulfilled, `userName` will be the user's name, otherwise it
- // will be `'default name'`
- });
- findUser().then(function (user) {
- throw new Error('Found user, but still unhappy');
- }, function (reason) {
- throw new Error('`findUser` rejected and we\'re unhappy');
- }).then(function (value) {
- // never reached
- }, function (reason) {
- // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.
- // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.
- });
- ```
- If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.
- ```js
- findUser().then(function (user) {
- throw new PedagogicalException('Upstream error');
- }).then(function (value) {
- // never reached
- }).then(function (value) {
- // never reached
- }, function (reason) {
- // The `PedagogicalException` is propagated all the way down to here
- });
- ```
- Assimilation
- ------------
- Sometimes the value you want to propagate to a downstream promise can only be
- retrieved asynchronously. This can be achieved by returning a promise in the
- fulfillment or rejection handler. The downstream promise will then be pending
- until the returned promise is settled. This is called *assimilation*.
- ```js
- findUser().then(function (user) {
- return findCommentsByAuthor(user);
- }).then(function (comments) {
- // The user's comments are now available
- });
- ```
- If the assimilated promise rejects, then the downstream promise will also reject.
- ```js
- findUser().then(function (user) {
- return findCommentsByAuthor(user);
- }).then(function (comments) {
- // If `findCommentsByAuthor` fulfills, we'll have the value here
- }, function (reason) {
- // If `findCommentsByAuthor` rejects, we'll have the reason here
- });
- ```
- Simple Example
- --------------
- Synchronous Example
- ```javascript
- let result;
- try {
- result = findResult();
- // success
- } catch(reason) {
- // failure
- }
- ```
- Errback Example
- ```js
- findResult(function(result, err){
- if (err) {
- // failure
- } else {
- // success
- }
- });
- ```
- Promise Example
- ```javascript
- findResult().then(function(result){
- // success
- }, function(reason){
- // failure
- });
- ```
- Advanced Example
- --------------
- Synchronous Example
- ```javascript
- let author, books;
- try {
- author = findAuthor();
- books = findBooksByAuthor(author);
- // success
- } catch(reason) {
- // failure
- }
- ```
- Errback Example
- ```js
- function foundBooks(books) {
- }
- function failure(reason) {
- }
- findAuthor(function(author, err){
- if (err) {
- failure(err);
- // failure
- } else {
- try {
- findBooksByAuthor(author, function(books, err) {
- if (err) {
- failure(err);
- } else {
- try {
- foundBooks(books);
- } catch(reason) {
- failure(reason);
- }
- }
- });
- } catch(error) {
- failure(error);
- }
- // success
- }
- });
- ```
- Promise Example
- ```javascript
- findAuthor().
- then(findBooksByAuthor).
- then(function(books){
- // found books
- }).catch(function(reason){
- // something went wrong
- });
- ```
- @method then
- @param {Function} onFulfilled
- @param {Function} onRejected
- Useful for tooling.
- @return {Promise}
- */
-
- /**
- `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same
- as the catch block of a try/catch statement.
- ```js
- function findAuthor(){
- throw new Error('couldn\'t find that author');
- }
- // synchronous
- try {
- findAuthor();
- } catch(reason) {
- // something went wrong
- }
- // async with promises
- findAuthor().catch(function(reason){
- // something went wrong
- });
- ```
- @method catch
- @param {Function} onRejection
- Useful for tooling.
- @return {Promise}
- */
-
-
- Promise.prototype.catch = function _catch(onRejection) {
- return this.then(null, onRejection);
- };
-
- /**
- `finally` will be invoked regardless of the promise's fate, just as a
- native try/catch/finally block behaves.
-
- Synchronous example:
-
- ```js
- function findAuthor() {
- if (Math.random() > 0.5) {
- throw new Error();
- }
- return new Author();
- }
-
- try {
- return findAuthor(); // succeed or fail
- } catch(error) {
- return findOtherAuthor();
- } finally {
- // always runs
- // doesn't affect the return value
- }
- ```
-
- Asynchronous example:
-
- ```js
- findAuthor().catch(function(reason){
- return findOtherAuthor();
- }).finally(function(){
- // author was either found, or not
- });
- ```
-
- @method finally
- @param {Function} callback
- @return {Promise}
- */
-
-
- Promise.prototype.finally = function _finally(callback) {
- var promise = this;
- var constructor = promise.constructor;
-
- if (isFunction(callback)) {
- return promise.then(function (value) {
- return constructor.resolve(callback()).then(function () {
- return value;
- });
- }, function (reason) {
- return constructor.resolve(callback()).then(function () {
- throw reason;
- });
- });
- }
-
- return promise.then(callback, callback);
- };
-
- return Promise;
-}();
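The `finally` implementation removed above preserves the original value or reason; the callback's return value never becomes the chain's value, although a returned promise is waited on before the chain continues. A short sketch of that pass-through (not part of the removed file):

```js
Promise.resolve(42)
  .finally(function () {
    return 'ignored'; // does not replace the fulfillment value
  })
  .then(function (value) {
    console.log(value); // 42
  });
```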
-
-Promise$2.prototype.then = then;
-Promise$2.all = all;
-Promise$2.race = race;
-Promise$2.resolve = resolve$1;
-Promise$2.reject = reject$1;
-Promise$2._setScheduler = setScheduler;
-Promise$2._setAsap = setAsap;
-Promise$2._asap = asap;
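The underscore-prefixed hooks wired up above are internal, but `_setScheduler` replaces the scheduling chain chosen earlier: the supplied function receives the queue-flushing callback and is expected to invoke it asynchronously. A hedged sketch; the variable names here are assumptions, not part of the removed file:

```js
var ES6Promise = require('es6-promise').Promise;

// Route queue flushes through setTimeout instead of the auto-detected primitive.
ES6Promise._setScheduler(function (flushQueue) {
  setTimeout(flushQueue, 0);
});
```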
-
-/*global self*/
-function polyfill() {
- var local = void 0;
-
- if (typeof global !== 'undefined') {
- local = global;
- } else if (typeof self !== 'undefined') {
- local = self;
- } else {
- try {
- local = Function('return this')();
- } catch (e) {
- throw new Error('polyfill failed because global object is unavailable in this environment');
- }
- }
-
- var P = local.Promise;
-
- if (P) {
- var promiseToString = null;
- try {
- promiseToString = Object.prototype.toString.call(P.resolve());
- } catch (e) {
- // silently ignored
- }
-
- if (promiseToString === '[object Promise]' && !P.cast) {
- return;
- }
- }
-
- local.Promise = Promise$2;
-}
-
-// Strange compat..
-Promise$2.polyfill = polyfill;
-Promise$2.Promise = Promise$2;
-
-Promise$2.polyfill();
-
-return Promise$2;
-
-})));
-
-
-
-//# sourceMappingURL=es6-promise.auto.map
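For context on how this removed "auto" build was consumed: loading it calls `polyfill()` immediately, which installs the implementation on the detected global only when no conforming native `Promise` is present. A usage sketch based on the es6-promise package's documented entry points (not part of the diff itself):

```js
// Auto build: polyfills the global Promise as a side effect of the require.
require('es6-promise/auto');

// Library build: polyfill explicitly, or use the exported constructor directly.
var ES6Promise = require('es6-promise');
ES6Promise.polyfill();
var Promise = ES6Promise.Promise;
```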
diff --git a/node_modules/es6-promise/dist/es6-promise.auto.map b/node_modules/es6-promise/dist/es6-promise.auto.map
deleted file mode 100644
index a5abce99f..000000000
--- a/node_modules/es6-promise/dist/es6-promise.auto.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js","lib/es6-promise.auto.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if 
(promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = 
input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;","import Promise from './es6-promise';\nPromise.polyfill();\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;ACJ1BA,SAAO,CAAC,QAAQ,EAAE,CAAC;;;;;;;;","file":"es6-promise.auto.js"} \ No newline at end of file
diff --git a/node_modules/es6-promise/dist/es6-promise.auto.min.js b/node_modules/es6-promise/dist/es6-promise.auto.min.js
deleted file mode 100644
index 5a44a3b08..000000000
--- a/node_modules/es6-promise/dist/es6-promise.auto.min.js
+++ /dev/null
@@ -1 +0,0 @@
-!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.ES6Promise=e()}(this,function(){"use strict";function t(t){var e=typeof t;return null!==t&&("object"===e||"function"===e)}function e(t){return"function"==typeof t}function n(t){W=t}function r(t){z=t}function o(){return function(){return process.nextTick(a)}}function i(){return"undefined"!=typeof U?function(){U(a)}:c()}function s(){var t=0,e=new H(a),n=document.createTextNode("");return e.observe(n,{characterData:!0}),function(){n.data=t=++t%2}}function u(){var t=new MessageChannel;return t.port1.onmessage=a,function(){return t.port2.postMessage(0)}}function c(){var t=setTimeout;return function(){return t(a,1)}}function a(){for(var t=0;t<N;t+=2){var e=Q[t],n=Q[t+1];e(n),Q[t]=void 0,Q[t+1]=void 0}N=0}function f(){try{var t=Function("return this")().require("vertx");return U=t.runOnLoop||t.runOnContext,i()}catch(e){return c()}}function l(t,e){var n=this,r=new this.constructor(p);void 0===r[V]&&x(r);var o=n._state;if(o){var i=arguments[o-1];z(function(){return T(o,r,i,n._result)})}else j(n,r,t,e);return r}function h(t){var e=this;if(t&&"object"==typeof t&&t.constructor===e)return t;var n=new e(p);return w(n,t),n}function p(){}function v(){return new TypeError("You cannot resolve a promise with itself")}function d(){return new TypeError("A promises callback cannot return that same promise.")}function _(t,e,n,r){try{t.call(e,n,r)}catch(o){return o}}function y(t,e,n){z(function(t){var r=!1,o=_(n,e,function(n){r||(r=!0,e!==n?w(t,n):A(t,n))},function(e){r||(r=!0,S(t,e))},"Settle: "+(t._label||" unknown promise"));!r&&o&&(r=!0,S(t,o))},t)}function m(t,e){e._state===Z?A(t,e._result):e._state===$?S(t,e._result):j(e,void 0,function(e){return w(t,e)},function(e){return S(t,e)})}function b(t,n,r){n.constructor===t.constructor&&r===l&&n.constructor.resolve===h?m(t,n):void 0===r?A(t,n):e(r)?y(t,n,r):A(t,n)}function w(e,n){if(e===n)S(e,v());else if(t(n)){var r=void 0;try{r=n.then}catch(o){return void S(e,o)}b(e,n,r)}else A(e,n)}function g(t){t._onerror&&t._onerror(t._result),E(t)}function A(t,e){t._state===X&&(t._result=e,t._state=Z,0!==t._subscribers.length&&z(E,t))}function S(t,e){t._state===X&&(t._state=$,t._result=e,z(g,t))}function j(t,e,n,r){var o=t._subscribers,i=o.length;t._onerror=null,o[i]=e,o[i+Z]=n,o[i+$]=r,0===i&&t._state&&z(E,t)}function E(t){var e=t._subscribers,n=t._state;if(0!==e.length){for(var r=void 0,o=void 0,i=t._result,s=0;s<e.length;s+=3)r=e[s],o=e[s+n],r?T(n,r,o,i):o(i);t._subscribers.length=0}}function T(t,n,r,o){var i=e(r),s=void 0,u=void 0,c=!0;if(i){try{s=r(o)}catch(a){c=!1,u=a}if(n===s)return void S(n,d())}else s=o;n._state!==X||(i&&c?w(n,s):c===!1?S(n,u):t===Z?A(n,s):t===$&&S(n,s))}function M(t,e){try{e(function(e){w(t,e)},function(e){S(t,e)})}catch(n){S(t,n)}}function P(){return tt++}function x(t){t[V]=tt++,t._state=void 0,t._result=void 0,t._subscribers=[]}function C(){return new Error("Array Methods must be provided an Array")}function O(t){return new et(this,t).promise}function k(t){var e=this;return new e(L(t)?function(n,r){for(var o=t.length,i=0;i<o;i++)e.resolve(t[i]).then(n,r)}:function(t,e){return e(new TypeError("You must pass an array to race."))})}function F(t){var e=this,n=new e(p);return S(n,t),n}function Y(){throw new TypeError("You must pass a resolver function as the first argument to the promise constructor")}function q(){throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object 
constructor cannot be called as a function.")}function D(){var t=void 0;if("undefined"!=typeof global)t=global;else if("undefined"!=typeof self)t=self;else try{t=Function("return this")()}catch(e){throw new Error("polyfill failed because global object is unavailable in this environment")}var n=t.Promise;if(n){var r=null;try{r=Object.prototype.toString.call(n.resolve())}catch(e){}if("[object Promise]"===r&&!n.cast)return}t.Promise=nt}var K=void 0;K=Array.isArray?Array.isArray:function(t){return"[object Array]"===Object.prototype.toString.call(t)};var L=K,N=0,U=void 0,W=void 0,z=function(t,e){Q[N]=t,Q[N+1]=e,N+=2,2===N&&(W?W(a):R())},B="undefined"!=typeof window?window:void 0,G=B||{},H=G.MutationObserver||G.WebKitMutationObserver,I="undefined"==typeof self&&"undefined"!=typeof process&&"[object process]"==={}.toString.call(process),J="undefined"!=typeof Uint8ClampedArray&&"undefined"!=typeof importScripts&&"undefined"!=typeof MessageChannel,Q=new Array(1e3),R=void 0;R=I?o():H?s():J?u():void 0===B&&"function"==typeof require?f():c();var V=Math.random().toString(36).substring(2),X=void 0,Z=1,$=2,tt=0,et=function(){function t(t,e){this._instanceConstructor=t,this.promise=new t(p),this.promise[V]||x(this.promise),L(e)?(this.length=e.length,this._remaining=e.length,this._result=new Array(this.length),0===this.length?A(this.promise,this._result):(this.length=this.length||0,this._enumerate(e),0===this._remaining&&A(this.promise,this._result))):S(this.promise,C())}return t.prototype._enumerate=function(t){for(var e=0;this._state===X&&e<t.length;e++)this._eachEntry(t[e],e)},t.prototype._eachEntry=function(t,e){var n=this._instanceConstructor,r=n.resolve;if(r===h){var o=void 0,i=void 0,s=!1;try{o=t.then}catch(u){s=!0,i=u}if(o===l&&t._state!==X)this._settledAt(t._state,e,t._result);else if("function"!=typeof o)this._remaining--,this._result[e]=t;else if(n===nt){var c=new n(p);s?S(c,i):b(c,t,o),this._willSettleAt(c,e)}else this._willSettleAt(new n(function(e){return e(t)}),e)}else this._willSettleAt(r(t),e)},t.prototype._settledAt=function(t,e,n){var r=this.promise;r._state===X&&(this._remaining--,t===$?S(r,n):this._result[e]=n),0===this._remaining&&A(r,this._result)},t.prototype._willSettleAt=function(t,e){var n=this;j(t,void 0,function(t){return n._settledAt(Z,e,t)},function(t){return n._settledAt($,e,t)})},t}(),nt=function(){function t(e){this[V]=P(),this._result=this._state=void 0,this._subscribers=[],p!==e&&("function"!=typeof e&&Y(),this instanceof t?M(this,e):q())}return t.prototype["catch"]=function(t){return this.then(null,t)},t.prototype["finally"]=function(t){var n=this,r=n.constructor;return e(t)?n.then(function(e){return r.resolve(t()).then(function(){return e})},function(e){return r.resolve(t()).then(function(){throw e})}):n.then(t,t)},t}();return nt.prototype.then=l,nt.all=O,nt.race=k,nt.resolve=h,nt.reject=F,nt._setScheduler=n,nt._setAsap=r,nt._asap=z,nt.polyfill=D,nt.Promise=nt,nt.polyfill(),nt}); \ No newline at end of file
diff --git a/node_modules/es6-promise/dist/es6-promise.auto.min.map b/node_modules/es6-promise/dist/es6-promise.auto.min.map
deleted file mode 100644
index 1d2a119da..000000000
--- a/node_modules/es6-promise/dist/es6-promise.auto.min.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js","lib/es6-promise.auto.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if 
(promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = 
input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default Promise;","import Promise from './es6-promise';\nPromise.polyfill();\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;ACJ1BA,SAAO,CAAC,QAAQ,EAAE,CAAC;;;;;;;;","file":"es6-promise.auto.min.js"} \ No newline at end of file
diff --git a/node_modules/es6-promise/dist/es6-promise.js b/node_modules/es6-promise/dist/es6-promise.js
deleted file mode 100644
index 72fa0da4d..000000000
--- a/node_modules/es6-promise/dist/es6-promise.js
+++ /dev/null
@@ -1,1174 +0,0 @@
-/*!
- * @overview es6-promise - a tiny implementation of Promises/A+.
- * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)
- * @license Licensed under MIT license
- * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE
- * @version v4.2.8+1e68dce6
- */
-
-(function (global, factory) {
- typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
- typeof define === 'function' && define.amd ? define(factory) :
- (global.ES6Promise = factory());
-}(this, (function () { 'use strict';
-
-function objectOrFunction(x) {
- var type = typeof x;
- return x !== null && (type === 'object' || type === 'function');
-}
-
-function isFunction(x) {
- return typeof x === 'function';
-}
-
-
-
-var _isArray = void 0;
-if (Array.isArray) {
- _isArray = Array.isArray;
-} else {
- _isArray = function (x) {
- return Object.prototype.toString.call(x) === '[object Array]';
- };
-}
-
-var isArray = _isArray;
-
-var len = 0;
-var vertxNext = void 0;
-var customSchedulerFn = void 0;
-
-var asap = function asap(callback, arg) {
- queue[len] = callback;
- queue[len + 1] = arg;
- len += 2;
- if (len === 2) {
- // If len is 2, that means that we need to schedule an async flush.
- // If additional callbacks are queued before the queue is flushed, they
- // will be processed by this flush that we are scheduling.
- if (customSchedulerFn) {
- customSchedulerFn(flush);
- } else {
- scheduleFlush();
- }
- }
-};
-
-function setScheduler(scheduleFn) {
- customSchedulerFn = scheduleFn;
-}
-
-function setAsap(asapFn) {
- asap = asapFn;
-}
-
-var browserWindow = typeof window !== 'undefined' ? window : undefined;
-var browserGlobal = browserWindow || {};
-var BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
-var isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
-
-// test for web worker but not in IE10
-var isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';
-
-// node
-function useNextTick() {
- // node version 0.10.x displays a deprecation warning when nextTick is used recursively
- // see https://github.com/cujojs/when/issues/410 for details
- return function () {
- return process.nextTick(flush);
- };
-}
-
-// vertx
-function useVertxTimer() {
- if (typeof vertxNext !== 'undefined') {
- return function () {
- vertxNext(flush);
- };
- }
-
- return useSetTimeout();
-}
-
-function useMutationObserver() {
- var iterations = 0;
- var observer = new BrowserMutationObserver(flush);
- var node = document.createTextNode('');
- observer.observe(node, { characterData: true });
-
- return function () {
- node.data = iterations = ++iterations % 2;
- };
-}
-
-// web worker
-function useMessageChannel() {
- var channel = new MessageChannel();
- channel.port1.onmessage = flush;
- return function () {
- return channel.port2.postMessage(0);
- };
-}
-
-function useSetTimeout() {
- // Store setTimeout reference so es6-promise will be unaffected by
- // other code modifying setTimeout (like sinon.useFakeTimers())
- var globalSetTimeout = setTimeout;
- return function () {
- return globalSetTimeout(flush, 1);
- };
-}
-
-var queue = new Array(1000);
-function flush() {
- for (var i = 0; i < len; i += 2) {
- var callback = queue[i];
- var arg = queue[i + 1];
-
- callback(arg);
-
- queue[i] = undefined;
- queue[i + 1] = undefined;
- }
-
- len = 0;
-}
-
-function attemptVertx() {
- try {
- var vertx = Function('return this')().require('vertx');
- vertxNext = vertx.runOnLoop || vertx.runOnContext;
- return useVertxTimer();
- } catch (e) {
- return useSetTimeout();
- }
-}
-
-var scheduleFlush = void 0;
-// Decide what async method to use to triggering processing of queued callbacks:
-if (isNode) {
- scheduleFlush = useNextTick();
-} else if (BrowserMutationObserver) {
- scheduleFlush = useMutationObserver();
-} else if (isWorker) {
- scheduleFlush = useMessageChannel();
-} else if (browserWindow === undefined && typeof require === 'function') {
- scheduleFlush = attemptVertx();
-} else {
- scheduleFlush = useSetTimeout();
-}
-
-function then(onFulfillment, onRejection) {
- var parent = this;
-
- var child = new this.constructor(noop);
-
- if (child[PROMISE_ID] === undefined) {
- makePromise(child);
- }
-
- var _state = parent._state;
-
-
- if (_state) {
- var callback = arguments[_state - 1];
- asap(function () {
- return invokeCallback(_state, child, callback, parent._result);
- });
- } else {
- subscribe(parent, child, onFulfillment, onRejection);
- }
-
- return child;
-}
-
-/**
- `Promise.resolve` returns a promise that will become resolved with the
- passed `value`. It is shorthand for the following:
-
- ```javascript
- let promise = new Promise(function(resolve, reject){
- resolve(1);
- });
-
- promise.then(function(value){
- // value === 1
- });
- ```
-
- Instead of writing the above, your code now simply becomes the following:
-
- ```javascript
- let promise = Promise.resolve(1);
-
- promise.then(function(value){
- // value === 1
- });
- ```
-
- @method resolve
- @static
- @param {Any} value value that the returned promise will be resolved with
- Useful for tooling.
- @return {Promise} a promise that will become fulfilled with the given
- `value`
-*/
-function resolve$1(object) {
- /*jshint validthis:true */
- var Constructor = this;
-
- if (object && typeof object === 'object' && object.constructor === Constructor) {
- return object;
- }
-
- var promise = new Constructor(noop);
- resolve(promise, object);
- return promise;
-}
-
-var PROMISE_ID = Math.random().toString(36).substring(2);
-
-function noop() {}
-
-var PENDING = void 0;
-var FULFILLED = 1;
-var REJECTED = 2;
-
-function selfFulfillment() {
- return new TypeError("You cannot resolve a promise with itself");
-}
-
-function cannotReturnOwn() {
- return new TypeError('A promises callback cannot return that same promise.');
-}
-
-function tryThen(then$$1, value, fulfillmentHandler, rejectionHandler) {
- try {
- then$$1.call(value, fulfillmentHandler, rejectionHandler);
- } catch (e) {
- return e;
- }
-}
-
-function handleForeignThenable(promise, thenable, then$$1) {
- asap(function (promise) {
- var sealed = false;
- var error = tryThen(then$$1, thenable, function (value) {
- if (sealed) {
- return;
- }
- sealed = true;
- if (thenable !== value) {
- resolve(promise, value);
- } else {
- fulfill(promise, value);
- }
- }, function (reason) {
- if (sealed) {
- return;
- }
- sealed = true;
-
- reject(promise, reason);
- }, 'Settle: ' + (promise._label || ' unknown promise'));
-
- if (!sealed && error) {
- sealed = true;
- reject(promise, error);
- }
- }, promise);
-}
-
-function handleOwnThenable(promise, thenable) {
- if (thenable._state === FULFILLED) {
- fulfill(promise, thenable._result);
- } else if (thenable._state === REJECTED) {
- reject(promise, thenable._result);
- } else {
- subscribe(thenable, undefined, function (value) {
- return resolve(promise, value);
- }, function (reason) {
- return reject(promise, reason);
- });
- }
-}
-
-function handleMaybeThenable(promise, maybeThenable, then$$1) {
- if (maybeThenable.constructor === promise.constructor && then$$1 === then && maybeThenable.constructor.resolve === resolve$1) {
- handleOwnThenable(promise, maybeThenable);
- } else {
- if (then$$1 === undefined) {
- fulfill(promise, maybeThenable);
- } else if (isFunction(then$$1)) {
- handleForeignThenable(promise, maybeThenable, then$$1);
- } else {
- fulfill(promise, maybeThenable);
- }
- }
-}
-
-function resolve(promise, value) {
- if (promise === value) {
- reject(promise, selfFulfillment());
- } else if (objectOrFunction(value)) {
- var then$$1 = void 0;
- try {
- then$$1 = value.then;
- } catch (error) {
- reject(promise, error);
- return;
- }
- handleMaybeThenable(promise, value, then$$1);
- } else {
- fulfill(promise, value);
- }
-}
-
-function publishRejection(promise) {
- if (promise._onerror) {
- promise._onerror(promise._result);
- }
-
- publish(promise);
-}
-
-function fulfill(promise, value) {
- if (promise._state !== PENDING) {
- return;
- }
-
- promise._result = value;
- promise._state = FULFILLED;
-
- if (promise._subscribers.length !== 0) {
- asap(publish, promise);
- }
-}
-
-function reject(promise, reason) {
- if (promise._state !== PENDING) {
- return;
- }
- promise._state = REJECTED;
- promise._result = reason;
-
- asap(publishRejection, promise);
-}
-
-function subscribe(parent, child, onFulfillment, onRejection) {
- var _subscribers = parent._subscribers;
- var length = _subscribers.length;
-
-
- parent._onerror = null;
-
- _subscribers[length] = child;
- _subscribers[length + FULFILLED] = onFulfillment;
- _subscribers[length + REJECTED] = onRejection;
-
- if (length === 0 && parent._state) {
- asap(publish, parent);
- }
-}
-
-function publish(promise) {
- var subscribers = promise._subscribers;
- var settled = promise._state;
-
- if (subscribers.length === 0) {
- return;
- }
-
- var child = void 0,
- callback = void 0,
- detail = promise._result;
-
- for (var i = 0; i < subscribers.length; i += 3) {
- child = subscribers[i];
- callback = subscribers[i + settled];
-
- if (child) {
- invokeCallback(settled, child, callback, detail);
- } else {
- callback(detail);
- }
- }
-
- promise._subscribers.length = 0;
-}
-
-function invokeCallback(settled, promise, callback, detail) {
- var hasCallback = isFunction(callback),
- value = void 0,
- error = void 0,
- succeeded = true;
-
- if (hasCallback) {
- try {
- value = callback(detail);
- } catch (e) {
- succeeded = false;
- error = e;
- }
-
- if (promise === value) {
- reject(promise, cannotReturnOwn());
- return;
- }
- } else {
- value = detail;
- }
-
- if (promise._state !== PENDING) {
- // noop
- } else if (hasCallback && succeeded) {
- resolve(promise, value);
- } else if (succeeded === false) {
- reject(promise, error);
- } else if (settled === FULFILLED) {
- fulfill(promise, value);
- } else if (settled === REJECTED) {
- reject(promise, value);
- }
-}
-
-function initializePromise(promise, resolver) {
- try {
- resolver(function resolvePromise(value) {
- resolve(promise, value);
- }, function rejectPromise(reason) {
- reject(promise, reason);
- });
- } catch (e) {
- reject(promise, e);
- }
-}
-
-var id = 0;
-function nextId() {
- return id++;
-}
-
-function makePromise(promise) {
- promise[PROMISE_ID] = id++;
- promise._state = undefined;
- promise._result = undefined;
- promise._subscribers = [];
-}
-
-function validationError() {
- return new Error('Array Methods must be provided an Array');
-}
-
-var Enumerator = function () {
- function Enumerator(Constructor, input) {
- this._instanceConstructor = Constructor;
- this.promise = new Constructor(noop);
-
- if (!this.promise[PROMISE_ID]) {
- makePromise(this.promise);
- }
-
- if (isArray(input)) {
- this.length = input.length;
- this._remaining = input.length;
-
- this._result = new Array(this.length);
-
- if (this.length === 0) {
- fulfill(this.promise, this._result);
- } else {
- this.length = this.length || 0;
- this._enumerate(input);
- if (this._remaining === 0) {
- fulfill(this.promise, this._result);
- }
- }
- } else {
- reject(this.promise, validationError());
- }
- }
-
- Enumerator.prototype._enumerate = function _enumerate(input) {
- for (var i = 0; this._state === PENDING && i < input.length; i++) {
- this._eachEntry(input[i], i);
- }
- };
-
- Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {
- var c = this._instanceConstructor;
- var resolve$$1 = c.resolve;
-
-
- if (resolve$$1 === resolve$1) {
- var _then = void 0;
- var error = void 0;
- var didError = false;
- try {
- _then = entry.then;
- } catch (e) {
- didError = true;
- error = e;
- }
-
- if (_then === then && entry._state !== PENDING) {
- this._settledAt(entry._state, i, entry._result);
- } else if (typeof _then !== 'function') {
- this._remaining--;
- this._result[i] = entry;
- } else if (c === Promise$1) {
- var promise = new c(noop);
- if (didError) {
- reject(promise, error);
- } else {
- handleMaybeThenable(promise, entry, _then);
- }
- this._willSettleAt(promise, i);
- } else {
- this._willSettleAt(new c(function (resolve$$1) {
- return resolve$$1(entry);
- }), i);
- }
- } else {
- this._willSettleAt(resolve$$1(entry), i);
- }
- };
-
- Enumerator.prototype._settledAt = function _settledAt(state, i, value) {
- var promise = this.promise;
-
-
- if (promise._state === PENDING) {
- this._remaining--;
-
- if (state === REJECTED) {
- reject(promise, value);
- } else {
- this._result[i] = value;
- }
- }
-
- if (this._remaining === 0) {
- fulfill(promise, this._result);
- }
- };
-
- Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {
- var enumerator = this;
-
- subscribe(promise, undefined, function (value) {
- return enumerator._settledAt(FULFILLED, i, value);
- }, function (reason) {
- return enumerator._settledAt(REJECTED, i, reason);
- });
- };
-
- return Enumerator;
-}();
-
-/**
- `Promise.all` accepts an array of promises, and returns a new promise which
- is fulfilled with an array of fulfillment values for the passed promises, or
- rejected with the reason of the first passed promise to be rejected. It casts all
- elements of the passed iterable to promises as it runs this algorithm.
-
- Example:
-
- ```javascript
- let promise1 = resolve(1);
- let promise2 = resolve(2);
- let promise3 = resolve(3);
- let promises = [ promise1, promise2, promise3 ];
-
- Promise.all(promises).then(function(array){
- // The array here would be [ 1, 2, 3 ];
- });
- ```
-
- If any of the `promises` given to `all` are rejected, the first promise
- that is rejected will be given as an argument to the returned promises's
- rejection handler. For example:
-
- Example:
-
- ```javascript
- let promise1 = resolve(1);
- let promise2 = reject(new Error("2"));
- let promise3 = reject(new Error("3"));
- let promises = [ promise1, promise2, promise3 ];
-
- Promise.all(promises).then(function(array){
- // Code here never runs because there are rejected promises!
- }, function(error) {
- // error.message === "2"
- });
- ```
-
- @method all
- @static
- @param {Array} entries array of promises
- @param {String} label optional string for labeling the promise.
- Useful for tooling.
- @return {Promise} promise that is fulfilled when all `promises` have been
- fulfilled, or rejected if any of them become rejected.
- @static
-*/
-function all(entries) {
- return new Enumerator(this, entries).promise;
-}
-
-/**
- `Promise.race` returns a new promise which is settled in the same way as the
- first passed promise to settle.
-
- Example:
-
- ```javascript
- let promise1 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 1');
- }, 200);
- });
-
- let promise2 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 2');
- }, 100);
- });
-
- Promise.race([promise1, promise2]).then(function(result){
- // result === 'promise 2' because it was resolved before promise1
- // was resolved.
- });
- ```
-
- `Promise.race` is deterministic in that only the state of the first
- settled promise matters. For example, even if other promises given to the
- `promises` array argument are resolved, but the first settled promise has
- become rejected before the other promises became fulfilled, the returned
- promise will become rejected:
-
- ```javascript
- let promise1 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 1');
- }, 200);
- });
-
- let promise2 = new Promise(function(resolve, reject){
- setTimeout(function(){
- reject(new Error('promise 2'));
- }, 100);
- });
-
- Promise.race([promise1, promise2]).then(function(result){
- // Code here never runs
- }, function(reason){
- // reason.message === 'promise 2' because promise 2 became rejected before
- // promise 1 became fulfilled
- });
- ```
-
- An example real-world use case is implementing timeouts:
-
- ```javascript
- Promise.race([ajax('foo.json'), timeout(5000)])
- ```
-
- @method race
- @static
- @param {Array} promises array of promises to observe
- Useful for tooling.
- @return {Promise} a promise which settles in the same way as the first passed
- promise to settle.
-*/
-function race(entries) {
- /*jshint validthis:true */
- var Constructor = this;
-
- if (!isArray(entries)) {
- return new Constructor(function (_, reject) {
- return reject(new TypeError('You must pass an array to race.'));
- });
- } else {
- return new Constructor(function (resolve, reject) {
- var length = entries.length;
- for (var i = 0; i < length; i++) {
- Constructor.resolve(entries[i]).then(resolve, reject);
- }
- });
- }
-}
-
-/**
- `Promise.reject` returns a promise rejected with the passed `reason`.
- It is shorthand for the following:
-
- ```javascript
- let promise = new Promise(function(resolve, reject){
- reject(new Error('WHOOPS'));
- });
-
- promise.then(function(value){
- // Code here doesn't run because the promise is rejected!
- }, function(reason){
- // reason.message === 'WHOOPS'
- });
- ```
-
- Instead of writing the above, your code now simply becomes the following:
-
- ```javascript
- let promise = Promise.reject(new Error('WHOOPS'));
-
- promise.then(function(value){
- // Code here doesn't run because the promise is rejected!
- }, function(reason){
- // reason.message === 'WHOOPS'
- });
- ```
-
- @method reject
- @static
- @param {Any} reason value that the returned promise will be rejected with.
- Useful for tooling.
- @return {Promise} a promise rejected with the given `reason`.
-*/
-function reject$1(reason) {
- /*jshint validthis:true */
- var Constructor = this;
- var promise = new Constructor(noop);
- reject(promise, reason);
- return promise;
-}
-
-function needsResolver() {
- throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
-}
-
-function needsNew() {
- throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
-}
-
-/**
- Promise objects represent the eventual result of an asynchronous operation. The
- primary way of interacting with a promise is through its `then` method, which
- registers callbacks to receive either a promise's eventual value or the reason
- why the promise cannot be fulfilled.
-
- Terminology
- -----------
-
- - `promise` is an object or function with a `then` method whose behavior conforms to this specification.
- - `thenable` is an object or function that defines a `then` method.
- - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).
- - `exception` is a value that is thrown using the throw statement.
- - `reason` is a value that indicates why a promise was rejected.
- - `settled` the final resting state of a promise, fulfilled or rejected.
-
- A promise can be in one of three states: pending, fulfilled, or rejected.
-
- Promises that are fulfilled have a fulfillment value and are in the fulfilled
- state. Promises that are rejected have a rejection reason and are in the
- rejected state. A fulfillment value is never a thenable.
-
- Promises can also be said to *resolve* a value. If this value is also a
- promise, then the original promise's settled state will match the value's
- settled state. So a promise that *resolves* a promise that rejects will
- itself reject, and a promise that *resolves* a promise that fulfills will
- itself fulfill.
-
-
- Basic Usage:
- ------------
-
- ```js
- let promise = new Promise(function(resolve, reject) {
- // on success
- resolve(value);
-
- // on failure
- reject(reason);
- });
-
- promise.then(function(value) {
- // on fulfillment
- }, function(reason) {
- // on rejection
- });
- ```
-
- Advanced Usage:
- ---------------
-
- Promises shine when abstracting away asynchronous interactions such as
- `XMLHttpRequest`s.
-
- ```js
- function getJSON(url) {
- return new Promise(function(resolve, reject){
- let xhr = new XMLHttpRequest();
-
- xhr.open('GET', url);
- xhr.onreadystatechange = handler;
- xhr.responseType = 'json';
- xhr.setRequestHeader('Accept', 'application/json');
- xhr.send();
-
- function handler() {
- if (this.readyState === this.DONE) {
- if (this.status === 200) {
- resolve(this.response);
- } else {
- reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));
- }
- }
- };
- });
- }
-
- getJSON('/posts.json').then(function(json) {
- // on fulfillment
- }, function(reason) {
- // on rejection
- });
- ```
-
- Unlike callbacks, promises are great composable primitives.
-
- ```js
- Promise.all([
- getJSON('/posts'),
- getJSON('/comments')
- ]).then(function(values){
- values[0] // => postsJSON
- values[1] // => commentsJSON
-
- return values;
- });
- ```
-
- @class Promise
- @param {Function} resolver
- Useful for tooling.
- @constructor
-*/
-
-var Promise$1 = function () {
- function Promise(resolver) {
- this[PROMISE_ID] = nextId();
- this._result = this._state = undefined;
- this._subscribers = [];
-
- if (noop !== resolver) {
- typeof resolver !== 'function' && needsResolver();
- this instanceof Promise ? initializePromise(this, resolver) : needsNew();
- }
- }
-
- /**
- The primary way of interacting with a promise is through its `then` method,
- which registers callbacks to receive either a promise's eventual value or the
- reason why the promise cannot be fulfilled.
- ```js
- findUser().then(function(user){
- // user is available
- }, function(reason){
- // user is unavailable, and you are given the reason why
- });
- ```
- Chaining
- --------
- The return value of `then` is itself a promise. This second, 'downstream'
- promise is resolved with the return value of the first promise's fulfillment
- or rejection handler, or rejected if the handler throws an exception.
- ```js
- findUser().then(function (user) {
- return user.name;
- }, function (reason) {
- return 'default name';
- }).then(function (userName) {
- // If `findUser` fulfilled, `userName` will be the user's name, otherwise it
- // will be `'default name'`
- });
- findUser().then(function (user) {
- throw new Error('Found user, but still unhappy');
- }, function (reason) {
- throw new Error('`findUser` rejected and we're unhappy');
- }).then(function (value) {
- // never reached
- }, function (reason) {
- // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.
- // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.
- });
- ```
- If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.
- ```js
- findUser().then(function (user) {
- throw new PedagogicalException('Upstream error');
- }).then(function (value) {
- // never reached
- }).then(function (value) {
- // never reached
- }, function (reason) {
- // The `PedgagocialException` is propagated all the way down to here
- });
- ```
- Assimilation
- ------------
- Sometimes the value you want to propagate to a downstream promise can only be
- retrieved asynchronously. This can be achieved by returning a promise in the
- fulfillment or rejection handler. The downstream promise will then be pending
- until the returned promise is settled. This is called *assimilation*.
- ```js
- findUser().then(function (user) {
- return findCommentsByAuthor(user);
- }).then(function (comments) {
- // The user's comments are now available
- });
- ```
- If the assimliated promise rejects, then the downstream promise will also reject.
- ```js
- findUser().then(function (user) {
- return findCommentsByAuthor(user);
- }).then(function (comments) {
- // If `findCommentsByAuthor` fulfills, we'll have the value here
- }, function (reason) {
- // If `findCommentsByAuthor` rejects, we'll have the reason here
- });
- ```
- Simple Example
- --------------
- Synchronous Example
- ```javascript
- let result;
- try {
- result = findResult();
- // success
- } catch(reason) {
- // failure
- }
- ```
- Errback Example
- ```js
- findResult(function(result, err){
- if (err) {
- // failure
- } else {
- // success
- }
- });
- ```
- Promise Example;
- ```javascript
- findResult().then(function(result){
- // success
- }, function(reason){
- // failure
- });
- ```
- Advanced Example
- --------------
- Synchronous Example
- ```javascript
- let author, books;
- try {
- author = findAuthor();
- books = findBooksByAuthor(author);
- // success
- } catch(reason) {
- // failure
- }
- ```
- Errback Example
- ```js
- function foundBooks(books) {
- }
- function failure(reason) {
- }
- findAuthor(function(author, err){
- if (err) {
- failure(err);
- // failure
- } else {
- try {
- findBoooksByAuthor(author, function(books, err) {
- if (err) {
- failure(err);
- } else {
- try {
- foundBooks(books);
- } catch(reason) {
- failure(reason);
- }
- }
- });
- } catch(error) {
- failure(err);
- }
- // success
- }
- });
- ```
- Promise Example;
- ```javascript
- findAuthor().
- then(findBooksByAuthor).
- then(function(books){
- // found books
- }).catch(function(reason){
- // something went wrong
- });
- ```
- @method then
- @param {Function} onFulfilled
- @param {Function} onRejected
- Useful for tooling.
- @return {Promise}
- */
-
- /**
- `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same
- as the catch block of a try/catch statement.
- ```js
- function findAuthor(){
- throw new Error('couldn't find that author');
- }
- // synchronous
- try {
- findAuthor();
- } catch(reason) {
- // something went wrong
- }
- // async with promises
- findAuthor().catch(function(reason){
- // something went wrong
- });
- ```
- @method catch
- @param {Function} onRejection
- Useful for tooling.
- @return {Promise}
- */
-
-
- Promise.prototype.catch = function _catch(onRejection) {
- return this.then(null, onRejection);
- };
-
- /**
- `finally` will be invoked regardless of the promise's fate just as native
- try/catch/finally behaves
-
- Synchronous example:
-
- ```js
- findAuthor() {
- if (Math.random() > 0.5) {
- throw new Error();
- }
- return new Author();
- }
-
- try {
- return findAuthor(); // succeed or fail
- } catch(error) {
- return findOtherAuther();
- } finally {
- // always runs
- // doesn't affect the return value
- }
- ```
-
- Asynchronous example:
-
- ```js
- findAuthor().catch(function(reason){
- return findOtherAuther();
- }).finally(function(){
- // author was either found, or not
- });
- ```
-
- @method finally
- @param {Function} callback
- @return {Promise}
- */
-
-
- Promise.prototype.finally = function _finally(callback) {
- var promise = this;
- var constructor = promise.constructor;
-
- if (isFunction(callback)) {
- return promise.then(function (value) {
- return constructor.resolve(callback()).then(function () {
- return value;
- });
- }, function (reason) {
- return constructor.resolve(callback()).then(function () {
- throw reason;
- });
- });
- }
-
- return promise.then(callback, callback);
- };
-
- return Promise;
-}();
-
-Promise$1.prototype.then = then;
-Promise$1.all = all;
-Promise$1.race = race;
-Promise$1.resolve = resolve$1;
-Promise$1.reject = reject$1;
-Promise$1._setScheduler = setScheduler;
-Promise$1._setAsap = setAsap;
-Promise$1._asap = asap;
-
-/*global self*/
-function polyfill() {
- var local = void 0;
-
- if (typeof global !== 'undefined') {
- local = global;
- } else if (typeof self !== 'undefined') {
- local = self;
- } else {
- try {
- local = Function('return this')();
- } catch (e) {
- throw new Error('polyfill failed because global object is unavailable in this environment');
- }
- }
-
- var P = local.Promise;
-
- if (P) {
- var promiseToString = null;
- try {
- promiseToString = Object.prototype.toString.call(P.resolve());
- } catch (e) {
- // silently ignored
- }
-
- if (promiseToString === '[object Promise]' && !P.cast) {
- return;
- }
- }
-
- local.Promise = Promise$1;
-}
-
-// Strange compat..
-Promise$1.polyfill = polyfill;
-Promise$1.Promise = Promise$1;
-
-return Promise$1;
-
-})));
-
-
-
-//# sourceMappingURL=es6-promise.map
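For context on what this hunk drops: the deleted dist file is the es6-promise polyfill build, whose `polyfill()` entry point appears near the end of the file above (it only replaces `local.Promise` when no conformant native Promise is found). A minimal Node-style usage sketch of the package's documented API, shown for orientation only and not part of the removed files:

```js
// Install the bundled implementation as the global Promise only when a
// native, spec-conformant Promise is not already present (see polyfill()
// in the deleted source above).
require('es6-promise').polyfill();

// Or use the constructor directly without touching the global object:
var Promise = require('es6-promise').Promise;

Promise.resolve(1).then(function (value) {
  // value === 1
});
```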
diff --git a/node_modules/es6-promise/dist/es6-promise.map b/node_modules/es6-promise/dist/es6-promise.map
deleted file mode 100644
index 27db4142f..000000000
--- a/node_modules/es6-promise/dist/es6-promise.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if 
(promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = 
input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;;;;;;;","file":"es6-promise.js"} \ No newline at end of file
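The source map removed above embeds the full es6-promise sources, including the documentation for `Promise.prototype.finally`. A minimal sketch of the semantics that documentation describes — the callback runs whether the promise fulfills or rejects, and the settled value passes through — using the package's documented API (the `fetchConfig` helper is a hypothetical placeholder, and `require('es6-promise')` assumes the package's default entry point):

```js
// Sketch of the documented `finally` semantics from the deleted sources:
// the callback runs on fulfillment and rejection alike, and the original
// outcome is preserved unless the callback itself throws.
var Promise = require('es6-promise').Promise; // assumed entry point

function fetchConfig(shouldFail) { // hypothetical helper for illustration
  return shouldFail
    ? Promise.reject(new Error('boom'))
    : Promise.resolve({ retries: 3 });
}

fetchConfig(false)
  .finally(function () {
    console.log('cleanup runs either way');
  })
  .then(function (config) {
    console.log('value still flows through:', config.retries); // 3
  });
```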
diff --git a/node_modules/es6-promise/dist/es6-promise.min.js b/node_modules/es6-promise/dist/es6-promise.min.js
deleted file mode 100644
index 6af5903ab..000000000
--- a/node_modules/es6-promise/dist/es6-promise.min.js
+++ /dev/null
@@ -1 +0,0 @@
-!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.ES6Promise=e()}(this,function(){"use strict";function t(t){var e=typeof t;return null!==t&&("object"===e||"function"===e)}function e(t){return"function"==typeof t}function n(t){W=t}function r(t){z=t}function o(){return function(){return process.nextTick(a)}}function i(){return"undefined"!=typeof U?function(){U(a)}:c()}function s(){var t=0,e=new H(a),n=document.createTextNode("");return e.observe(n,{characterData:!0}),function(){n.data=t=++t%2}}function u(){var t=new MessageChannel;return t.port1.onmessage=a,function(){return t.port2.postMessage(0)}}function c(){var t=setTimeout;return function(){return t(a,1)}}function a(){for(var t=0;t<N;t+=2){var e=Q[t],n=Q[t+1];e(n),Q[t]=void 0,Q[t+1]=void 0}N=0}function f(){try{var t=Function("return this")().require("vertx");return U=t.runOnLoop||t.runOnContext,i()}catch(e){return c()}}function l(t,e){var n=this,r=new this.constructor(v);void 0===r[V]&&x(r);var o=n._state;if(o){var i=arguments[o-1];z(function(){return T(o,r,i,n._result)})}else j(n,r,t,e);return r}function h(t){var e=this;if(t&&"object"==typeof t&&t.constructor===e)return t;var n=new e(v);return w(n,t),n}function v(){}function p(){return new TypeError("You cannot resolve a promise with itself")}function d(){return new TypeError("A promises callback cannot return that same promise.")}function _(t,e,n,r){try{t.call(e,n,r)}catch(o){return o}}function y(t,e,n){z(function(t){var r=!1,o=_(n,e,function(n){r||(r=!0,e!==n?w(t,n):A(t,n))},function(e){r||(r=!0,S(t,e))},"Settle: "+(t._label||" unknown promise"));!r&&o&&(r=!0,S(t,o))},t)}function m(t,e){e._state===Z?A(t,e._result):e._state===$?S(t,e._result):j(e,void 0,function(e){return w(t,e)},function(e){return S(t,e)})}function b(t,n,r){n.constructor===t.constructor&&r===l&&n.constructor.resolve===h?m(t,n):void 0===r?A(t,n):e(r)?y(t,n,r):A(t,n)}function w(e,n){if(e===n)S(e,p());else if(t(n)){var r=void 0;try{r=n.then}catch(o){return void S(e,o)}b(e,n,r)}else A(e,n)}function g(t){t._onerror&&t._onerror(t._result),E(t)}function A(t,e){t._state===X&&(t._result=e,t._state=Z,0!==t._subscribers.length&&z(E,t))}function S(t,e){t._state===X&&(t._state=$,t._result=e,z(g,t))}function j(t,e,n,r){var o=t._subscribers,i=o.length;t._onerror=null,o[i]=e,o[i+Z]=n,o[i+$]=r,0===i&&t._state&&z(E,t)}function E(t){var e=t._subscribers,n=t._state;if(0!==e.length){for(var r=void 0,o=void 0,i=t._result,s=0;s<e.length;s+=3)r=e[s],o=e[s+n],r?T(n,r,o,i):o(i);t._subscribers.length=0}}function T(t,n,r,o){var i=e(r),s=void 0,u=void 0,c=!0;if(i){try{s=r(o)}catch(a){c=!1,u=a}if(n===s)return void S(n,d())}else s=o;n._state!==X||(i&&c?w(n,s):c===!1?S(n,u):t===Z?A(n,s):t===$&&S(n,s))}function M(t,e){try{e(function(e){w(t,e)},function(e){S(t,e)})}catch(n){S(t,n)}}function P(){return tt++}function x(t){t[V]=tt++,t._state=void 0,t._result=void 0,t._subscribers=[]}function C(){return new Error("Array Methods must be provided an Array")}function O(t){return new et(this,t).promise}function k(t){var e=this;return new e(L(t)?function(n,r){for(var o=t.length,i=0;i<o;i++)e.resolve(t[i]).then(n,r)}:function(t,e){return e(new TypeError("You must pass an array to race."))})}function F(t){var e=this,n=new e(v);return S(n,t),n}function Y(){throw new TypeError("You must pass a resolver function as the first argument to the promise constructor")}function q(){throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object 
constructor cannot be called as a function.")}function D(){var t=void 0;if("undefined"!=typeof global)t=global;else if("undefined"!=typeof self)t=self;else try{t=Function("return this")()}catch(e){throw new Error("polyfill failed because global object is unavailable in this environment")}var n=t.Promise;if(n){var r=null;try{r=Object.prototype.toString.call(n.resolve())}catch(e){}if("[object Promise]"===r&&!n.cast)return}t.Promise=nt}var K=void 0;K=Array.isArray?Array.isArray:function(t){return"[object Array]"===Object.prototype.toString.call(t)};var L=K,N=0,U=void 0,W=void 0,z=function(t,e){Q[N]=t,Q[N+1]=e,N+=2,2===N&&(W?W(a):R())},B="undefined"!=typeof window?window:void 0,G=B||{},H=G.MutationObserver||G.WebKitMutationObserver,I="undefined"==typeof self&&"undefined"!=typeof process&&"[object process]"==={}.toString.call(process),J="undefined"!=typeof Uint8ClampedArray&&"undefined"!=typeof importScripts&&"undefined"!=typeof MessageChannel,Q=new Array(1e3),R=void 0;R=I?o():H?s():J?u():void 0===B&&"function"==typeof require?f():c();var V=Math.random().toString(36).substring(2),X=void 0,Z=1,$=2,tt=0,et=function(){function t(t,e){this._instanceConstructor=t,this.promise=new t(v),this.promise[V]||x(this.promise),L(e)?(this.length=e.length,this._remaining=e.length,this._result=new Array(this.length),0===this.length?A(this.promise,this._result):(this.length=this.length||0,this._enumerate(e),0===this._remaining&&A(this.promise,this._result))):S(this.promise,C())}return t.prototype._enumerate=function(t){for(var e=0;this._state===X&&e<t.length;e++)this._eachEntry(t[e],e)},t.prototype._eachEntry=function(t,e){var n=this._instanceConstructor,r=n.resolve;if(r===h){var o=void 0,i=void 0,s=!1;try{o=t.then}catch(u){s=!0,i=u}if(o===l&&t._state!==X)this._settledAt(t._state,e,t._result);else if("function"!=typeof o)this._remaining--,this._result[e]=t;else if(n===nt){var c=new n(v);s?S(c,i):b(c,t,o),this._willSettleAt(c,e)}else this._willSettleAt(new n(function(e){return e(t)}),e)}else this._willSettleAt(r(t),e)},t.prototype._settledAt=function(t,e,n){var r=this.promise;r._state===X&&(this._remaining--,t===$?S(r,n):this._result[e]=n),0===this._remaining&&A(r,this._result)},t.prototype._willSettleAt=function(t,e){var n=this;j(t,void 0,function(t){return n._settledAt(Z,e,t)},function(t){return n._settledAt($,e,t)})},t}(),nt=function(){function t(e){this[V]=P(),this._result=this._state=void 0,this._subscribers=[],v!==e&&("function"!=typeof e&&Y(),this instanceof t?M(this,e):q())}return t.prototype["catch"]=function(t){return this.then(null,t)},t.prototype["finally"]=function(t){var n=this,r=n.constructor;return e(t)?n.then(function(e){return r.resolve(t()).then(function(){return e})},function(e){return r.resolve(t()).then(function(){throw e})}):n.then(t,t)},t}();return nt.prototype.then=l,nt.all=O,nt.race=k,nt.resolve=h,nt.reject=F,nt._setScheduler=n,nt._setAsap=r,nt._asap=z,nt.polyfill=D,nt.Promise=nt,nt}); \ No newline at end of file
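The minified bundle removed above is the UMD build: it registers itself as the `ES6Promise` global (or as the CommonJS export) and exposes `polyfill()`, which replaces the environment's `Promise` only when no spec-compliant native implementation is detected (the `[object Promise]` / `.cast` check in the code above). A minimal usage sketch, assuming the package's default entry resolves to this bundle:

```js
// Hedged usage sketch for the deleted UMD bundle: the module export is the
// Promise constructor itself, with polyfill() attached as a static.
var ES6Promise = require('es6-promise'); // window.ES6Promise in a browser

// Installs the shim on the global object only if no usable native Promise
// is present; otherwise the native implementation is left alone.
ES6Promise.polyfill();

Promise.all([Promise.resolve('posts'), Promise.resolve('comments')])
  .then(function (values) {
    console.log(values); // ['posts', 'comments']
  });
```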
diff --git a/node_modules/es6-promise/dist/es6-promise.min.map b/node_modules/es6-promise/dist/es6-promise.min.map
deleted file mode 100644
index d51081573..000000000
--- a/node_modules/es6-promise/dist/es6-promise.min.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"sources":["config/versionTemplate.txt","lib/es6-promise/utils.js","lib/es6-promise/asap.js","lib/es6-promise/then.js","lib/es6-promise/promise/resolve.js","lib/es6-promise/-internal.js","lib/es6-promise/enumerator.js","lib/es6-promise/promise/all.js","lib/es6-promise/promise/race.js","lib/es6-promise/promise/reject.js","lib/es6-promise/promise.js","lib/es6-promise/polyfill.js","lib/es6-promise.js"],"sourcesContent":["/*!\n * @overview es6-promise - a tiny implementation of Promises/A+.\n * @copyright Copyright (c) 2014 Yehuda Katz, Tom Dale, Stefan Penner and contributors (Conversion to ES6 API by Jake Archibald)\n * @license Licensed under MIT license\n * See https://raw.githubusercontent.com/stefanpenner/es6-promise/master/LICENSE\n * @version v4.2.8+1e68dce6\n */\n","export function objectOrFunction(x) {\n var type = typeof x;\n return x !== null && (type === 'object' || type === 'function');\n}\n\nexport function isFunction(x) {\n return typeof x === 'function';\n}\n\nexport function isMaybeThenable(x) {\n return x !== null && typeof x === 'object';\n}\n\nvar _isArray = void 0;\nif (Array.isArray) {\n _isArray = Array.isArray;\n} else {\n _isArray = function (x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n };\n}\n\nexport var isArray = _isArray;","var len = 0;\nvar vertxNext = void 0;\nvar customSchedulerFn = void 0;\n\nexport var asap = function asap(callback, arg) {\n queue[len] = callback;\n queue[len + 1] = arg;\n len += 2;\n if (len === 2) {\n // If len is 2, that means that we need to schedule an async flush.\n // If additional callbacks are queued before the queue is flushed, they\n // will be processed by this flush that we are scheduling.\n if (customSchedulerFn) {\n customSchedulerFn(flush);\n } else {\n scheduleFlush();\n }\n }\n};\n\nexport function setScheduler(scheduleFn) {\n customSchedulerFn = scheduleFn;\n}\n\nexport function setAsap(asapFn) {\n asap = asapFn;\n}\n\nvar browserWindow = typeof window !== 'undefined' ? 
window : undefined;\nvar browserGlobal = browserWindow || {};\nvar BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;\nvar isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';\n\n// test for web worker but not in IE10\nvar isWorker = typeof Uint8ClampedArray !== 'undefined' && typeof importScripts !== 'undefined' && typeof MessageChannel !== 'undefined';\n\n// node\nfunction useNextTick() {\n // node version 0.10.x displays a deprecation warning when nextTick is used recursively\n // see https://github.com/cujojs/when/issues/410 for details\n return function () {\n return process.nextTick(flush);\n };\n}\n\n// vertx\nfunction useVertxTimer() {\n if (typeof vertxNext !== 'undefined') {\n return function () {\n vertxNext(flush);\n };\n }\n\n return useSetTimeout();\n}\n\nfunction useMutationObserver() {\n var iterations = 0;\n var observer = new BrowserMutationObserver(flush);\n var node = document.createTextNode('');\n observer.observe(node, { characterData: true });\n\n return function () {\n node.data = iterations = ++iterations % 2;\n };\n}\n\n// web worker\nfunction useMessageChannel() {\n var channel = new MessageChannel();\n channel.port1.onmessage = flush;\n return function () {\n return channel.port2.postMessage(0);\n };\n}\n\nfunction useSetTimeout() {\n // Store setTimeout reference so es6-promise will be unaffected by\n // other code modifying setTimeout (like sinon.useFakeTimers())\n var globalSetTimeout = setTimeout;\n return function () {\n return globalSetTimeout(flush, 1);\n };\n}\n\nvar queue = new Array(1000);\nfunction flush() {\n for (var i = 0; i < len; i += 2) {\n var callback = queue[i];\n var arg = queue[i + 1];\n\n callback(arg);\n\n queue[i] = undefined;\n queue[i + 1] = undefined;\n }\n\n len = 0;\n}\n\nfunction attemptVertx() {\n try {\n var vertx = Function('return this')().require('vertx');\n vertxNext = vertx.runOnLoop || vertx.runOnContext;\n return useVertxTimer();\n } catch (e) {\n return useSetTimeout();\n }\n}\n\nvar scheduleFlush = void 0;\n// Decide what async method to use to triggering processing of queued callbacks:\nif (isNode) {\n scheduleFlush = useNextTick();\n} else if (BrowserMutationObserver) {\n scheduleFlush = useMutationObserver();\n} else if (isWorker) {\n scheduleFlush = useMessageChannel();\n} else if (browserWindow === undefined && typeof require === 'function') {\n scheduleFlush = attemptVertx();\n} else {\n scheduleFlush = useSetTimeout();\n}","import { invokeCallback, subscribe, FULFILLED, REJECTED, noop, makePromise, PROMISE_ID } from './-internal';\n\nimport { asap } from './asap';\n\nexport default function then(onFulfillment, onRejection) {\n var parent = this;\n\n var child = new this.constructor(noop);\n\n if (child[PROMISE_ID] === undefined) {\n makePromise(child);\n }\n\n var _state = parent._state;\n\n\n if (_state) {\n var callback = arguments[_state - 1];\n asap(function () {\n return invokeCallback(_state, child, callback, parent._result);\n });\n } else {\n subscribe(parent, child, onFulfillment, onRejection);\n }\n\n return child;\n}","import { noop, resolve as _resolve } from '../-internal';\n\n/**\n `Promise.resolve` returns a promise that will become resolved with the\n passed `value`. 
It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n resolve(1);\n });\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n ```javascript\n let promise = Promise.resolve(1);\n\n promise.then(function(value){\n // value === 1\n });\n ```\n\n @method resolve\n @static\n @param {Any} value value that the returned promise will be resolved with\n Useful for tooling.\n @return {Promise} a promise that will become fulfilled with the given\n `value`\n*/\nexport default function resolve(object) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (object && typeof object === 'object' && object.constructor === Constructor) {\n return object;\n }\n\n var promise = new Constructor(noop);\n _resolve(promise, object);\n return promise;\n}","import { objectOrFunction, isFunction } from './utils';\n\nimport { asap } from './asap';\n\nimport originalThen from './then';\nimport originalResolve from './promise/resolve';\n\nexport var PROMISE_ID = Math.random().toString(36).substring(2);\n\nfunction noop() {}\n\nvar PENDING = void 0;\nvar FULFILLED = 1;\nvar REJECTED = 2;\n\nfunction selfFulfillment() {\n return new TypeError(\"You cannot resolve a promise with itself\");\n}\n\nfunction cannotReturnOwn() {\n return new TypeError('A promises callback cannot return that same promise.');\n}\n\nfunction tryThen(then, value, fulfillmentHandler, rejectionHandler) {\n try {\n then.call(value, fulfillmentHandler, rejectionHandler);\n } catch (e) {\n return e;\n }\n}\n\nfunction handleForeignThenable(promise, thenable, then) {\n asap(function (promise) {\n var sealed = false;\n var error = tryThen(then, thenable, function (value) {\n if (sealed) {\n return;\n }\n sealed = true;\n if (thenable !== value) {\n resolve(promise, value);\n } else {\n fulfill(promise, value);\n }\n }, function (reason) {\n if (sealed) {\n return;\n }\n sealed = true;\n\n reject(promise, reason);\n }, 'Settle: ' + (promise._label || ' unknown promise'));\n\n if (!sealed && error) {\n sealed = true;\n reject(promise, error);\n }\n }, promise);\n}\n\nfunction handleOwnThenable(promise, thenable) {\n if (thenable._state === FULFILLED) {\n fulfill(promise, thenable._result);\n } else if (thenable._state === REJECTED) {\n reject(promise, thenable._result);\n } else {\n subscribe(thenable, undefined, function (value) {\n return resolve(promise, value);\n }, function (reason) {\n return reject(promise, reason);\n });\n }\n}\n\nfunction handleMaybeThenable(promise, maybeThenable, then) {\n if (maybeThenable.constructor === promise.constructor && then === originalThen && maybeThenable.constructor.resolve === originalResolve) {\n handleOwnThenable(promise, maybeThenable);\n } else {\n if (then === undefined) {\n fulfill(promise, maybeThenable);\n } else if (isFunction(then)) {\n handleForeignThenable(promise, maybeThenable, then);\n } else {\n fulfill(promise, maybeThenable);\n }\n }\n}\n\nfunction resolve(promise, value) {\n if (promise === value) {\n reject(promise, selfFulfillment());\n } else if (objectOrFunction(value)) {\n var then = void 0;\n try {\n then = value.then;\n } catch (error) {\n reject(promise, error);\n return;\n }\n handleMaybeThenable(promise, value, then);\n } else {\n fulfill(promise, value);\n }\n}\n\nfunction publishRejection(promise) {\n if (promise._onerror) {\n promise._onerror(promise._result);\n }\n\n publish(promise);\n}\n\nfunction fulfill(promise, value) {\n if 
(promise._state !== PENDING) {\n return;\n }\n\n promise._result = value;\n promise._state = FULFILLED;\n\n if (promise._subscribers.length !== 0) {\n asap(publish, promise);\n }\n}\n\nfunction reject(promise, reason) {\n if (promise._state !== PENDING) {\n return;\n }\n promise._state = REJECTED;\n promise._result = reason;\n\n asap(publishRejection, promise);\n}\n\nfunction subscribe(parent, child, onFulfillment, onRejection) {\n var _subscribers = parent._subscribers;\n var length = _subscribers.length;\n\n\n parent._onerror = null;\n\n _subscribers[length] = child;\n _subscribers[length + FULFILLED] = onFulfillment;\n _subscribers[length + REJECTED] = onRejection;\n\n if (length === 0 && parent._state) {\n asap(publish, parent);\n }\n}\n\nfunction publish(promise) {\n var subscribers = promise._subscribers;\n var settled = promise._state;\n\n if (subscribers.length === 0) {\n return;\n }\n\n var child = void 0,\n callback = void 0,\n detail = promise._result;\n\n for (var i = 0; i < subscribers.length; i += 3) {\n child = subscribers[i];\n callback = subscribers[i + settled];\n\n if (child) {\n invokeCallback(settled, child, callback, detail);\n } else {\n callback(detail);\n }\n }\n\n promise._subscribers.length = 0;\n}\n\nfunction invokeCallback(settled, promise, callback, detail) {\n var hasCallback = isFunction(callback),\n value = void 0,\n error = void 0,\n succeeded = true;\n\n if (hasCallback) {\n try {\n value = callback(detail);\n } catch (e) {\n succeeded = false;\n error = e;\n }\n\n if (promise === value) {\n reject(promise, cannotReturnOwn());\n return;\n }\n } else {\n value = detail;\n }\n\n if (promise._state !== PENDING) {\n // noop\n } else if (hasCallback && succeeded) {\n resolve(promise, value);\n } else if (succeeded === false) {\n reject(promise, error);\n } else if (settled === FULFILLED) {\n fulfill(promise, value);\n } else if (settled === REJECTED) {\n reject(promise, value);\n }\n}\n\nfunction initializePromise(promise, resolver) {\n try {\n resolver(function resolvePromise(value) {\n resolve(promise, value);\n }, function rejectPromise(reason) {\n reject(promise, reason);\n });\n } catch (e) {\n reject(promise, e);\n }\n}\n\nvar id = 0;\nfunction nextId() {\n return id++;\n}\n\nfunction makePromise(promise) {\n promise[PROMISE_ID] = id++;\n promise._state = undefined;\n promise._result = undefined;\n promise._subscribers = [];\n}\n\nexport { nextId, makePromise, noop, resolve, reject, fulfill, subscribe, publish, publishRejection, initializePromise, invokeCallback, FULFILLED, REJECTED, PENDING, handleMaybeThenable };","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isArray, isMaybeThenable } from './utils';\nimport { noop, reject, fulfill, subscribe, FULFILLED, REJECTED, PENDING, handleMaybeThenable } from './-internal';\n\nimport then from './then';\nimport Promise from './promise';\nimport originalResolve from './promise/resolve';\nimport originalThen from './then';\nimport { makePromise, PROMISE_ID } from './-internal';\n\nfunction validationError() {\n return new Error('Array Methods must be provided an Array');\n};\n\nvar Enumerator = function () {\n function Enumerator(Constructor, input) {\n this._instanceConstructor = Constructor;\n this.promise = new Constructor(noop);\n\n if (!this.promise[PROMISE_ID]) {\n makePromise(this.promise);\n }\n\n if (isArray(input)) {\n this.length = input.length;\n this._remaining = 
input.length;\n\n this._result = new Array(this.length);\n\n if (this.length === 0) {\n fulfill(this.promise, this._result);\n } else {\n this.length = this.length || 0;\n this._enumerate(input);\n if (this._remaining === 0) {\n fulfill(this.promise, this._result);\n }\n }\n } else {\n reject(this.promise, validationError());\n }\n }\n\n Enumerator.prototype._enumerate = function _enumerate(input) {\n for (var i = 0; this._state === PENDING && i < input.length; i++) {\n this._eachEntry(input[i], i);\n }\n };\n\n Enumerator.prototype._eachEntry = function _eachEntry(entry, i) {\n var c = this._instanceConstructor;\n var resolve = c.resolve;\n\n\n if (resolve === originalResolve) {\n var _then = void 0;\n var error = void 0;\n var didError = false;\n try {\n _then = entry.then;\n } catch (e) {\n didError = true;\n error = e;\n }\n\n if (_then === originalThen && entry._state !== PENDING) {\n this._settledAt(entry._state, i, entry._result);\n } else if (typeof _then !== 'function') {\n this._remaining--;\n this._result[i] = entry;\n } else if (c === Promise) {\n var promise = new c(noop);\n if (didError) {\n reject(promise, error);\n } else {\n handleMaybeThenable(promise, entry, _then);\n }\n this._willSettleAt(promise, i);\n } else {\n this._willSettleAt(new c(function (resolve) {\n return resolve(entry);\n }), i);\n }\n } else {\n this._willSettleAt(resolve(entry), i);\n }\n };\n\n Enumerator.prototype._settledAt = function _settledAt(state, i, value) {\n var promise = this.promise;\n\n\n if (promise._state === PENDING) {\n this._remaining--;\n\n if (state === REJECTED) {\n reject(promise, value);\n } else {\n this._result[i] = value;\n }\n }\n\n if (this._remaining === 0) {\n fulfill(promise, this._result);\n }\n };\n\n Enumerator.prototype._willSettleAt = function _willSettleAt(promise, i) {\n var enumerator = this;\n\n subscribe(promise, undefined, function (value) {\n return enumerator._settledAt(FULFILLED, i, value);\n }, function (reason) {\n return enumerator._settledAt(REJECTED, i, reason);\n });\n };\n\n return Enumerator;\n}();\n\nexport default Enumerator;\n;","import Enumerator from '../enumerator';\n\n/**\n `Promise.all` accepts an array of promises, and returns a new promise which\n is fulfilled with an array of fulfillment values for the passed promises, or\n rejected with the reason of the first passed promise to be rejected. It casts all\n elements of the passed iterable to promises as it runs this algorithm.\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = resolve(2);\n let promise3 = resolve(3);\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // The array here would be [ 1, 2, 3 ];\n });\n ```\n\n If any of the `promises` given to `all` are rejected, the first promise\n that is rejected will be given as an argument to the returned promises's\n rejection handler. 
For example:\n\n Example:\n\n ```javascript\n let promise1 = resolve(1);\n let promise2 = reject(new Error(\"2\"));\n let promise3 = reject(new Error(\"3\"));\n let promises = [ promise1, promise2, promise3 ];\n\n Promise.all(promises).then(function(array){\n // Code here never runs because there are rejected promises!\n }, function(error) {\n // error.message === \"2\"\n });\n ```\n\n @method all\n @static\n @param {Array} entries array of promises\n @param {String} label optional string for labeling the promise.\n Useful for tooling.\n @return {Promise} promise that is fulfilled when all `promises` have been\n fulfilled, or rejected if any of them become rejected.\n @static\n*/\nexport default function all(entries) {\n return new Enumerator(this, entries).promise;\n}","import { isArray } from \"../utils\";\n\n/**\n `Promise.race` returns a new promise which is settled in the same way as the\n first passed promise to settle.\n\n Example:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 2');\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // result === 'promise 2' because it was resolved before promise1\n // was resolved.\n });\n ```\n\n `Promise.race` is deterministic in that only the state of the first\n settled promise matters. For example, even if other promises given to the\n `promises` array argument are resolved, but the first settled promise has\n become rejected before the other promises became fulfilled, the returned\n promise will become rejected:\n\n ```javascript\n let promise1 = new Promise(function(resolve, reject){\n setTimeout(function(){\n resolve('promise 1');\n }, 200);\n });\n\n let promise2 = new Promise(function(resolve, reject){\n setTimeout(function(){\n reject(new Error('promise 2'));\n }, 100);\n });\n\n Promise.race([promise1, promise2]).then(function(result){\n // Code here never runs\n }, function(reason){\n // reason.message === 'promise 2' because promise 2 became rejected before\n // promise 1 became fulfilled\n });\n ```\n\n An example real-world use case is implementing timeouts:\n\n ```javascript\n Promise.race([ajax('foo.json'), timeout(5000)])\n ```\n\n @method race\n @static\n @param {Array} promises array of promises to observe\n Useful for tooling.\n @return {Promise} a promise which settles in the same way as the first passed\n promise to settle.\n*/\nexport default function race(entries) {\n /*jshint validthis:true */\n var Constructor = this;\n\n if (!isArray(entries)) {\n return new Constructor(function (_, reject) {\n return reject(new TypeError('You must pass an array to race.'));\n });\n } else {\n return new Constructor(function (resolve, reject) {\n var length = entries.length;\n for (var i = 0; i < length; i++) {\n Constructor.resolve(entries[i]).then(resolve, reject);\n }\n });\n }\n}","import { noop, reject as _reject } from '../-internal';\n\n/**\n `Promise.reject` returns a promise rejected with the passed `reason`.\n It is shorthand for the following:\n\n ```javascript\n let promise = new Promise(function(resolve, reject){\n reject(new Error('WHOOPS'));\n });\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n Instead of writing the above, your code now simply becomes the following:\n\n 
```javascript\n let promise = Promise.reject(new Error('WHOOPS'));\n\n promise.then(function(value){\n // Code here doesn't run because the promise is rejected!\n }, function(reason){\n // reason.message === 'WHOOPS'\n });\n ```\n\n @method reject\n @static\n @param {Any} reason value that the returned promise will be rejected with.\n Useful for tooling.\n @return {Promise} a promise rejected with the given `reason`.\n*/\nexport default function reject(reason) {\n /*jshint validthis:true */\n var Constructor = this;\n var promise = new Constructor(noop);\n _reject(promise, reason);\n return promise;\n}","function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nimport { isFunction } from './utils';\nimport { noop, nextId, PROMISE_ID, initializePromise } from './-internal';\nimport { asap, setAsap, setScheduler } from './asap';\n\nimport all from './promise/all';\nimport race from './promise/race';\nimport Resolve from './promise/resolve';\nimport Reject from './promise/reject';\nimport then from './then';\n\nfunction needsResolver() {\n throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');\n}\n\nfunction needsNew() {\n throw new TypeError(\"Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.\");\n}\n\n/**\n Promise objects represent the eventual result of an asynchronous operation. The\n primary way of interacting with a promise is through its `then` method, which\n registers callbacks to receive either a promise's eventual value or the reason\n why the promise cannot be fulfilled.\n\n Terminology\n -----------\n\n - `promise` is an object or function with a `then` method whose behavior conforms to this specification.\n - `thenable` is an object or function that defines a `then` method.\n - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).\n - `exception` is a value that is thrown using the throw statement.\n - `reason` is a value that indicates why a promise was rejected.\n - `settled` the final resting state of a promise, fulfilled or rejected.\n\n A promise can be in one of three states: pending, fulfilled, or rejected.\n\n Promises that are fulfilled have a fulfillment value and are in the fulfilled\n state. Promises that are rejected have a rejection reason and are in the\n rejected state. A fulfillment value is never a thenable.\n\n Promises can also be said to *resolve* a value. If this value is also a\n promise, then the original promise's settled state will match the value's\n settled state. 
So a promise that *resolves* a promise that rejects will\n itself reject, and a promise that *resolves* a promise that fulfills will\n itself fulfill.\n\n\n Basic Usage:\n ------------\n\n ```js\n let promise = new Promise(function(resolve, reject) {\n // on success\n resolve(value);\n\n // on failure\n reject(reason);\n });\n\n promise.then(function(value) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Advanced Usage:\n ---------------\n\n Promises shine when abstracting away asynchronous interactions such as\n `XMLHttpRequest`s.\n\n ```js\n function getJSON(url) {\n return new Promise(function(resolve, reject){\n let xhr = new XMLHttpRequest();\n\n xhr.open('GET', url);\n xhr.onreadystatechange = handler;\n xhr.responseType = 'json';\n xhr.setRequestHeader('Accept', 'application/json');\n xhr.send();\n\n function handler() {\n if (this.readyState === this.DONE) {\n if (this.status === 200) {\n resolve(this.response);\n } else {\n reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));\n }\n }\n };\n });\n }\n\n getJSON('/posts.json').then(function(json) {\n // on fulfillment\n }, function(reason) {\n // on rejection\n });\n ```\n\n Unlike callbacks, promises are great composable primitives.\n\n ```js\n Promise.all([\n getJSON('/posts'),\n getJSON('/comments')\n ]).then(function(values){\n values[0] // => postsJSON\n values[1] // => commentsJSON\n\n return values;\n });\n ```\n\n @class Promise\n @param {Function} resolver\n Useful for tooling.\n @constructor\n*/\n\nvar Promise = function () {\n function Promise(resolver) {\n this[PROMISE_ID] = nextId();\n this._result = this._state = undefined;\n this._subscribers = [];\n\n if (noop !== resolver) {\n typeof resolver !== 'function' && needsResolver();\n this instanceof Promise ? initializePromise(this, resolver) : needsNew();\n }\n }\n\n /**\n The primary way of interacting with a promise is through its `then` method,\n which registers callbacks to receive either a promise's eventual value or the\n reason why the promise cannot be fulfilled.\n ```js\n findUser().then(function(user){\n // user is available\n }, function(reason){\n // user is unavailable, and you are given the reason why\n });\n ```\n Chaining\n --------\n The return value of `then` is itself a promise. 
This second, 'downstream'\n promise is resolved with the return value of the first promise's fulfillment\n or rejection handler, or rejected if the handler throws an exception.\n ```js\n findUser().then(function (user) {\n return user.name;\n }, function (reason) {\n return 'default name';\n }).then(function (userName) {\n // If `findUser` fulfilled, `userName` will be the user's name, otherwise it\n // will be `'default name'`\n });\n findUser().then(function (user) {\n throw new Error('Found user, but still unhappy');\n }, function (reason) {\n throw new Error('`findUser` rejected and we're unhappy');\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.\n // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.\n });\n ```\n If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.\n ```js\n findUser().then(function (user) {\n throw new PedagogicalException('Upstream error');\n }).then(function (value) {\n // never reached\n }).then(function (value) {\n // never reached\n }, function (reason) {\n // The `PedgagocialException` is propagated all the way down to here\n });\n ```\n Assimilation\n ------------\n Sometimes the value you want to propagate to a downstream promise can only be\n retrieved asynchronously. This can be achieved by returning a promise in the\n fulfillment or rejection handler. The downstream promise will then be pending\n until the returned promise is settled. This is called *assimilation*.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // The user's comments are now available\n });\n ```\n If the assimliated promise rejects, then the downstream promise will also reject.\n ```js\n findUser().then(function (user) {\n return findCommentsByAuthor(user);\n }).then(function (comments) {\n // If `findCommentsByAuthor` fulfills, we'll have the value here\n }, function (reason) {\n // If `findCommentsByAuthor` rejects, we'll have the reason here\n });\n ```\n Simple Example\n --------------\n Synchronous Example\n ```javascript\n let result;\n try {\n result = findResult();\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n findResult(function(result, err){\n if (err) {\n // failure\n } else {\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findResult().then(function(result){\n // success\n }, function(reason){\n // failure\n });\n ```\n Advanced Example\n --------------\n Synchronous Example\n ```javascript\n let author, books;\n try {\n author = findAuthor();\n books = findBooksByAuthor(author);\n // success\n } catch(reason) {\n // failure\n }\n ```\n Errback Example\n ```js\n function foundBooks(books) {\n }\n function failure(reason) {\n }\n findAuthor(function(author, err){\n if (err) {\n failure(err);\n // failure\n } else {\n try {\n findBoooksByAuthor(author, function(books, err) {\n if (err) {\n failure(err);\n } else {\n try {\n foundBooks(books);\n } catch(reason) {\n failure(reason);\n }\n }\n });\n } catch(error) {\n failure(err);\n }\n // success\n }\n });\n ```\n Promise Example;\n ```javascript\n findAuthor().\n then(findBooksByAuthor).\n then(function(books){\n // found books\n }).catch(function(reason){\n // something went wrong\n });\n ```\n @method then\n @param {Function} onFulfilled\n @param {Function} onRejected\n Useful for tooling.\n @return 
{Promise}\n */\n\n /**\n `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same\n as the catch block of a try/catch statement.\n ```js\n function findAuthor(){\n throw new Error('couldn't find that author');\n }\n // synchronous\n try {\n findAuthor();\n } catch(reason) {\n // something went wrong\n }\n // async with promises\n findAuthor().catch(function(reason){\n // something went wrong\n });\n ```\n @method catch\n @param {Function} onRejection\n Useful for tooling.\n @return {Promise}\n */\n\n\n Promise.prototype.catch = function _catch(onRejection) {\n return this.then(null, onRejection);\n };\n\n /**\n `finally` will be invoked regardless of the promise's fate just as native\n try/catch/finally behaves\n \n Synchronous example:\n \n ```js\n findAuthor() {\n if (Math.random() > 0.5) {\n throw new Error();\n }\n return new Author();\n }\n \n try {\n return findAuthor(); // succeed or fail\n } catch(error) {\n return findOtherAuther();\n } finally {\n // always runs\n // doesn't affect the return value\n }\n ```\n \n Asynchronous example:\n \n ```js\n findAuthor().catch(function(reason){\n return findOtherAuther();\n }).finally(function(){\n // author was either found, or not\n });\n ```\n \n @method finally\n @param {Function} callback\n @return {Promise}\n */\n\n\n Promise.prototype.finally = function _finally(callback) {\n var promise = this;\n var constructor = promise.constructor;\n\n if (isFunction(callback)) {\n return promise.then(function (value) {\n return constructor.resolve(callback()).then(function () {\n return value;\n });\n }, function (reason) {\n return constructor.resolve(callback()).then(function () {\n throw reason;\n });\n });\n }\n\n return promise.then(callback, callback);\n };\n\n return Promise;\n}();\n\nPromise.prototype.then = then;\nexport default Promise;\nPromise.all = all;\nPromise.race = race;\nPromise.resolve = Resolve;\nPromise.reject = Reject;\nPromise._setScheduler = setScheduler;\nPromise._setAsap = setAsap;\nPromise._asap = asap;","/*global self*/\nimport Promise from './promise';\n\nexport default function polyfill() {\n var local = void 0;\n\n if (typeof global !== 'undefined') {\n local = global;\n } else if (typeof self !== 'undefined') {\n local = self;\n } else {\n try {\n local = Function('return this')();\n } catch (e) {\n throw new Error('polyfill failed because global object is unavailable in this environment');\n }\n }\n\n var P = local.Promise;\n\n if (P) {\n var promiseToString = null;\n try {\n promiseToString = Object.prototype.toString.call(P.resolve());\n } catch (e) {\n // silently ignored\n }\n\n if (promiseToString === '[object Promise]' && !P.cast) {\n return;\n }\n }\n\n local.Promise = Promise;\n}","import Promise from './es6-promise/promise';\nimport polyfill from './es6-promise/polyfill';\n\n// Strange compat..\nPromise.polyfill = polyfill;\nPromise.Promise = Promise;\nexport default 
Promise;"],"names":["resolve","_resolve","then","originalThen","originalResolve","Promise","reject","_reject","Resolve","Reject"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNO,SAAS,gBAAgB,CAAC,CAAC,EAAE;EAClC,IAAI,IAAI,GAAG,OAAO,CAAC,CAAC;EACpB,OAAO,CAAC,KAAK,IAAI,KAAK,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,UAAU,CAAC,CAAC;CACjE;;AAED,AAAO,SAAS,UAAU,CAAC,CAAC,EAAE;EAC5B,OAAO,OAAO,CAAC,KAAK,UAAU,CAAC;CAChC;;AAED,AAEC;;AAED,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC;AACtB,IAAI,KAAK,CAAC,OAAO,EAAE;EACjB,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC;CAC1B,MAAM;EACL,QAAQ,GAAG,UAAU,CAAC,EAAE;IACtB,OAAO,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,gBAAgB,CAAC;GAC/D,CAAC;CACH;;AAED,AAAO,IAAI,OAAO,GAAG,QAAQ;;ACtB7B,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAI,SAAS,GAAG,KAAK,CAAC,CAAC;AACvB,IAAI,iBAAiB,GAAG,KAAK,CAAC,CAAC;;AAE/B,AAAO,IAAI,IAAI,GAAG,SAAS,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;EAC7C,KAAK,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC;EACtB,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;EACrB,GAAG,IAAI,CAAC,CAAC;EACT,IAAI,GAAG,KAAK,CAAC,EAAE;;;;IAIb,IAAI,iBAAiB,EAAE;MACrB,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAC1B,MAAM;MACL,aAAa,EAAE,CAAC;KACjB;GACF;CACF,CAAC;;AAEF,AAAO,SAAS,YAAY,CAAC,UAAU,EAAE;EACvC,iBAAiB,GAAG,UAAU,CAAC;CAChC;;AAED,AAAO,SAAS,OAAO,CAAC,MAAM,EAAE;EAC9B,IAAI,GAAG,MAAM,CAAC;CACf;;AAED,IAAI,aAAa,GAAG,OAAO,MAAM,KAAK,WAAW,GAAG,MAAM,GAAG,SAAS,CAAC;AACvE,IAAI,aAAa,GAAG,aAAa,IAAI,EAAE,CAAC;AACxC,IAAI,uBAAuB,GAAG,aAAa,CAAC,gBAAgB,IAAI,aAAa,CAAC,sBAAsB,CAAC;AACrG,IAAI,MAAM,GAAG,OAAO,IAAI,KAAK,WAAW,IAAI,OAAO,OAAO,KAAK,WAAW,IAAI,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,kBAAkB,CAAC;;;AAG/H,IAAI,QAAQ,GAAG,OAAO,iBAAiB,KAAK,WAAW,IAAI,OAAO,aAAa,KAAK,WAAW,IAAI,OAAO,cAAc,KAAK,WAAW,CAAC;;;AAGzI,SAAS,WAAW,GAAG;;;EAGrB,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;GAChC,CAAC;CACH;;;AAGD,SAAS,aAAa,GAAG;EACvB,IAAI,OAAO,SAAS,KAAK,WAAW,EAAE;IACpC,OAAO,YAAY;MACjB,SAAS,CAAC,KAAK,CAAC,CAAC;KAClB,CAAC;GACH;;EAED,OAAO,aAAa,EAAE,CAAC;CACxB;;AAED,SAAS,mBAAmB,GAAG;EAC7B,IAAI,UAAU,GAAG,CAAC,CAAC;EACnB,IAAI,QAAQ,GAAG,IAAI,uBAAuB,CAAC,KAAK,CAAC,CAAC;EAClD,IAAI,IAAI,GAAG,QAAQ,CAAC,cAAc,CAAC,EAAE,CAAC,CAAC;EACvC,QAAQ,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;;EAEhD,OAAO,YAAY;IACjB,IAAI,CAAC,IAAI,GAAG,UAAU,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC;GAC3C,CAAC;CACH;;;AAGD,SAAS,iBAAiB,GAAG;EAC3B,IAAI,OAAO,GAAG,IAAI,cAAc,EAAE,CAAC;EACnC,OAAO,CAAC,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC;EAChC,OAAO,YAAY;IACjB,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;GACrC,CAAC;CACH;;AAED,SAAS,aAAa,GAAG;;;EAGvB,IAAI,gBAAgB,GAAG,UAAU,CAAC;EAClC,OAAO,YAAY;IACjB,OAAO,gBAAgB,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;GACnC,CAAC;CACH;;AAED,IAAI,KAAK,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC;AAC5B,SAAS,KAAK,GAAG;EACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,EAAE;IAC/B,IAAI,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,GAAG,GAAG,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;;IAEvB,QAAQ,CAAC,GAAG,CAAC,CAAC;;IAEd,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;IACrB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;GAC1B;;EAED,GAAG,GAAG,CAAC,CAAC;CACT;;AAED,SAAS,YAAY,GAAG;EACtB,IAAI;IACF,IAAI,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;IACvD,SAAS,GAAG,KAAK,CAAC,SAAS,IAAI,KAAK,CAAC,YAAY,CAAC;IAClD,OAAO,aAAa,EAAE,CAAC;GACxB,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,aAAa,EAAE,CAAC;GACxB;CACF;;AAED,IAAI,aAAa,GAAG,KAAK,CAAC,CAAC;;AAE3B,IAAI,MAAM,EAAE;EACV,aAAa,GAAG,WAAW,EAAE,CAAC;CAC/B,MAAM,IAAI,uBAAuB,EAAE;EAClC,aAAa,GAAG,mBAAmB,EAAE,CAAC;CACvC,MAAM,IAAI,QAAQ,EAAE;EACnB,aAAa,GAAG,iBAAiB,EAAE,CAAC;CACrC,MAAM,IAAI,aAAa,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;EACvE,aAAa,GAAG,YAAY,EAAE,CAAC;CAChC,MAAM;EACL,aA
Aa,GAAG,aAAa,EAAE,CAAC;;;CACjC,DCtHc,SAAS,IAAI,CAAC,aAAa,EAAE,WAAW,EAAE;EACvD,IAAI,MAAM,GAAG,IAAI,CAAC;;EAElB,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;;EAEvC,IAAI,KAAK,CAAC,UAAU,CAAC,KAAK,SAAS,EAAE;IACnC,WAAW,CAAC,KAAK,CAAC,CAAC;GACpB;;EAED,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;;;EAG3B,IAAI,MAAM,EAAE;IACV,IAAI,QAAQ,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,YAAY;MACf,OAAO,cAAc,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,OAAO,CAAC,CAAC;KAChE,CAAC,CAAC;GACJ,MAAM;IACL,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,CAAC,CAAC;GACtD;;EAED,OAAO,KAAK,CAAC;;;CACd,DCxBD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BA,AAAe,SAASA,SAAO,CAAC,MAAM,EAAE;;EAEtC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,CAAC,WAAW,KAAK,WAAW,EAAE;IAC9E,OAAO,MAAM,CAAC;GACf;;EAED,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,OAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EAC1B,OAAO,OAAO,CAAC;;;CAChB,DCrCM,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;;AAEhE,SAAS,IAAI,GAAG,EAAE;;AAElB,IAAI,OAAO,GAAG,KAAK,CAAC,CAAC;AACrB,IAAI,SAAS,GAAG,CAAC,CAAC;AAClB,IAAI,QAAQ,GAAG,CAAC,CAAC;;AAEjB,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,0CAA0C,CAAC,CAAC;CAClE;;AAED,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,SAAS,CAAC,sDAAsD,CAAC,CAAC;CAC9E;;AAED,SAAS,OAAO,CAACC,OAAI,EAAE,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,EAAE;EAClE,IAAI;IACFA,OAAI,CAAC,IAAI,CAAC,KAAK,EAAE,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;GACxD,CAAC,OAAO,CAAC,EAAE;IACV,OAAO,CAAC,CAAC;GACV;CACF;;AAED,SAAS,qBAAqB,CAAC,OAAO,EAAE,QAAQ,EAAEA,OAAI,EAAE;EACtD,IAAI,CAAC,UAAU,OAAO,EAAE;IACtB,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,IAAI,KAAK,GAAG,OAAO,CAACA,OAAI,EAAE,QAAQ,EAAE,UAAU,KAAK,EAAE;MACnD,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;MACd,IAAI,QAAQ,KAAK,KAAK,EAAE;QACtB,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB,MAAM;QACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACzB;KACF,EAAE,UAAU,MAAM,EAAE;MACnB,IAAI,MAAM,EAAE;QACV,OAAO;OACR;MACD,MAAM,GAAG,IAAI,CAAC;;MAEd,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,EAAE,UAAU,IAAI,OAAO,CAAC,MAAM,IAAI,kBAAkB,CAAC,CAAC,CAAC;;IAExD,IAAI,CAAC,MAAM,IAAI,KAAK,EAAE;MACpB,MAAM,GAAG,IAAI,CAAC;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACxB;GACF,EAAE,OAAO,CAAC,CAAC;CACb;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI,QAAQ,CAAC,MAAM,KAAK,SAAS,EAAE;IACjC,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACpC,MAAM,IAAI,QAAQ,CAAC,MAAM,KAAK,QAAQ,EAAE;IACvC,MAAM,CAAC,OAAO,EAAE,QAAQ,CAAC,OAAO,CAAC,CAAC;GACnC,MAAM;IACL,SAAS,CAAC,QAAQ,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC9C,OAAO,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KAChC,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KAChC,CAAC,CAAC;GACJ;CACF;;AAED,SAAS,mBAAmB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,EAAE;EACzD,IAAI,aAAa,CAAC,WAAW,KAAK,OAAO,CAAC,WAAW,IAAIA,OAAI,KAAKC,IAAY,IAAI,aAAa,CAAC,WAAW,CAAC,OAAO,KAAKC,SAAe,EAAE;IACvI,iBAAiB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;GAC3C,MAAM;IACL,IAAIF,OAAI,KAAK,SAAS,EAAE;MACtB,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC,MAAM,IAAI,UAAU,CAACA,OAAI,CAAC,EAAE;MAC3B,qBAAqB,CAAC,OAAO,EAAE,aAAa,EAAEA,OAAI,CAAC,CAAC;KACrD,MAAM;MACL,OAAO,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;KACjC;GACF;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,KAAK,KAAK,EAAE;IACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;GACpC,MAAM,IAAI,gBAAgB,CAAC,KAAK,CAAC,EAAE;IAClC,IAAIA,OAAI,GAAG,KAAK,CAAC,CAAC;IAClB,IAAI;MACFA,OAAI,GAAG,KAAK,CAAC,IAAI,CAAC;KACnB,CAAC,OAAO,KAAK,EAAE;MACd,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;MACvB,OAAO;KACR;IACD,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAEA,OAAI,CAAC,CAAC;GAC3C,MAAM;IACL,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB;CACF;;AAED,SAAS,gBAAgB,CAAC,OAAO,EAAE;EACjC,IAAI
,OAAO,CAAC,QAAQ,EAAE;IACpB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;GACnC;;EAED,OAAO,CAAC,OAAO,CAAC,CAAC;CAClB;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE,KAAK,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;;EAED,OAAO,CAAC,OAAO,GAAG,KAAK,CAAC;EACxB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;;EAE3B,IAAI,OAAO,CAAC,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;IACrC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE;EAC/B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;IAC9B,OAAO;GACR;EACD,OAAO,CAAC,MAAM,GAAG,QAAQ,CAAC;EAC1B,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC;;EAEzB,IAAI,CAAC,gBAAgB,EAAE,OAAO,CAAC,CAAC;CACjC;;AAED,SAAS,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,aAAa,EAAE,WAAW,EAAE;EAC5D,IAAI,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;EACvC,IAAI,MAAM,GAAG,YAAY,CAAC,MAAM,CAAC;;;EAGjC,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC;;EAEvB,YAAY,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC;EAC7B,YAAY,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,aAAa,CAAC;EACjD,YAAY,CAAC,MAAM,GAAG,QAAQ,CAAC,GAAG,WAAW,CAAC;;EAE9C,IAAI,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE;IACjC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;GACvB;CACF;;AAED,SAAS,OAAO,CAAC,OAAO,EAAE;EACxB,IAAI,WAAW,GAAG,OAAO,CAAC,YAAY,CAAC;EACvC,IAAI,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC;;EAE7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;IAC5B,OAAO;GACR;;EAED,IAAI,KAAK,GAAG,KAAK,CAAC;MACd,QAAQ,GAAG,KAAK,CAAC;MACjB,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;;EAE7B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,IAAI,CAAC,EAAE;IAC9C,KAAK,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC;IACvB,QAAQ,GAAG,WAAW,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC;;IAEpC,IAAI,KAAK,EAAE;MACT,cAAc,CAAC,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;KAClD,MAAM;MACL,QAAQ,CAAC,MAAM,CAAC,CAAC;KAClB;GACF;;EAED,OAAO,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;CACjC;;AAED,SAAS,cAAc,CAAC,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE;EAC1D,IAAI,WAAW,GAAG,UAAU,CAAC,QAAQ,CAAC;MAClC,KAAK,GAAG,KAAK,CAAC;MACd,KAAK,GAAG,KAAK,CAAC;MACd,SAAS,GAAG,IAAI,CAAC;;EAErB,IAAI,WAAW,EAAE;IACf,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC;KAC1B,CAAC,OAAO,CAAC,EAAE;MACV,SAAS,GAAG,KAAK,CAAC;MAClB,KAAK,GAAG,CAAC,CAAC;KACX;;IAED,IAAI,OAAO,KAAK,KAAK,EAAE;MACrB,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;MACnC,OAAO;KACR;GACF,MAAM;IACL,KAAK,GAAG,MAAM,CAAC;GAChB;;EAED,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;;GAE/B,MAAM,IAAI,WAAW,IAAI,SAAS,EAAE;IACnC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,SAAS,KAAK,KAAK,EAAE;IAC9B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB,MAAM,IAAI,OAAO,KAAK,SAAS,EAAE;IAChC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACzB,MAAM,IAAI,OAAO,KAAK,QAAQ,EAAE;IAC/B,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;GACxB;CACF;;AAED,SAAS,iBAAiB,CAAC,OAAO,EAAE,QAAQ,EAAE;EAC5C,IAAI;IACF,QAAQ,CAAC,SAAS,cAAc,CAAC,KAAK,EAAE;MACtC,OAAO,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;KACzB,EAAE,SAAS,aAAa,CAAC,MAAM,EAAE;MAChC,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;KACzB,CAAC,CAAC;GACJ,CAAC,OAAO,CAAC,EAAE;IACV,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;GACpB;CACF;;AAED,IAAI,EAAE,GAAG,CAAC,CAAC;AACX,SAAS,MAAM,GAAG;EAChB,OAAO,EAAE,EAAE,CAAC;CACb;;AAED,SAAS,WAAW,CAAC,OAAO,EAAE;EAC5B,OAAO,CAAC,UAAU,CAAC,GAAG,EAAE,EAAE,CAAC;EAC3B,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;EAC3B,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;EAC5B,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;CAC3B;;AChOD,SAAS,eAAe,GAAG;EACzB,OAAO,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;CAC7D,AAAC;;AAEF,IAAI,UAAU,GAAG,YAAY;EAC3B,SAAS,UAAU,CAAC,WAAW,EAAE,KAAK,EAAE;IACtC,IAAI,CAAC,oBAAoB,GAAG,WAAW,CAAC;IACxC,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;;IAErC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;MAC7B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;;IAED,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;MAClB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC;MAC3B,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,MAAM,CAAC;;MAE/B,IAAI,CAAC,OAAO,
GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;;MAEtC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;OACrC,MAAM;QACL,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;QACvB,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;UACzB,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;SACrC;OACF;KACF,MAAM;MACL,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,CAAC;KACzC;GACF;;EAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE;IAC3D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;MAChE,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC9B;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE;IAC9D,IAAI,CAAC,GAAG,IAAI,CAAC,oBAAoB,CAAC;IAClC,IAAIF,UAAO,GAAG,CAAC,CAAC,OAAO,CAAC;;;IAGxB,IAAIA,UAAO,KAAKI,SAAe,EAAE;MAC/B,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;MACnB,IAAI,QAAQ,GAAG,KAAK,CAAC;MACrB,IAAI;QACF,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC;OACpB,CAAC,OAAO,CAAC,EAAE;QACV,QAAQ,GAAG,IAAI,CAAC;QAChB,KAAK,GAAG,CAAC,CAAC;OACX;;MAED,IAAI,KAAK,KAAKD,IAAY,IAAI,KAAK,CAAC,MAAM,KAAK,OAAO,EAAE;QACtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;OACjD,MAAM,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;QACtC,IAAI,CAAC,UAAU,EAAE,CAAC;QAClB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB,MAAM,IAAI,CAAC,KAAKE,SAAO,EAAE;QACxB,IAAI,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,QAAQ,EAAE;UACZ,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;SACxB,MAAM;UACL,mBAAmB,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,CAAC;SAC5C;QACD,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;OAChC,MAAM;QACL,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,UAAUL,UAAO,EAAE;UAC1C,OAAOA,UAAO,CAAC,KAAK,CAAC,CAAC;SACvB,CAAC,EAAE,CAAC,CAAC,CAAC;OACR;KACF,MAAM;MACL,IAAI,CAAC,aAAa,CAACA,UAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC;KACvC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,SAAS,UAAU,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAAE;IACrE,IAAI,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;;;IAG3B,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;MAC9B,IAAI,CAAC,UAAU,EAAE,CAAC;;MAElB,IAAI,KAAK,KAAK,QAAQ,EAAE;QACtB,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;OACxB,MAAM;QACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC;OACzB;KACF;;IAED,IAAI,IAAI,CAAC,UAAU,KAAK,CAAC,EAAE;MACzB,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;KAChC;GACF,CAAC;;EAEF,UAAU,CAAC,SAAS,CAAC,aAAa,GAAG,SAAS,aAAa,CAAC,OAAO,EAAE,CAAC,EAAE;IACtE,IAAI,UAAU,GAAG,IAAI,CAAC;;IAEtB,SAAS,CAAC,OAAO,EAAE,SAAS,EAAE,UAAU,KAAK,EAAE;MAC7C,OAAO,UAAU,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,CAAC,CAAC;KACnD,EAAE,UAAU,MAAM,EAAE;MACnB,OAAO,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;KACnD,CAAC,CAAC;GACJ,CAAC;;EAEF,OAAO,UAAU,CAAC;CACnB,EAAE;;ACrHH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CA,AAAe,SAAS,GAAG,CAAC,OAAO,EAAE;EACnC,OAAO,IAAI,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC,OAAO,CAAC;;;CAC9C,DCjDD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiEA,AAAe,SAAS,IAAI,CAAC,OAAO,EAAE;;EAEpC,IAAI,WAAW,GAAG,IAAI,CAAC;;EAEvB,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IACrB,OAAO,IAAI,WAAW,CAAC,UAAU,CAAC,EAAE,MAAM,EAAE;MAC1C,OAAO,MAAM,CAAC,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC,CAAC;KACjE,CAAC,CAAC;GACJ,MAAM;IACL,OAAO,IAAI,WAAW,CAAC,UAAU,OAAO,EAAE,MAAM,EAAE;MAChD,IAAI,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;MAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;QAC/B,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;OACvD;KACF,CAAC,CAAC;GACJ;;;CACF,DCjFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCA,AAAe,SAASM,QAAM,CAAC,MAAM,EAAE;;EAErC,IAAI,WAA
W,GAAG,IAAI,CAAC;EACvB,IAAI,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,CAAC;EACpCC,MAAO,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;EACzB,OAAO,OAAO,CAAC;;;CAChB,DC9BD,SAAS,aAAa,GAAG;EACvB,MAAM,IAAI,SAAS,CAAC,oFAAoF,CAAC,CAAC;CAC3G;;AAED,SAAS,QAAQ,GAAG;EAClB,MAAM,IAAI,SAAS,CAAC,uHAAuH,CAAC,CAAC;CAC9I;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0GD,IAAIF,SAAO,GAAG,YAAY;EACxB,SAAS,OAAO,CAAC,QAAQ,EAAE;IACzB,IAAI,CAAC,UAAU,CAAC,GAAG,MAAM,EAAE,CAAC;IAC5B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;IACvC,IAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;IAEvB,IAAI,IAAI,KAAK,QAAQ,EAAE;MACrB,OAAO,QAAQ,KAAK,UAAU,IAAI,aAAa,EAAE,CAAC;MAClD,IAAI,YAAY,OAAO,GAAG,iBAAiB,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,QAAQ,EAAE,CAAC;KAC1E;GACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA4LD,OAAO,CAAC,SAAS,CAAC,KAAK,GAAG,SAAS,MAAM,CAAC,WAAW,EAAE;IACrD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;GACrC,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA0CF,OAAO,CAAC,SAAS,CAAC,OAAO,GAAG,SAAS,QAAQ,CAAC,QAAQ,EAAE;IACtD,IAAI,OAAO,GAAG,IAAI,CAAC;IACnB,IAAI,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;;IAEtC,IAAI,UAAU,CAAC,QAAQ,CAAC,EAAE;MACxB,OAAO,OAAO,CAAC,IAAI,CAAC,UAAU,KAAK,EAAE;QACnC,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,OAAO,KAAK,CAAC;SACd,CAAC,CAAC;OACJ,EAAE,UAAU,MAAM,EAAE;QACnB,OAAO,WAAW,CAAC,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC,IAAI,CAAC,YAAY;UACtD,MAAM,MAAM,CAAC;SACd,CAAC,CAAC;OACJ,CAAC,CAAC;KACJ;;IAED,OAAO,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;GACzC,CAAC;;EAEF,OAAO,OAAO,CAAC;CAChB,EAAE,CAAC;;AAEJA,SAAO,CAAC,SAAS,CAAC,IAAI,GAAG,IAAI,CAAC;AAC9B,AACAA,SAAO,CAAC,GAAG,GAAG,GAAG,CAAC;AAClBA,SAAO,CAAC,IAAI,GAAG,IAAI,CAAC;AACpBA,SAAO,CAAC,OAAO,GAAGG,SAAO,CAAC;AAC1BH,SAAO,CAAC,MAAM,GAAGI,QAAM,CAAC;AACxBJ,SAAO,CAAC,aAAa,GAAG,YAAY,CAAC;AACrCA,SAAO,CAAC,QAAQ,GAAG,OAAO,CAAC;AAC3BA,SAAO,CAAC,KAAK,GAAG,IAAI;;AC5YpB;AACA,AAEe,SAAS,QAAQ,GAAG;EACjC,IAAI,KAAK,GAAG,KAAK,CAAC,CAAC;;EAEnB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,KAAK,GAAG,MAAM,CAAC;GAChB,MAAM,IAAI,OAAO,IAAI,KAAK,WAAW,EAAE;IACtC,KAAK,GAAG,IAAI,CAAC;GACd,MAAM;IACL,IAAI;MACF,KAAK,GAAG,QAAQ,CAAC,aAAa,CAAC,EAAE,CAAC;KACnC,CAAC,OAAO,CAAC,EAAE;MACV,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;KAC7F;GACF;;EAED,IAAI,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC;;EAEtB,IAAI,CAAC,EAAE;IACL,IAAI,eAAe,GAAG,IAAI,CAAC;IAC3B,IAAI;MACF,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC/D,CAAC,OAAO,CAAC,EAAE;;KAEX;;IAED,IAAI,eAAe,KAAK,kBAAkB,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE;MACrD,OAAO;KACR;GACF;;EAED,KAAK,CAAC,OAAO,GAAGA,SAAO,CAAC;;;CACzB,DC/BD;AACAA,SAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC;AAC5BA,SAAO,CAAC,OAAO,GAAGA,SAAO,CAAC;;;;;;;;","file":"es6-promise.min.js"} \ No newline at end of file
diff --git a/node_modules/es6-promise/es6-promise.d.ts b/node_modules/es6-promise/es6-promise.d.ts
deleted file mode 100644
index e4200dfd0..000000000
--- a/node_modules/es6-promise/es6-promise.d.ts
+++ /dev/null
@@ -1,85 +0,0 @@
-export interface Thenable <R> {
- then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
- then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => void): Thenable<U>;
-}
-
-export class Promise <R> implements Thenable <R> {
- /**
- * If you call resolve in the body of the callback passed to the constructor,
- * your promise is fulfilled with the result object passed to resolve.
- * If you call reject, your promise is rejected with the object passed to reject.
- * For consistency and debugging (e.g. stack traces), the object should be an instanceof Error.
- * Any errors thrown in the constructor callback will be implicitly passed to reject().
- */
- constructor (callback: (resolve : (value?: R | Thenable<R>) => void, reject: (error?: any) => void) => void);
-
- /**
- * onFulfilled is called when/if "promise" resolves. onRejected is called when/if "promise" rejects.
- * Both are optional, if either/both are omitted the next onFulfilled/onRejected in the chain is called.
- * Both callbacks have a single parameter, the fulfillment value or rejection reason.
- * "then" returns a new promise equivalent to the value you return from onFulfilled/onRejected after being passed through Promise.resolve.
- * If an error is thrown in the callback, the returned promise rejects with that error.
- *
- * @param onFulfilled called when/if "promise" resolves
- * @param onRejected called when/if "promise" rejects
- */
- then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Promise<U>;
- then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => void): Promise<U>;
-
- /**
- * Sugar for promise.then(undefined, onRejected)
- *
- * @param onRejected called when/if "promise" rejects
- */
- catch <U> (onRejected?: (error: any) => U | Thenable<U>): Promise<U>;
-
- /**
- * onFinally is invoked when/if the "promise" settles (either rejects or fulfills).
- * The returned promise is settled when the `Thenable` returned by `onFinally` settles;
- * it is rejected if `onFinally` throws or rejects; otherwise it assumes the state of the
- * original Promise.
- *
- * @param onFinally called when/if "promise" settles
-
- */
- finally (onFinally?: () => any | Thenable<any>): Promise<R>;
-
- /**
- * Make a new promise from the thenable.
- * A thenable is promise-like insofar as it has a "then" method.
- */
- static resolve (): Promise<void>;
- static resolve <R> (value: R | Thenable<R>): Promise<R>;
-
- /**
- * Make a promise that rejects with `error`. For consistency and debugging (e.g. stack traces), `error` should be an instanceof Error.
- */
- static reject <R> (error: any): Promise<R>;
-
- /**
- * Make a promise that fulfills when every item in the array fulfills, and rejects if (and when) any item rejects.
- * The array passed to all can be a mixture of promise-like objects and other objects.
- * The fulfillment value is an array (in order) of fulfillment values. The rejection value is the first rejection value.
- */
- static all<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>, T5 | Thenable<T5>, T6 | Thenable<T6>, T7 | Thenable<T7>, T8 | Thenable<T8>, T9 | Thenable<T9>, T10 | Thenable<T10>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>;
- static all<T1, T2, T3, T4, T5, T6, T7, T8, T9>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>, T5 | Thenable<T5>, T6 | Thenable<T6>, T7 | Thenable<T7>, T8 | Thenable<T8>, T9 | Thenable<T9>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>;
- static all<T1, T2, T3, T4, T5, T6, T7, T8>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>, T5 | Thenable<T5>, T6 | Thenable<T6>, T7 | Thenable<T7>, T8 | Thenable<T8>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>;
- static all<T1, T2, T3, T4, T5, T6, T7>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>, T5 | Thenable<T5>, T6 | Thenable<T6>, T7 | Thenable<T7>]): Promise<[T1, T2, T3, T4, T5, T6, T7]>;
- static all<T1, T2, T3, T4, T5, T6>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>, T5 | Thenable<T5>, T6 | Thenable<T6>]): Promise<[T1, T2, T3, T4, T5, T6]>;
- static all<T1, T2, T3, T4, T5>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>, T5 | Thenable<T5>]): Promise<[T1, T2, T3, T4, T5]>;
- static all<T1, T2, T3, T4>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>, T4 | Thenable <T4>]): Promise<[T1, T2, T3, T4]>;
- static all<T1, T2, T3>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>, T3 | Thenable<T3>]): Promise<[T1, T2, T3]>;
- static all<T1, T2>(values: [T1 | Thenable<T1>, T2 | Thenable<T2>]): Promise<[T1, T2]>;
- static all<T1>(values: [T1 | Thenable<T1>]): Promise<[T1]>;
- static all<TAll>(values: Array<TAll | Thenable<TAll>>): Promise<TAll[]>;
-
- /**
- * Make a Promise that settles in the same way as the first passed promise to settle: it fulfills if that promise fulfills and rejects if it rejects.
- */
- static race <R> (promises: (R | Thenable<R>)[]): Promise<R>;
-}
-
-/**
- * The polyfill method will patch the global environment (installing this implementation as the global Promise) when called.
- */
-export function polyfill (): void;
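The typings above describe the whole public surface of the removed library: the constructor, `then`/`catch`/`finally`, and the static `resolve`/`reject`/`all`/`race` helpers. As a hedged sketch (assuming the package were still installed under its usual `es6-promise` name), that surface is exercised like this:

```js
// Usage sketch for the API described by the typings above (illustrative only).
const { Promise: ES6Promise } = require('es6-promise');

const ok = ES6Promise.resolve(1);
const boom = ES6Promise.reject(new Error('nope'));

// `all` accepts a mix of thenables and plain values and fulfills in order.
ES6Promise.all([ok, 2, ES6Promise.resolve(3)])
  .then(values => console.log(values))          // => [ 1, 2, 3 ]
  .catch(reason => console.error(reason));

// `race` settles like the first entry to settle; `finally` runs either way.
ES6Promise.race([ok, boom])
  .then(value => console.log('first settled:', value))
  .catch(reason => console.error(reason.message))
  .finally(() => console.log('race settled'));
```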
diff --git a/node_modules/es6-promise/lib/es6-promise.auto.js b/node_modules/es6-promise/lib/es6-promise.auto.js
deleted file mode 100644
index 77997866f..000000000
--- a/node_modules/es6-promise/lib/es6-promise.auto.js
+++ /dev/null
@@ -1,3 +0,0 @@
-import Promise from './es6-promise';
-Promise.polyfill();
-export default Promise;
diff --git a/node_modules/es6-promise/lib/es6-promise.js b/node_modules/es6-promise/lib/es6-promise.js
deleted file mode 100644
index 4f4d840d0..000000000
--- a/node_modules/es6-promise/lib/es6-promise.js
+++ /dev/null
@@ -1,7 +0,0 @@
-import Promise from './es6-promise/promise';
-import polyfill from './es6-promise/polyfill';
-
-// Strange compat..
-Promise.polyfill = polyfill;
-Promise.Promise = Promise;
-export default Promise;
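The "Strange compat.." shim above makes the constructor reachable both as the module's export and as a `Promise` property on itself, which is why the usual `require('es6-promise').Promise` access works. A small sketch of that, assuming the published package:

```js
// Sketch: both access paths end up at the same constructor because of
// the `Promise.Promise = Promise` assignment above.
const lib = require('es6-promise');
const PromiseCtor = lib.Promise;

console.log(PromiseCtor.Promise === PromiseCtor); // true
PromiseCtor.resolve('ok').then(value => console.log(value)); // logs: ok
```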
diff --git a/node_modules/es6-promise/lib/es6-promise/-internal.js b/node_modules/es6-promise/lib/es6-promise/-internal.js
deleted file mode 100644
index 6bd75a82c..000000000
--- a/node_modules/es6-promise/lib/es6-promise/-internal.js
+++ /dev/null
@@ -1,243 +0,0 @@
-import {
- objectOrFunction,
- isFunction
-} from './utils';
-
-import {
- asap
-} from './asap';
-
-import originalThen from './then';
-import originalResolve from './promise/resolve';
-
-export const PROMISE_ID = Math.random().toString(36).substring(2);
-
-function noop() {}
-
-const PENDING = void 0;
-const FULFILLED = 1;
-const REJECTED = 2;
-
-function selfFulfillment() {
- return new TypeError("You cannot resolve a promise with itself");
-}
-
-function cannotReturnOwn() {
- return new TypeError('A promises callback cannot return that same promise.');
-}
-
-function tryThen(then, value, fulfillmentHandler, rejectionHandler) {
- try {
- then.call(value, fulfillmentHandler, rejectionHandler);
- } catch(e) {
- return e;
- }
-}
-
-function handleForeignThenable(promise, thenable, then) {
- asap(promise => {
- let sealed = false;
- let error = tryThen(then, thenable, value => {
- if (sealed) { return; }
- sealed = true;
- if (thenable !== value) {
- resolve(promise, value);
- } else {
- fulfill(promise, value);
- }
- }, reason => {
- if (sealed) { return; }
- sealed = true;
-
- reject(promise, reason);
- }, 'Settle: ' + (promise._label || ' unknown promise'));
-
- if (!sealed && error) {
- sealed = true;
- reject(promise, error);
- }
- }, promise);
-}
-
-function handleOwnThenable(promise, thenable) {
- if (thenable._state === FULFILLED) {
- fulfill(promise, thenable._result);
- } else if (thenable._state === REJECTED) {
- reject(promise, thenable._result);
- } else {
- subscribe(thenable, undefined, value => resolve(promise, value),
- reason => reject(promise, reason))
- }
-}
-
-function handleMaybeThenable(promise, maybeThenable, then) {
- if (maybeThenable.constructor === promise.constructor &&
- then === originalThen &&
- maybeThenable.constructor.resolve === originalResolve) {
- handleOwnThenable(promise, maybeThenable);
- } else {
- if (then === undefined) {
- fulfill(promise, maybeThenable);
- } else if (isFunction(then)) {
- handleForeignThenable(promise, maybeThenable, then);
- } else {
- fulfill(promise, maybeThenable);
- }
- }
-}
-
-function resolve(promise, value) {
- if (promise === value) {
- reject(promise, selfFulfillment());
- } else if (objectOrFunction(value)) {
- let then;
- try {
- then = value.then;
- } catch (error) {
- reject(promise, error);
- return;
- }
- handleMaybeThenable(promise, value, then);
- } else {
- fulfill(promise, value);
- }
-}
-
-function publishRejection(promise) {
- if (promise._onerror) {
- promise._onerror(promise._result);
- }
-
- publish(promise);
-}
-
-function fulfill(promise, value) {
- if (promise._state !== PENDING) { return; }
-
- promise._result = value;
- promise._state = FULFILLED;
-
- if (promise._subscribers.length !== 0) {
- asap(publish, promise);
- }
-}
-
-function reject(promise, reason) {
- if (promise._state !== PENDING) { return; }
- promise._state = REJECTED;
- promise._result = reason;
-
- asap(publishRejection, promise);
-}
-
-function subscribe(parent, child, onFulfillment, onRejection) {
- let { _subscribers } = parent;
- let { length } = _subscribers;
-
- parent._onerror = null;
-
- _subscribers[length] = child;
- _subscribers[length + FULFILLED] = onFulfillment;
- _subscribers[length + REJECTED] = onRejection;
-
- if (length === 0 && parent._state) {
- asap(publish, parent);
- }
-}
-
-function publish(promise) {
- let subscribers = promise._subscribers;
- let settled = promise._state;
-
- if (subscribers.length === 0) { return; }
-
- let child, callback, detail = promise._result;
-
- for (let i = 0; i < subscribers.length; i += 3) {
- child = subscribers[i];
- callback = subscribers[i + settled];
-
- if (child) {
- invokeCallback(settled, child, callback, detail);
- } else {
- callback(detail);
- }
- }
-
- promise._subscribers.length = 0;
-}
-
-function invokeCallback(settled, promise, callback, detail) {
- let hasCallback = isFunction(callback),
- value, error, succeeded = true;
-
- if (hasCallback) {
- try {
- value = callback(detail);
- } catch (e) {
- succeeded = false;
- error = e;
- }
-
- if (promise === value) {
- reject(promise, cannotReturnOwn());
- return;
- }
- } else {
- value = detail;
- }
-
- if (promise._state !== PENDING) {
- // noop
- } else if (hasCallback && succeeded) {
- resolve(promise, value);
- } else if (succeeded === false) {
- reject(promise, error);
- } else if (settled === FULFILLED) {
- fulfill(promise, value);
- } else if (settled === REJECTED) {
- reject(promise, value);
- }
-}
-
-function initializePromise(promise, resolver) {
- try {
- resolver(function resolvePromise(value){
- resolve(promise, value);
- }, function rejectPromise(reason) {
- reject(promise, reason);
- });
- } catch(e) {
- reject(promise, e);
- }
-}
-
-let id = 0;
-function nextId() {
- return id++;
-}
-
-function makePromise(promise) {
- promise[PROMISE_ID] = id++;
- promise._state = undefined;
- promise._result = undefined;
- promise._subscribers = [];
-}
-
-export {
- nextId,
- makePromise,
- noop,
- resolve,
- reject,
- fulfill,
- subscribe,
- publish,
- publishRejection,
- initializePromise,
- invokeCallback,
- FULFILLED,
- REJECTED,
- PENDING,
- handleMaybeThenable
-};
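Note how `subscribe()` above packs each subscription into three consecutive slots of a flat array (the child promise, then the fulfillment handler at offset `FULFILLED` = 1, then the rejection handler at offset `REJECTED` = 2), and `publish()` later walks the array in steps of three, selecting `subscribers[i + settled]`. A standalone sketch of that indexing scheme, separate from the library's internals:

```js
// Illustration of the flat "triplet" subscriber layout used by the code above.
const FULFILLED = 1;
const REJECTED = 2;

const subscribers = [];

function subscribe(child, onFulfillment, onRejection) {
  const length = subscribers.length;
  subscribers[length] = child;
  subscribers[length + FULFILLED] = onFulfillment;
  subscribers[length + REJECTED] = onRejection;
}

function publish(settled, detail) {
  // `settled` is FULFILLED or REJECTED, so `i + settled` picks the right handler.
  for (let i = 0; i < subscribers.length; i += 3) {
    const callback = subscribers[i + settled];
    if (subscribers[i]) callback(detail);
  }
  subscribers.length = 0;
}

subscribe('child-1', v => console.log('fulfilled with', v), r => console.log('rejected with', r));
subscribe('child-2', v => console.log('also fulfilled with', v), r => console.log('also rejected with', r));
publish(FULFILLED, 42); // logs both fulfillment handlers with 42
```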
diff --git a/node_modules/es6-promise/lib/es6-promise/asap.js b/node_modules/es6-promise/lib/es6-promise/asap.js
deleted file mode 100644
index 0483201dc..000000000
--- a/node_modules/es6-promise/lib/es6-promise/asap.js
+++ /dev/null
@@ -1,119 +0,0 @@
-let len = 0;
-let vertxNext;
-let customSchedulerFn;
-
-export var asap = function asap(callback, arg) {
- queue[len] = callback;
- queue[len + 1] = arg;
- len += 2;
- if (len === 2) {
- // If len is 2, that means that we need to schedule an async flush.
- // If additional callbacks are queued before the queue is flushed, they
- // will be processed by this flush that we are scheduling.
- if (customSchedulerFn) {
- customSchedulerFn(flush);
- } else {
- scheduleFlush();
- }
- }
-}
-
-export function setScheduler(scheduleFn) {
- customSchedulerFn = scheduleFn;
-}
-
-export function setAsap(asapFn) {
- asap = asapFn;
-}
-
-const browserWindow = (typeof window !== 'undefined') ? window : undefined;
-const browserGlobal = browserWindow || {};
-const BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
-const isNode = typeof self === 'undefined' && typeof process !== 'undefined' && {}.toString.call(process) === '[object process]';
-
-// test for web worker but not in IE10
-const isWorker = typeof Uint8ClampedArray !== 'undefined' &&
- typeof importScripts !== 'undefined' &&
- typeof MessageChannel !== 'undefined';
-
-// node
-function useNextTick() {
- // node version 0.10.x displays a deprecation warning when nextTick is used recursively
- // see https://github.com/cujojs/when/issues/410 for details
- return () => process.nextTick(flush);
-}
-
-// vertx
-function useVertxTimer() {
- if (typeof vertxNext !== 'undefined') {
- return function() {
- vertxNext(flush);
- };
- }
-
- return useSetTimeout();
-}
-
-function useMutationObserver() {
- let iterations = 0;
- const observer = new BrowserMutationObserver(flush);
- const node = document.createTextNode('');
- observer.observe(node, { characterData: true });
-
- return () => {
- node.data = (iterations = ++iterations % 2);
- };
-}
-
-// web worker
-function useMessageChannel() {
- const channel = new MessageChannel();
- channel.port1.onmessage = flush;
- return () => channel.port2.postMessage(0);
-}
-
-function useSetTimeout() {
- // Store setTimeout reference so es6-promise will be unaffected by
- // other code modifying setTimeout (like sinon.useFakeTimers())
- const globalSetTimeout = setTimeout;
- return () => globalSetTimeout(flush, 1);
-}
-
-const queue = new Array(1000);
-function flush() {
- for (let i = 0; i < len; i+=2) {
- let callback = queue[i];
- let arg = queue[i+1];
-
- callback(arg);
-
- queue[i] = undefined;
- queue[i+1] = undefined;
- }
-
- len = 0;
-}
-
-function attemptVertx() {
- try {
- const vertx = Function('return this')().require('vertx');
- vertxNext = vertx.runOnLoop || vertx.runOnContext;
- return useVertxTimer();
- } catch(e) {
- return useSetTimeout();
- }
-}
-
-let scheduleFlush;
-// Decide which async method to use to trigger processing of queued callbacks:
-if (isNode) {
- scheduleFlush = useNextTick();
-} else if (BrowserMutationObserver) {
- scheduleFlush = useMutationObserver();
-} else if (isWorker) {
- scheduleFlush = useMessageChannel();
-} else if (browserWindow === undefined && typeof require === 'function') {
- scheduleFlush = attemptVertx();
-} else {
- scheduleFlush = useSetTimeout();
-}
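The deleted `asap.js` batches callbacks into a preallocated queue and schedules exactly one flush per turn, picking `process.nextTick`, a `MutationObserver`, a `MessageChannel`, vertx, or `setTimeout` depending on the environment; `setScheduler()` lets a host substitute its own trigger. A hedged sketch of overriding the scheduler through the underscore-prefixed hook the built Promise exposes (`_setScheduler`, wired up in `promise.js` below):

```js
// Sketch: route es6-promise's internal flush through setImmediate on Node.
// `_setScheduler` is a semi-private hook; treat this as illustrative, not API advice.
const { Promise: ES6Promise } = require('es6-promise');

ES6Promise._setScheduler(function (flush) {
  setImmediate(flush); // run queued promise callbacks during the check phase
});

ES6Promise.resolve('scheduled').then(value => console.log(value)); // logs: scheduled
```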
diff --git a/node_modules/es6-promise/lib/es6-promise/enumerator.js b/node_modules/es6-promise/lib/es6-promise/enumerator.js
deleted file mode 100644
index be2e0938a..000000000
--- a/node_modules/es6-promise/lib/es6-promise/enumerator.js
+++ /dev/null
@@ -1,124 +0,0 @@
-import {
- isArray,
- isMaybeThenable
-} from './utils';
-import {
- noop,
- reject,
- fulfill,
- subscribe,
- FULFILLED,
- REJECTED,
- PENDING,
- handleMaybeThenable
-} from './-internal';
-
-import then from './then';
-import Promise from './promise';
-import originalResolve from './promise/resolve';
-import originalThen from './then';
-import { makePromise, PROMISE_ID } from './-internal';
-
-function validationError() {
- return new Error('Array Methods must be provided an Array');
-};
-
-export default class Enumerator {
- constructor(Constructor, input) {
- this._instanceConstructor = Constructor;
- this.promise = new Constructor(noop);
-
- if (!this.promise[PROMISE_ID]) {
- makePromise(this.promise);
- }
-
- if (isArray(input)) {
- this.length = input.length;
- this._remaining = input.length;
-
- this._result = new Array(this.length);
-
- if (this.length === 0) {
- fulfill(this.promise, this._result);
- } else {
- this.length = this.length || 0;
- this._enumerate(input);
- if (this._remaining === 0) {
- fulfill(this.promise, this._result);
- }
- }
- } else {
- reject(this.promise, validationError());
- }
- }
- _enumerate(input) {
- for (let i = 0; this._state === PENDING && i < input.length; i++) {
- this._eachEntry(input[i], i);
- }
- }
-
- _eachEntry(entry, i) {
- let c = this._instanceConstructor;
- let { resolve } = c;
-
- if (resolve === originalResolve) {
- let then;
- let error;
- let didError = false;
- try {
- then = entry.then;
- } catch (e) {
- didError = true;
- error = e;
- }
-
- if (then === originalThen &&
- entry._state !== PENDING) {
- this._settledAt(entry._state, i, entry._result);
- } else if (typeof then !== 'function') {
- this._remaining--;
- this._result[i] = entry;
- } else if (c === Promise) {
- let promise = new c(noop);
- if (didError) {
- reject(promise, error);
- } else {
- handleMaybeThenable(promise, entry, then);
- }
- this._willSettleAt(promise, i);
- } else {
- this._willSettleAt(new c(resolve => resolve(entry)), i);
- }
- } else {
- this._willSettleAt(resolve(entry), i);
- }
- }
-
- _settledAt(state, i, value) {
- let { promise } = this;
-
- if (promise._state === PENDING) {
- this._remaining--;
-
- if (state === REJECTED) {
- reject(promise, value);
- } else {
- this._result[i] = value;
- }
- }
-
- if (this._remaining === 0) {
- fulfill(promise, this._result);
- }
- }
-
- _willSettleAt(promise, i) {
- let enumerator = this;
-
- subscribe(
- promise, undefined,
- value => enumerator._settledAt(FULFILLED, i, value),
- reason => enumerator._settledAt(REJECTED, i, reason)
- );
- }
-};
diff --git a/node_modules/es6-promise/lib/es6-promise/polyfill.js b/node_modules/es6-promise/lib/es6-promise/polyfill.js
deleted file mode 100644
index 30db73c9d..000000000
--- a/node_modules/es6-promise/lib/es6-promise/polyfill.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/*global self*/
-import Promise from './promise';
-
-export default function polyfill() {
- let local;
-
- if (typeof global !== 'undefined') {
- local = global;
- } else if (typeof self !== 'undefined') {
- local = self;
- } else {
- try {
- local = Function('return this')();
- } catch (e) {
- throw new Error('polyfill failed because global object is unavailable in this environment');
- }
- }
-
- let P = local.Promise;
-
- if (P) {
- var promiseToString = null;
- try {
- promiseToString = Object.prototype.toString.call(P.resolve());
- } catch(e) {
- // silently ignored
- }
-
- if (promiseToString === '[object Promise]' && !P.cast){
- return;
- }
- }
-
- local.Promise = Promise;
-}
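`polyfill()` above finds the global object, keeps an existing implementation when `Object.prototype.toString` reports `[object Promise]` and there is no legacy `cast` method, and otherwise installs this Promise on the global. The two usual entry points look roughly like this (the `auto` entry simply loads the build that calls `polyfill()` for you, as in `lib/es6-promise.auto.js` above):

```js
// Explicit: patch the global only when you ask for it.
require('es6-promise').polyfill();

// Or implicit: the auto entry point calls polyfill() as a side effect of loading.
// require('es6-promise/auto');

// Either way, a global Promise is now guaranteed to exist.
new Promise(resolve => resolve('patched')).then(value => console.log(value));
```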
diff --git a/node_modules/es6-promise/lib/es6-promise/promise.js b/node_modules/es6-promise/lib/es6-promise/promise.js
deleted file mode 100644
index ae1703638..000000000
--- a/node_modules/es6-promise/lib/es6-promise/promise.js
+++ /dev/null
@@ -1,431 +0,0 @@
-import {
- isFunction
-} from './utils';
-import {
- noop,
- nextId,
- PROMISE_ID,
- initializePromise
-} from './-internal';
-import {
- asap,
- setAsap,
- setScheduler
-} from './asap';
-
-import all from './promise/all';
-import race from './promise/race';
-import Resolve from './promise/resolve';
-import Reject from './promise/reject';
-import then from './then';
-
-function needsResolver() {
- throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
-}
-
-function needsNew() {
- throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
-}
-
-/**
- Promise objects represent the eventual result of an asynchronous operation. The
- primary way of interacting with a promise is through its `then` method, which
- registers callbacks to receive either a promise's eventual value or the reason
- why the promise cannot be fulfilled.
-
- Terminology
- -----------
-
- - `promise` is an object or function with a `then` method whose behavior conforms to this specification.
- - `thenable` is an object or function that defines a `then` method.
- - `value` is any legal JavaScript value (including undefined, a thenable, or a promise).
- - `exception` is a value that is thrown using the throw statement.
- - `reason` is a value that indicates why a promise was rejected.
- - `settled` is the final resting state of a promise: fulfilled or rejected.
-
- A promise can be in one of three states: pending, fulfilled, or rejected.
-
- Promises that are fulfilled have a fulfillment value and are in the fulfilled
- state. Promises that are rejected have a rejection reason and are in the
- rejected state. A fulfillment value is never a thenable.
-
- Promises can also be said to *resolve* a value. If this value is also a
- promise, then the original promise's settled state will match the value's
- settled state. So a promise that *resolves* a promise that rejects will
- itself reject, and a promise that *resolves* a promise that fulfills will
- itself fulfill.
-
-
- Basic Usage:
- ------------
-
- ```js
- let promise = new Promise(function(resolve, reject) {
- // on success
- resolve(value);
-
- // on failure
- reject(reason);
- });
-
- promise.then(function(value) {
- // on fulfillment
- }, function(reason) {
- // on rejection
- });
- ```
-
- Advanced Usage:
- ---------------
-
- Promises shine when abstracting away asynchronous interactions such as
- `XMLHttpRequest`s.
-
- ```js
- function getJSON(url) {
- return new Promise(function(resolve, reject){
- let xhr = new XMLHttpRequest();
-
- xhr.open('GET', url);
- xhr.onreadystatechange = handler;
- xhr.responseType = 'json';
- xhr.setRequestHeader('Accept', 'application/json');
- xhr.send();
-
- function handler() {
- if (this.readyState === this.DONE) {
- if (this.status === 200) {
- resolve(this.response);
- } else {
- reject(new Error('getJSON: `' + url + '` failed with status: [' + this.status + ']'));
- }
- }
- };
- });
- }
-
- getJSON('/posts.json').then(function(json) {
- // on fulfillment
- }, function(reason) {
- // on rejection
- });
- ```
-
- Unlike callbacks, promises are great composable primitives.
-
- ```js
- Promise.all([
- getJSON('/posts'),
- getJSON('/comments')
- ]).then(function(values){
- values[0] // => postsJSON
- values[1] // => commentsJSON
-
- return values;
- });
- ```
-
- @class Promise
- @param {Function} resolver
- Useful for tooling.
- @constructor
-*/
-
-class Promise {
- constructor(resolver) {
- this[PROMISE_ID] = nextId();
- this._result = this._state = undefined;
- this._subscribers = [];
-
- if (noop !== resolver) {
- typeof resolver !== 'function' && needsResolver();
- this instanceof Promise ? initializePromise(this, resolver) : needsNew();
- }
- }
-
- /**
- The primary way of interacting with a promise is through its `then` method,
- which registers callbacks to receive either a promise's eventual value or the
- reason why the promise cannot be fulfilled.
-
- ```js
- findUser().then(function(user){
- // user is available
- }, function(reason){
- // user is unavailable, and you are given the reason why
- });
- ```
-
- Chaining
- --------
-
- The return value of `then` is itself a promise. This second, 'downstream'
- promise is resolved with the return value of the first promise's fulfillment
- or rejection handler, or rejected if the handler throws an exception.
-
- ```js
- findUser().then(function (user) {
- return user.name;
- }, function (reason) {
- return 'default name';
- }).then(function (userName) {
- // If `findUser` fulfilled, `userName` will be the user's name, otherwise it
- // will be `'default name'`
- });
-
- findUser().then(function (user) {
- throw new Error('Found user, but still unhappy');
- }, function (reason) {
- throw new Error('`findUser` rejected and we\'re unhappy');
- }).then(function (value) {
- // never reached
- }, function (reason) {
- // if `findUser` fulfilled, `reason` will be 'Found user, but still unhappy'.
- // If `findUser` rejected, `reason` will be '`findUser` rejected and we're unhappy'.
- });
- ```
- If the downstream promise does not specify a rejection handler, rejection reasons will be propagated further downstream.
-
- ```js
- findUser().then(function (user) {
- throw new PedagogicalException('Upstream error');
- }).then(function (value) {
- // never reached
- }).then(function (value) {
- // never reached
- }, function (reason) {
- // The `PedagogicalException` is propagated all the way down to here
- });
- ```
-
- Assimilation
- ------------
-
- Sometimes the value you want to propagate to a downstream promise can only be
- retrieved asynchronously. This can be achieved by returning a promise in the
- fulfillment or rejection handler. The downstream promise will then be pending
- until the returned promise is settled. This is called *assimilation*.
-
- ```js
- findUser().then(function (user) {
- return findCommentsByAuthor(user);
- }).then(function (comments) {
- // The user's comments are now available
- });
- ```
-
- If the assimilated promise rejects, then the downstream promise will also reject.
-
- ```js
- findUser().then(function (user) {
- return findCommentsByAuthor(user);
- }).then(function (comments) {
- // If `findCommentsByAuthor` fulfills, we'll have the value here
- }, function (reason) {
- // If `findCommentsByAuthor` rejects, we'll have the reason here
- });
- ```
-
- Simple Example
- --------------
-
- Synchronous Example
-
- ```javascript
- let result;
-
- try {
- result = findResult();
- // success
- } catch(reason) {
- // failure
- }
- ```
-
- Errback Example
-
- ```js
- findResult(function(result, err){
- if (err) {
- // failure
- } else {
- // success
- }
- });
- ```
-
- Promise Example:
-
- ```javascript
- findResult().then(function(result){
- // success
- }, function(reason){
- // failure
- });
- ```
-
- Advanced Example
- --------------
-
- Synchronous Example
-
- ```javascript
- let author, books;
-
- try {
- author = findAuthor();
- books = findBooksByAuthor(author);
- // success
- } catch(reason) {
- // failure
- }
- ```
-
- Errback Example
-
- ```js
-
- function foundBooks(books) {
-
- }
-
- function failure(reason) {
-
- }
-
- findAuthor(function(author, err){
- if (err) {
- failure(err);
- // failure
- } else {
- try {
- findBooksByAuthor(author, function(books, err) {
- if (err) {
- failure(err);
- } else {
- try {
- foundBooks(books);
- } catch(reason) {
- failure(reason);
- }
- }
- });
- } catch(error) {
- failure(error);
- }
- // success
- }
- });
- ```
-
- Promise Example:
-
- ```javascript
- findAuthor().
- then(findBooksByAuthor).
- then(function(books){
- // found books
- }).catch(function(reason){
- // something went wrong
- });
- ```
-
- @method then
- @param {Function} onFulfilled
- @param {Function} onRejected
- Useful for tooling.
- @return {Promise}
- */
-
- /**
- `catch` is simply sugar for `then(undefined, onRejection)` which makes it the same
- as the catch block of a try/catch statement.
-
- ```js
- function findAuthor(){
- throw new Error('couldn\'t find that author');
- }
-
- // synchronous
- try {
- findAuthor();
- } catch(reason) {
- // something went wrong
- }
-
- // async with promises
- findAuthor().catch(function(reason){
- // something went wrong
- });
- ```
-
- @method catch
- @param {Function} onRejection
- Useful for tooling.
- @return {Promise}
- */
- catch(onRejection) {
- return this.then(null, onRejection);
- }
-
-/**
- `finally` will be invoked regardless of the promise's fate, just as a native
- try/catch/finally block behaves.
-
- Synchronous example:
-
- ```js
- function findAuthor() {
- if (Math.random() > 0.5) {
- throw new Error();
- }
- return new Author();
- }
-
- try {
- return findAuthor(); // succeed or fail
- } catch(error) {
- return findOtherAuthor();
- } finally {
- // always runs
- // doesn't affect the return value
- }
- ```
-
- Asynchronous example:
-
- ```js
- findAuthor().catch(function(reason){
- return findOtherAuthor();
- }).finally(function(){
- // author was either found, or not
- });
- ```
-
- @method finally
- @param {Function} callback
- @return {Promise}
-*/
- finally(callback) {
- let promise = this;
- let constructor = promise.constructor;
-
- if ( isFunction(callback) ) {
- return promise.then(value => constructor.resolve(callback()).then(() => value),
- reason => constructor.resolve(callback()).then(() => { throw reason; }));
- }
-
- return promise.then(callback, callback);
- }
-}
-
-Promise.prototype.then = then;
-export default Promise;
-Promise.all = all;
-Promise.race = race;
-Promise.resolve = Resolve;
-Promise.reject = Reject;
-Promise._setScheduler = setScheduler;
-Promise._setAsap = setAsap;
-Promise._asap = asap;
-
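The `finally()` shown above resolves whatever the callback returns and then either passes the original value through or re-throws the original reason, so the callback runs for its side effects only; if the callback itself throws or rejects, that error takes over. A short behavior sketch (it reads the same against the implementation above or a modern native Promise):

```js
// finally() passes the original outcome through; the callback's return value is ignored.
Promise.resolve('value')
  .finally(() => 'ignored')
  .then(v => console.log(v)); // logs: value

Promise.reject(new Error('original'))
  .finally(() => { /* cleanup only */ })
  .catch(e => console.log(e.message)); // logs: original
```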
diff --git a/node_modules/es6-promise/lib/es6-promise/promise/all.js b/node_modules/es6-promise/lib/es6-promise/promise/all.js
deleted file mode 100644
index 9ca3c063a..000000000
--- a/node_modules/es6-promise/lib/es6-promise/promise/all.js
+++ /dev/null
@@ -1,52 +0,0 @@
-import Enumerator from '../enumerator';
-
-/**
- `Promise.all` accepts an array of promises, and returns a new promise which
- is fulfilled with an array of fulfillment values for the passed promises, or
- rejected with the reason of the first passed promise to be rejected. It casts all
- elements of the passed iterable to promises as it runs this algorithm.
-
- Example:
-
- ```javascript
- let promise1 = resolve(1);
- let promise2 = resolve(2);
- let promise3 = resolve(3);
- let promises = [ promise1, promise2, promise3 ];
-
- Promise.all(promises).then(function(array){
- // The array here would be [ 1, 2, 3 ];
- });
- ```
-
- If any of the `promises` given to `all` are rejected, the first promise
- that is rejected will be given as an argument to the returned promise's
- rejection handler.
-
- For example:
-
- ```javascript
- let promise1 = resolve(1);
- let promise2 = reject(new Error("2"));
- let promise3 = reject(new Error("3"));
- let promises = [ promise1, promise2, promise3 ];
-
- Promise.all(promises).then(function(array){
- // Code here never runs because there are rejected promises!
- }, function(error) {
- // error.message === "2"
- });
- ```
-
- @method all
- @static
- @param {Array} entries array of promises
- @param {String} label optional string for labeling the promise.
- Useful for tooling.
- @return {Promise} promise that is fulfilled when all `promises` have been
- fulfilled, or rejected if any of them become rejected.
- @static
-*/
-export default function all(entries) {
- return new Enumerator(this, entries).promise;
-}
diff --git a/node_modules/es6-promise/lib/es6-promise/promise/race.js b/node_modules/es6-promise/lib/es6-promise/promise/race.js
deleted file mode 100644
index 166dc820b..000000000
--- a/node_modules/es6-promise/lib/es6-promise/promise/race.js
+++ /dev/null
@@ -1,84 +0,0 @@
-import {
- isArray
-} from "../utils";
-
-/**
- `Promise.race` returns a new promise which is settled in the same way as the
- first passed promise to settle.
-
- Example:
-
- ```javascript
- let promise1 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 1');
- }, 200);
- });
-
- let promise2 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 2');
- }, 100);
- });
-
- Promise.race([promise1, promise2]).then(function(result){
- // result === 'promise 2' because it was resolved before promise1
- // was resolved.
- });
- ```
-
- `Promise.race` is deterministic in that only the state of the first
- settled promise matters. For example, even if the other promises given in the
- `promises` array eventually become fulfilled, the returned promise is rejected
- whenever the first promise to settle is rejected:
-
- ```javascript
- let promise1 = new Promise(function(resolve, reject){
- setTimeout(function(){
- resolve('promise 1');
- }, 200);
- });
-
- let promise2 = new Promise(function(resolve, reject){
- setTimeout(function(){
- reject(new Error('promise 2'));
- }, 100);
- });
-
- Promise.race([promise1, promise2]).then(function(result){
- // Code here never runs
- }, function(reason){
- // reason.message === 'promise 2' because promise 2 became rejected before
- // promise 1 became fulfilled
- });
- ```
-
- An example real-world use case is implementing timeouts:
-
- ```javascript
- Promise.race([ajax('foo.json'), timeout(5000)])
- ```
-
- @method race
- @static
- @param {Array} promises array of promises to observe
- Useful for tooling.
- @return {Promise} a promise which settles in the same way as the first passed
- promise to settle.
-*/
-export default function race(entries) {
- /*jshint validthis:true */
- let Constructor = this;
-
- if (!isArray(entries)) {
- return new Constructor((_, reject) => reject(new TypeError('You must pass an array to race.')));
- } else {
- return new Constructor((resolve, reject) => {
- let length = entries.length;
- for (let i = 0; i < length; i++) {
- Constructor.resolve(entries[i]).then(resolve, reject);
- }
- });
- }
-}
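The timeout use case mentioned in the removed race.js documentation assumes a `timeout` helper. A hypothetical sketch of such a helper (not part of the deleted file) could look like this.

```js
// Hypothetical helper for the Promise.race timeout pattern shown above:
// rejects after `ms` milliseconds so the race settles even if the request hangs.
function timeout(ms) {
  return new Promise((resolve, reject) => {
    setTimeout(() => reject(new Error(`timed out after ${ms} ms`)), ms);
  });
}

// Usage sketch (`fetchJSON` stands in for the `ajax` helper in the example):
// Promise.race([fetchJSON('foo.json'), timeout(5000)])
//   .then(data => console.log(data))
//   .catch(err => console.error(err));
```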
diff --git a/node_modules/es6-promise/lib/es6-promise/promise/reject.js b/node_modules/es6-promise/lib/es6-promise/promise/reject.js
deleted file mode 100644
index cd55faabf..000000000
--- a/node_modules/es6-promise/lib/es6-promise/promise/reject.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import {
- noop,
- reject as _reject
-} from '../-internal';
-
-/**
- `Promise.reject` returns a promise rejected with the passed `reason`.
- It is shorthand for the following:
-
- ```javascript
- let promise = new Promise(function(resolve, reject){
- reject(new Error('WHOOPS'));
- });
-
- promise.then(function(value){
- // Code here doesn't run because the promise is rejected!
- }, function(reason){
- // reason.message === 'WHOOPS'
- });
- ```
-
- Instead of writing the above, your code now simply becomes the following:
-
- ```javascript
- let promise = Promise.reject(new Error('WHOOPS'));
-
- promise.then(function(value){
- // Code here doesn't run because the promise is rejected!
- }, function(reason){
- // reason.message === 'WHOOPS'
- });
- ```
-
- @method reject
- @static
- @param {Any} reason value that the returned promise will be rejected with.
- Useful for tooling.
- @return {Promise} a promise rejected with the given `reason`.
-*/
-export default function reject(reason) {
- /*jshint validthis:true */
- let Constructor = this;
- let promise = new Constructor(noop);
- _reject(promise, reason);
- return promise;
-}
diff --git a/node_modules/es6-promise/lib/es6-promise/promise/resolve.js b/node_modules/es6-promise/lib/es6-promise/promise/resolve.js
deleted file mode 100644
index f4642b630..000000000
--- a/node_modules/es6-promise/lib/es6-promise/promise/resolve.js
+++ /dev/null
@@ -1,48 +0,0 @@
-import {
- noop,
- resolve as _resolve
-} from '../-internal';
-
-/**
- `Promise.resolve` returns a promise that will become resolved with the
- passed `value`. It is shorthand for the following:
-
- ```javascript
- let promise = new Promise(function(resolve, reject){
- resolve(1);
- });
-
- promise.then(function(value){
- // value === 1
- });
- ```
-
- Instead of writing the above, your code now simply becomes the following:
-
- ```javascript
- let promise = Promise.resolve(1);
-
- promise.then(function(value){
- // value === 1
- });
- ```
-
- @method resolve
- @static
- @param {Any} value value that the returned promise will be resolved with
- Useful for tooling.
- @return {Promise} a promise that will become fulfilled with the given
- `value`
-*/
-export default function resolve(object) {
- /*jshint validthis:true */
- let Constructor = this;
-
- if (object && typeof object === 'object' && object.constructor === Constructor) {
- return object;
- }
-
- let promise = new Constructor(noop);
- _resolve(promise, object);
- return promise;
-}
diff --git a/node_modules/es6-promise/lib/es6-promise/then.js b/node_modules/es6-promise/lib/es6-promise/then.js
deleted file mode 100644
index b2b79f0a1..000000000
--- a/node_modules/es6-promise/lib/es6-promise/then.js
+++ /dev/null
@@ -1,32 +0,0 @@
-import {
- invokeCallback,
- subscribe,
- FULFILLED,
- REJECTED,
- noop,
- makePromise,
- PROMISE_ID
-} from './-internal';
-
-import { asap } from './asap';
-
-export default function then(onFulfillment, onRejection) {
- const parent = this;
-
- const child = new this.constructor(noop);
-
- if (child[PROMISE_ID] === undefined) {
- makePromise(child);
- }
-
- const { _state } = parent;
-
- if (_state) {
- const callback = arguments[_state - 1];
- asap(() => invokeCallback(_state, child, callback, parent._result));
- } else {
- subscribe(parent, child, onFulfillment, onRejection);
- }
-
- return child;
-}
diff --git a/node_modules/es6-promise/lib/es6-promise/utils.js b/node_modules/es6-promise/lib/es6-promise/utils.js
deleted file mode 100644
index 72545c5e9..000000000
--- a/node_modules/es6-promise/lib/es6-promise/utils.js
+++ /dev/null
@@ -1,21 +0,0 @@
-export function objectOrFunction(x) {
- let type = typeof x;
- return x !== null && (type === 'object' || type === 'function');
-}
-
-export function isFunction(x) {
- return typeof x === 'function';
-}
-
-export function isMaybeThenable(x) {
- return x !== null && typeof x === 'object';
-}
-
-let _isArray;
-if (Array.isArray) {
- _isArray = Array.isArray;
-} else {
- _isArray = x => Object.prototype.toString.call(x) === '[object Array]';
-}
-
-export const isArray = _isArray;
diff --git a/node_modules/es6-promise/package.json b/node_modules/es6-promise/package.json
deleted file mode 100644
index 909519784..000000000
--- a/node_modules/es6-promise/package.json
+++ /dev/null
@@ -1,106 +0,0 @@
-{
- "_from": "es6-promise@^4.0.3",
- "_id": "es6-promise@4.2.8",
- "_inBundle": false,
- "_integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==",
- "_location": "/es6-promise",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "es6-promise@^4.0.3",
- "name": "es6-promise",
- "escapedName": "es6-promise",
- "rawSpec": "^4.0.3",
- "saveSpec": null,
- "fetchSpec": "^4.0.3"
- },
- "_requiredBy": [
- "/es6-promisify"
- ],
- "_resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz",
- "_shasum": "4eb21594c972bc40553d276e510539143db53e0a",
- "_spec": "es6-promise@^4.0.3",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/es6-promisify",
- "author": {
- "name": "Yehuda Katz, Tom Dale, Stefan Penner and contributors",
- "url": "Conversion to ES6 API by Jake Archibald"
- },
- "browser": {
- "vertx": false
- },
- "bugs": {
- "url": "https://github.com/stefanpenner/es6-promise/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "A lightweight library that provides tools for organizing asynchronous code",
- "devDependencies": {
- "babel-plugin-transform-es2015-arrow-functions": "^6.22.0",
- "babel-plugin-transform-es2015-block-scoping": "^6.24.1",
- "babel-plugin-transform-es2015-classes": "^6.24.1",
- "babel-plugin-transform-es2015-computed-properties": "^6.24.1",
- "babel-plugin-transform-es2015-constants": "^6.1.4",
- "babel-plugin-transform-es2015-destructuring": "^6.23.0",
- "babel-plugin-transform-es2015-parameters": "^6.24.1",
- "babel-plugin-transform-es2015-shorthand-properties": "^6.24.1",
- "babel-plugin-transform-es2015-spread": "^6.22.0",
- "babel-plugin-transform-es2015-template-literals": "^6.22.0",
- "babel6-plugin-strip-class-callcheck": "^6.0.0",
- "broccoli-babel-transpiler": "^6.0.0",
- "broccoli-concat": "^3.1.0",
- "broccoli-merge-trees": "^2.0.0",
- "broccoli-rollup": "^2.0.0",
- "broccoli-stew": "^1.5.0",
- "broccoli-uglify-js": "^0.2.0",
- "broccoli-watchify": "^1.0.1",
- "ember-cli": "2.18.0-beta.2",
- "ember-cli-dependency-checker": "^2.1.0",
- "git-repo-version": "1.0.1",
- "json3": "^3.3.2",
- "mocha": "^4.0.1",
- "promises-aplus-tests-phantom": "^2.1.0-revise"
- },
- "directories": {
- "lib": "lib"
- },
- "files": [
- "dist",
- "lib",
- "es6-promise.d.ts",
- "auto.js",
- "!dist/test"
- ],
- "homepage": "https://github.com/stefanpenner/es6-promise",
- "jsdelivr": "dist/es6-promise.auto.min.js",
- "keywords": [
- "futures",
- "polyfill",
- "promise",
- "promises"
- ],
- "license": "MIT",
- "main": "dist/es6-promise.js",
- "name": "es6-promise",
- "namespace": "es6-promise",
- "repository": {
- "type": "git",
- "url": "git://github.com/stefanpenner/es6-promise.git"
- },
- "scripts": {
- "build": "ember build --environment production",
- "prepublishOnly": "ember build --environment production",
- "start": "ember s",
- "test": "ember test",
- "test:browser": "ember test --launch PhantomJS",
- "test:node": "ember test --launch Mocha",
- "test:server": "ember test --server"
- },
- "spm": {
- "main": "dist/es6-promise.js"
- },
- "typings": "es6-promise.d.ts",
- "unpkg": "dist/es6-promise.auto.min.js",
- "version": "4.2.8"
-}
diff --git a/node_modules/es6-promisify/README.md b/node_modules/es6-promisify/README.md
deleted file mode 100644
index 4141cc840..000000000
--- a/node_modules/es6-promisify/README.md
+++ /dev/null
@@ -1,89 +0,0 @@
-[![Travis CI](https://travis-ci.org/digitaldesignlabs/es6-promisify.svg)](https://travis-ci.org/digitaldesignlabs/es6-promisify)
-
-# es6-promisify
-
-Converts callback-based functions to Promise-based functions.
-
-## Install
-
-Install with [npm](https://npmjs.org/package/es6-promisify)
-
-```bash
-npm install --save es6-promisify
-```
-
-## Example
-
-```js
-"use strict";
-
-// Declare variables
-const promisify = require("es6-promisify");
-const fs = require("fs");
-
-// Convert the stat function
-const stat = promisify(fs.stat);
-
-// Now usable as a promise!
-stat("example.txt").then(function (stats) {
- console.log("Got stats", stats);
-}).catch(function (err) {
- console.error("Yikes!", err);
-});
-```
-
-## Promisify methods
-```js
-"use strict";
-
-// Declare variables
-const promisify = require("es6-promisify");
-const redis = require("redis").createClient(6379, "localhost");
-
-// Create a promise-based version of send_command
-const client = promisify(redis.send_command, redis);
-
-// Send commands to redis and get a promise back
-client("ping").then(function (pong) {
- console.log("Got", pong);
-}).catch(function (err) {
- console.error("Unexpected error", err);
-}).then(function () {
- redis.quit();
-});
-```
-
-## Handling multiple callback arguments
-```js
-"use strict";
-
-// Declare functions
-function test(cb) {
- return cb(undefined, 1, 2, 3);
-}
-
-// Declare variables
-const promisify = require("es6-promisify");
-
-// Create promise-based version of test
-const single = promisify(test);
-const multi = promisify(test, {multiArgs: true});
-
-// Discards additional arguments
-single().then(function (result) {
- console.log(result); // 1
-});
-
-// Returns all arguments as an array
-multi().then(function (result) {
- console.log(result); // [1, 2, 3]
-});
-```
-
-### Tests
-Test with nodeunit
-```bash
-$ npm test
-```
-
-Published under the [MIT License](http://opensource.org/licenses/MIT).
diff --git a/node_modules/es6-promisify/dist/promise.js b/node_modules/es6-promisify/dist/promise.js
deleted file mode 100644
index 2fe5c6103..000000000
--- a/node_modules/es6-promisify/dist/promise.js
+++ /dev/null
@@ -1,73 +0,0 @@
-"use strict";
-
-/* global self, window, module, global, require */
-module.exports = function () {
-
- "use strict";
-
- var globalObject = void 0;
-
- function isFunction(x) {
- return typeof x === "function";
- }
-
- // Seek the global object
- if (global !== undefined) {
- globalObject = global;
- } else if (window !== undefined && window.document) {
- globalObject = window;
- } else {
- globalObject = self;
- }
-
- // Test for any native promise implementation, and if that
- // implementation appears to conform to the specification.
- // This code mostly nicked from the es6-promise module polyfill
- // and then fooled with.
- var hasPromiseSupport = function () {
-
- // No promise object at all, and it's a non-starter
- if (!globalObject.hasOwnProperty("Promise")) {
- return false;
- }
-
- // There is a Promise object. Does it conform to the spec?
- var P = globalObject.Promise;
-
- // Some of these methods are missing from
- // Firefox/Chrome experimental implementations
- if (!P.hasOwnProperty("resolve") || !P.hasOwnProperty("reject")) {
- return false;
- }
-
- if (!P.hasOwnProperty("all") || !P.hasOwnProperty("race")) {
- return false;
- }
-
- // Older version of the spec had a resolver object
- // as the arg rather than a function
- return function () {
-
- var resolve = void 0;
-
- var p = new globalObject.Promise(function (r) {
- resolve = r;
- });
-
- if (p) {
- return isFunction(resolve);
- }
-
- return false;
- }();
- }();
-
- // Export the native Promise implementation if it
- // looks like it matches the spec
- if (hasPromiseSupport) {
- return globalObject.Promise;
- }
-
- // Otherwise, return the es6-promise polyfill by @jaffathecake.
- return require("es6-promise").Promise;
-}(); \ No newline at end of file
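The detection logic in the removed promise.js above boils down to checking a handful of static methods and the executor-style constructor. A rough standalone sketch of that idea (not the module's actual export) might be:

```js
// Rough sketch of the feature test above: accept a Promise implementation only
// if it exposes resolve/reject/all/race and calls the executor with a function.
function looksLikeSpecPromise(P) {
  if (typeof P !== 'function') return false;
  if (!('resolve' in P) || !('reject' in P)) return false;
  if (!('all' in P) || !('race' in P)) return false;
  let resolveFn;
  const probe = new P(function (r) { resolveFn = r; });
  return Boolean(probe) && typeof resolveFn === 'function';
}

console.log(looksLikeSpecPromise(Promise)); // true on modern runtimes
```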
diff --git a/node_modules/es6-promisify/dist/promisify.js b/node_modules/es6-promisify/dist/promisify.js
deleted file mode 100644
index ce38041b0..000000000
--- a/node_modules/es6-promisify/dist/promisify.js
+++ /dev/null
@@ -1,85 +0,0 @@
-"use strict";
-
-/* global module, require */
-module.exports = function () {
-
- "use strict";
-
- // Get a promise object. This may be native, or it may be polyfilled
-
- var ES6Promise = require("./promise.js");
-
- /**
- * thatLooksLikeAPromiseToMe()
- *
- * Duck-types a promise.
- *
- * @param {object} o
- * @return {bool} True if this resembles a promise
- */
- function thatLooksLikeAPromiseToMe(o) {
- return o && typeof o.then === "function" && typeof o.catch === "function";
- }
-
- /**
- * promisify()
- *
- * Transforms a callback-based function -- func(arg1, arg2 .. argN, callback) -- into a
- * function that returns an ES6-compatible Promise. Promisify provides a default callback of
- * the form (error, result) and rejects when `error` is truthy. You can also supply a
- * settings object as the second argument.
- *
- * @param {function} original - The function to promisify
- * @param {object} settings - Settings object
- * @param {object} settings.thisArg - A `this` context to use. If not set, assume `settings` _is_ `thisArg`
- * @param {bool} settings.multiArgs - Should multiple arguments be returned as an array?
- * @return {function} A promisified version of `original`
- */
- return function promisify(original, settings) {
-
- return function () {
- for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
- args[_key] = arguments[_key];
- }
-
- var returnMultipleArguments = settings && settings.multiArgs;
-
- var target = void 0;
- if (settings && settings.thisArg) {
- target = settings.thisArg;
- } else if (settings) {
- target = settings;
- }
-
- // Return the promisified function
- return new ES6Promise(function (resolve, reject) {
-
- // Append the callback bound to the context
- args.push(function callback(err) {
-
- if (err) {
- return reject(err);
- }
-
- for (var _len2 = arguments.length, values = Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
- values[_key2 - 1] = arguments[_key2];
- }
-
- if (false === !!returnMultipleArguments) {
- return resolve(values[0]);
- }
-
- resolve(values);
- });
-
- // Call the function
- var response = original.apply(target, args);
-
- // If it looks like original already returns a promise,
- // then just resolve with that promise. Hopefully, the callback function we added will just be ignored.
- if (thatLooksLikeAPromiseToMe(response)) {
- resolve(response);
- }
- });
- };
- };
-}(); \ No newline at end of file
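The `settings.thisArg` option handled in the removed promisify.js above preserves a method's `this` binding. A small usage sketch follows; the `counter` object is hypothetical and not from the deleted files.

```js
// Hypothetical sketch of the `thisArg` setting: bind the promisified call to an
// object so `this` inside the original method still points at that object.
const promisify = require("es6-promisify");

const counter = {
  total: 0,
  addLater(amount, cb) {
    setTimeout(() => {
      this.total += amount;
      cb(null, this.total);
    }, 10);
  }
};

const addLater = promisify(counter.addLater, { thisArg: counter });
addLater(5).then(total => console.log(total)); // 5
```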
diff --git a/node_modules/es6-promisify/package.json b/node_modules/es6-promisify/package.json
deleted file mode 100644
index c66b9667f..000000000
--- a/node_modules/es6-promisify/package.json
+++ /dev/null
@@ -1,72 +0,0 @@
-{
- "_from": "es6-promisify@^5.0.0",
- "_id": "es6-promisify@5.0.0",
- "_inBundle": false,
- "_integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=",
- "_location": "/es6-promisify",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "es6-promisify@^5.0.0",
- "name": "es6-promisify",
- "escapedName": "es6-promisify",
- "rawSpec": "^5.0.0",
- "saveSpec": null,
- "fetchSpec": "^5.0.0"
- },
- "_requiredBy": [
- "/agent-base"
- ],
- "_resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz",
- "_shasum": "5109d62f3e56ea967c4b63505aef08291c8a5203",
- "_spec": "es6-promisify@^5.0.0",
- "_where": "/Users/rebecca/code/npm/node_modules/agent-base",
- "author": {
- "name": "Mike Hall",
- "email": "mikehall314@gmail.com"
- },
- "bugs": {
- "url": "http://github.com/digitaldesignlabs/es6-promisify/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "es6-promise": "^4.0.3"
- },
- "deprecated": false,
- "description": "Converts callback-based functions to ES6 Promises",
- "devDependencies": {
- "babel-preset-es2015": "^6.9.0",
- "eslint": "^2.13.1",
- "gulp": "^3.9.1",
- "gulp-babel": "^6.1.2",
- "nodeunit": "^0.10.0"
- },
- "files": [
- "dist/promisify.js",
- "dist/promise.js"
- ],
- "greenkeeper": {
- "ignore": [
- "eslint"
- ]
- },
- "homepage": "https://github.com/digitaldesignlabs/es6-promisify#readme",
- "keywords": [
- "promises",
- "es6",
- "promisify"
- ],
- "license": "MIT",
- "main": "dist/promisify.js",
- "name": "es6-promisify",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/digitaldesignlabs/es6-promisify.git"
- },
- "scripts": {
- "pretest": "./node_modules/eslint/bin/eslint.js ./lib/*.js ./tests/*.js",
- "test": "gulp && nodeunit tests"
- },
- "version": "5.0.0"
-}
diff --git a/node_modules/genfun/CHANGELOG.md b/node_modules/genfun/CHANGELOG.md
deleted file mode 100644
index 461e22fc5..000000000
--- a/node_modules/genfun/CHANGELOG.md
+++ /dev/null
@@ -1,53 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/genfun/compare/v4.0.1...v5.0.0) (2017-12-12)
-
-
-### Bug Fixes
-
-* **license:** relicense to MIT ([857e720](https://github.com/zkat/genfun/commit/857e720))
-* **platforms:** drop support for node 4 and 7 ([2cdbe32](https://github.com/zkat/genfun/commit/2cdbe32))
-
-
-### BREAKING CHANGES
-
-* **platforms:** node 4 and node 7 are no longer officially supported
-* **license:** license changed from CC0-1.0 to MIT
-
-
-
-<a name="4.0.1"></a>
-## [4.0.1](https://github.com/zkat/genfun/compare/v4.0.0...v4.0.1) (2017-04-16)
-
-
-### Bug Fixes
-
-* **cache:** stop side-effecting cached applicableMethods ([09cee84](https://github.com/zkat/genfun/commit/09cee84))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/genfun/compare/v3.2.1...v4.0.0) (2017-04-16)
-
-
-### Bug Fixes
-
-* **genfun:** make internal properties private ([e855c72](https://github.com/zkat/genfun/commit/e855c72))
-* **perf:** short-circuit default methods ([7a9b06b](https://github.com/zkat/genfun/commit/7a9b06b))
-
-
-### Features
-
-* **addMethod:** default-method shortcut syntax for gf.add ([40a3ebb](https://github.com/zkat/genfun/commit/40a3ebb))
-* **genfun:** default method and name opts + default shortcut ([0a40939](https://github.com/zkat/genfun/commit/0a40939))
-* **genfun:** now with inheritance! ([74abcc2](https://github.com/zkat/genfun/commit/74abcc2))
-* **nextMethod:** arg-based nextMethod calls ([17a0b35](https://github.com/zkat/genfun/commit/17a0b35))
-* **noNext:** allow users to disable nextMethod functionality ([cc00d95](https://github.com/zkat/genfun/commit/cc00d95))
-
-
-### BREAKING CHANGES
-
-* **nextMethod:** next methods are now passed in as arguments. context/callNextMethod/etc are all gone.
diff --git a/node_modules/genfun/LICENSE b/node_modules/genfun/LICENSE
deleted file mode 100644
index ab41caa64..000000000
--- a/node_modules/genfun/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-Copyright (c) 2017 Kat Marchán
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
-OR OTHER DEALINGS IN THE SOFTWARE.
-
diff --git a/node_modules/genfun/README.md b/node_modules/genfun/README.md
deleted file mode 100644
index 01417ff87..000000000
--- a/node_modules/genfun/README.md
+++ /dev/null
@@ -1,223 +0,0 @@
-# Genfun [![Travis](https://img.shields.io/travis/zkat/genfun.svg)](https://travis-ci.org/zkat/genfun) [![npm](https://img.shields.io/npm/v/genfun.svg)](https://npm.im/genfun) [![npm](https://img.shields.io/npm/l/genfun.svg)](https://npm.im/genfun)
-
-[`genfun`](https://github.com/zkat/genfun) is a Javascript library that lets you
-define generic functions: regular-seeming functions that can be invoked just
-like any other function, but that automatically dispatch methods based on the
-combination of arguments passed to it when it's called, also known as multiple
-dispatch.
-
-It was inspired by [Slate](http://slatelanguage.org/),
-[CLOS](http://en.wikipedia.org/wiki/CLOS) and
-[Sheeple](http://github.com/zkat/sheeple).
-
-## Install
-
-`$ npm install genfun`
-
-## Table of Contents
-
-* [Example](#example)
-* [API](#api)
- * [`Genfun()`](#genfun)
- * [`gf.add()`](#addMethod)
- * [`Genfun.callNextMethod()`](#callNextMethod)
- * [`Genfun.noApplicableMethod()`](#noApplicableMethod)
-* [Performance](#performance)
-
-### Example
-
-Various examples are available to look at in the examples/ folder included in
-this project. Most examples are also runnable by just invoking them with node.
-
-```javascript
-import Genfun from "genfun"
-
-class Person {}
-class Dog {}
-
-const frobnicate = Genfun()
-
-frobnicate.add([Person], (person) => {
- console.log('Got a person!')
-})
-
-frobnicate.add([Dog], (dog) => {
- console.log('Got a dog!')
-})
-
-frobnicate.add([String, Person, Dog], (greeting, person, dog) => {
- console.log(person, ' greets ', dog, ', \'' + greeting + '\'')
-})
-
-const person = new Person()
-const dog = new Dog()
-
-frobnicate(person) // Got a person!
-frobnicate(dog) // Got a dog!
-frobnicate('Hi, dog!', person, dog); // {} greets {}, 'Hi, dog!'
-```
-
-### API
-
-The basic API for `Genfun` is fairly simple: You create a new `genfun` by
-calling `Genfun()`, and add methods to them. Then you call the `genfun` object
-like a regular function, and it takes care of dispatching the appropriate
-methods!
-
-#### `Genfun()`
-
-Takes no arguments. Simply creates a new `genfun`. A `genfun` is a regular
-function object with overridden function call/dispatch behavior.
-
-When called, it will look at its arguments and determine if a matching method
-has been defined that applies to **all** arguments passed in, considered
-together.
-
-New methods may be added to the `genfun` object with [`gf.add()`](#addMethod).
-
-If no method is found, or none has been defined, it will invoke
-[`Genfun.noApplicableMethod`](#noApplicableMethod) with the appropriate
-arguments.
-
-Genfuns preserve the value of `this` if invoked using `.call` or `.apply`.
-
-##### Example
-
-```javascript
-var gf = Genfun()
-
-//... add some methods ..
-
-// These calls are all identical.
-gf(1, 2, 3)
-gf.call(null, 1, 2, 3)
-gf.apply(null, [1, 2, 3])
-```
-
-#### <a name="addMethod"></a> `gf.add(<selector>, <body>)`
-
-Adds a new method to `gf` and returns `gf` to allow chaining multiple `add`s.
-
-`<selector>` must be an array of objects that will receive new `Role`s (dispatch
-positions) for the method. If an object in the selector is a function, its
-`.prototype` field will receive the new `Role`. The array must not contain any
-frozen objects.
-
-When a `genfun` is called (like a function), it will look at its set of added
-methods and, based on the `Role`s assigned, and corresponding prototype chains,
-will determine which method, if any, will be invoked. On invocation, a method's
-`<body>` argument will be called with the arguments passed to the `genfun`,
-including its `this` and `arguments` values.
-
-Within the `<body>`, [`Genfun.callNextMethod`](#callNextMethod) may be called.
-
-##### Example
-
-```javascript
-
-var numStr = Genfun()
-
-numStr.add([String, Number], function (str, num) {
- console.log('got a str:', str, 'and a num: ', num)
-})
-
-numStr.add([Number, String], function (num, str) {
- console.log('got a num:', num, 'and a str:', str)
-})
-
-```
-
-#### <a name="callNextMethod"></a> `Genfun.callNextMethod([...<arguments>])`
-
-**NOTE**: This function can only be called synchronously. To call it
-asynchronously (for example, in a `Promise` or in a callback), use
-[`getContext`](#getContext)
-
-Calls the "next" applicable method in the method chain. Can only be called
-within the body of a method.
-
-If no arguments are given, `callNextMethod` will pass the current method's
-original arguments to the next method.
-
-If arguments are passed to `callNextMethod`, it will invoke the next applicable
-method (based on the **original** method list calculation), with **the given
-arguments**, even if they would otherwise not have triggered that method.
-
-Returns whatever value the next method returns.
-
-There **must** be a next method available when invoked. This function **will
-not** call `noApplicableMethod` when it runs out of methods to call. It will
-instead throw an error.
-
-##### Example
-
-```javascript
-class Foo {}
-class Bar extends Foo {}
-
-var cnm = Genfun()
-
-cnm.add([Foo], function (foo) {
- console.log('calling the method on Foo with', foo)
- return foo
-})
-
-cnm.add([Bar], function (bar) {
- console.log('calling the method on Bar with', bar)
- return Genfun.callNextMethod('some other value!')
-})
-
-cnm(new Bar())
-// calling the method on Bar with {}
-// calling the method on Foo with "some other value!"
-// => 'some other value!'
-```
-
-#### <a name="getContext"></a> `Genfun.getContext()`
-
-The `context` returned by this function will have a `callNextMethod` method
-which can be used to invoke the correct next method even during asynchronous
-calls (for example, when used in a callback or a `Promise`).
-
-This function must be called synchronously within the body of the method before
-any asynchronous calls, and will error if invoked outside the context of a
-method call.
-
-##### Example
-
-```javascript
-someGenfun.add([MyThing], function (thing) {
- const ctx = Genfun.getContext()
- return somePromisedCall(thing).then(res => ctx.callNextMethod(res))
-})
-```
-
-#### <a name="noApplicableMethod"></a> `Genfun.noApplicableMethod(<gf>, <this>, <args>)`
-
-`Genfun.noApplicableMethod` is a `genfun` itself, which is called whenever **any `genfun`** fails to find a matching method for its given arguments.
-
-It will be called with the `genfun` as its first argument, then the `this`
-value, and then the arguments it was called with.
-
-By default, this will simply throw a NoApplicableMethod error.
-
-Users may override this behavior for particular `genfun` and `this`
-combinations, although `args` will always be an `Array`. The value returned from
-the dispatched `noApplicableMethod` method will be returned by `genfun` as if it
-had been its original method. Comparable to [Ruby's
-`method_missing`](http://ruby-doc.org/core-2.1.0/BasicObject.html#method-i-method_missing).
-
-### Performance
-
-`Genfun` pulls a few caching tricks to make sure dispatch, especially for common
-cases, is as fast as possible.
-
-How fast? Well, not much slower than native methods:
-
-```
-Regular function: 30.402ms
-Native method: 28.109ms
-Singly-dispatched genfun: 64.467ms
-Double-dispatched genfun: 70.052ms
-Double-dispatched genfun with string primitive: 76.742ms
-```
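The removed README explains that `Genfun.noApplicableMethod` can be overridden for a particular genfun. A hedged sketch of that override follows; the `describe` genfun is hypothetical and not from the deleted files.

```js
// Hedged sketch: override noApplicableMethod for one particular genfun so
// unmatched calls fall back to a default result instead of throwing.
const Genfun = require('genfun')

const describe = Genfun()
describe.add([Number], n => `a number: ${n}`)

Genfun.noApplicableMethod.add([describe], (gf, thisArg, args) => {
  return `no idea what ${String(args[0])} is`
})

console.log(describe(42)) // 'a number: 42'
console.log(describe({})) // 'no idea what [object Object] is'
```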
diff --git a/node_modules/genfun/lib/genfun.js b/node_modules/genfun/lib/genfun.js
deleted file mode 100644
index c6ba01ca5..000000000
--- a/node_modules/genfun/lib/genfun.js
+++ /dev/null
@@ -1,296 +0,0 @@
-'use strict'
-
-const Method = require('./method')
-const Role = require('./role')
-const util = require('./util')
-
-const kCache = Symbol('cache')
-const kDefaultMethod = Symbol('defaultMethod')
-const kMethods = Symbol('methods')
-const kNoNext = Symbol('noNext')
-
-module.exports = function genfun (opts) {
- function gf () {
- if (!gf[kMethods].length && gf[kDefaultMethod]) {
- return gf[kDefaultMethod].func.apply(this, arguments)
- } else {
- return gf.applyGenfun(this, arguments)
- }
- }
- Object.setPrototypeOf(gf, Genfun.prototype)
- gf[kMethods] = []
- gf[kCache] = {key: [], methods: [], state: STATES.UNINITIALIZED}
- if (opts && typeof opts === 'function') {
- gf.add(opts)
- } else if (opts && opts.default) {
- gf.add(opts.default)
- }
- if (opts && opts.name) {
- Object.defineProperty(gf, 'name', {
- value: opts.name
- })
- }
- if (opts && opts.noNextMethod) {
- gf[kNoNext] = true
- }
- return gf
-}
-
-class Genfun extends Function {}
-Genfun.prototype.isGenfun = true
-
-const STATES = {
- UNINITIALIZED: 0,
- MONOMORPHIC: 1,
- POLYMORPHIC: 2,
- MEGAMORPHIC: 3
-}
-
-const MAX_CACHE_SIZE = 32
-
-/**
- * Defines a method on a generic function.
- *
- * @function
- * @param {Array-like} selector - Selector array for dispatching the method.
- * @param {Function} methodFunction - Function to execute when the method
- * successfully dispatches.
- */
-Genfun.prototype.add = function addMethod (selector, func) {
- if (!func && typeof selector === 'function') {
- func = selector
- selector = []
- }
- selector = [].slice.call(selector)
- for (var i = 0; i < selector.length; i++) {
- if (!selector.hasOwnProperty(i)) {
- selector[i] = Object.prototype
- }
- }
- this[kCache] = {key: [], methods: [], state: STATES.UNINITIALIZED}
- let method = new Method(this, selector, func)
- if (selector.length) {
- this[kMethods].push(method)
- } else {
- this[kDefaultMethod] = method
- }
- return this
-}
-
-/**
- * Removes a previously-defined method on `genfun` that matches
- * `selector` exactly.
- *
- * @function
- * @param {Genfun} genfun - Genfun to remove a method from.
- * @param {Array-like} selector - Objects to match on when finding a
- * method to remove.
- */
-Genfun.prototype.rm = function removeMethod () {
- throw new Error('not yet implemented')
-}
-
-/**
- * Returns true if there are methods that apply to the given arguments on
- * `genfun`. Additionally, makes sure the cache is warmed up for the given
- * arguments.
- *
- */
-Genfun.prototype.hasMethod = function hasMethod () {
- const methods = this.getApplicableMethods(arguments)
- return !!(methods && methods.length)
-}
-
-/**
- * This generic function is called when `genfun` has been called and no
- * applicable method was found. The default method throws an `Error`.
- *
- * @function
- * @param {Genfun} genfun - Generic function instance that was called.
- * @param {*} newthis - value of `this` the genfun was called with.
- * @param {Array} callArgs - Arguments the genfun was called with.
- */
-module.exports.noApplicableMethod = module.exports()
-module.exports.noApplicableMethod.add([], (gf, thisArg, args) => {
- let msg =
- 'No applicable method found when called with arguments of types: (' +
- [].map.call(args, (arg) => {
- return (/\[object ([a-zA-Z0-9]+)\]/)
- .exec(({}).toString.call(arg))[1]
- }).join(', ') + ')'
- let err = new Error(msg)
- err.genfun = gf
- err.thisArg = thisArg
- err.args = args
- throw err
-})
-
-/*
- * Internal
- */
-Genfun.prototype.applyGenfun = function applyGenfun (newThis, args) {
- let applicableMethods = this.getApplicableMethods(args)
- if (applicableMethods.length === 1 || this[kNoNext]) {
- return applicableMethods[0].func.apply(newThis, args)
- } else if (applicableMethods.length > 1) {
- let idx = 0
- const nextMethod = function nextMethod () {
- if (arguments.length) {
- // Replace args if passed in explicitly
- args = arguments
- Array.prototype.push.call(args, nextMethod)
- }
- const next = applicableMethods[idx++]
- if (idx >= applicableMethods.length) {
- Array.prototype.pop.call(args)
- }
- return next.func.apply(newThis, args)
- }
- Array.prototype.push.call(args, nextMethod)
- return nextMethod()
- } else {
- return module.exports.noApplicableMethod(this, newThis, args)
- }
-}
-
-Genfun.prototype.getApplicableMethods = function getApplicableMethods (args) {
- if (!args.length || !this[kMethods].length) {
- return this[kDefaultMethod] ? [this[kDefaultMethod]] : []
- }
- let applicableMethods
- let maybeMethods = cachedMethods(this, args)
- if (maybeMethods) {
- applicableMethods = maybeMethods
- } else {
- applicableMethods = computeApplicableMethods(this, args)
- cacheArgs(this, args, applicableMethods)
- }
- return applicableMethods
-}
-
-function cacheArgs (genfun, args, methods) {
- if (genfun[kCache].state === STATES.MEGAMORPHIC) { return }
- var key = []
- var proto
- for (var i = 0; i < args.length; i++) {
- proto = cacheableProto(genfun, args[i])
- if (proto) {
- key[i] = proto
- } else {
- return null
- }
- }
- genfun[kCache].key.unshift(key)
- genfun[kCache].methods.unshift(methods)
- if (genfun[kCache].key.length === 1) {
- genfun[kCache].state = STATES.MONOMORPHIC
- } else if (genfun[kCache].key.length < MAX_CACHE_SIZE) {
- genfun[kCache].state = STATES.POLYMORPHIC
- } else {
- genfun[kCache].state = STATES.MEGAMORPHIC
- }
-}
-
-function cacheableProto (genfun, arg) {
- var dispatchable = util.dispatchableObject(arg)
- if (Object.hasOwnProperty.call(dispatchable, Role.roleKeyName)) {
- for (var j = 0; j < dispatchable[Role.roleKeyName].length; j++) {
- var role = dispatchable[Role.roleKeyName][j]
- if (role.method.genfun === genfun) {
- return null
- }
- }
- }
- return Object.getPrototypeOf(dispatchable)
-}
-
-function cachedMethods (genfun, args) {
- if (genfun[kCache].state === STATES.UNINITIALIZED ||
- genfun[kCache].state === STATES.MEGAMORPHIC) {
- return null
- }
- var protos = []
- var proto
- for (var i = 0; i < args.length; i++) {
- proto = cacheableProto(genfun, args[i])
- if (proto) {
- protos[i] = proto
- } else {
- return
- }
- }
- for (i = 0; i < genfun[kCache].key.length; i++) {
- if (matchCachedMethods(genfun[kCache].key[i], protos)) {
- return genfun[kCache].methods[i]
- }
- }
-}
-
-function matchCachedMethods (key, protos) {
- if (key.length !== protos.length) { return false }
- for (var i = 0; i < key.length; i++) {
- if (key[i] !== protos[i]) {
- return false
- }
- }
- return true
-}
-
-function computeApplicableMethods (genfun, args) {
- args = [].slice.call(args)
- let discoveredMethods = []
- function findAndRankRoles (object, hierarchyPosition, index) {
- var roles = Object.hasOwnProperty.call(object, Role.roleKeyName)
- ? object[Role.roleKeyName]
- : []
- roles.forEach(role => {
- if (role.method.genfun === genfun && index === role.position) {
- if (discoveredMethods.indexOf(role.method) < 0) {
- Method.clearRank(role.method)
- discoveredMethods.push(role.method)
- }
- Method.setRankHierarchyPosition(role.method, index, hierarchyPosition)
- }
- })
- // When a discovered method would receive more arguments than
- // were specialized, we pretend all extra arguments have a role
- // on Object.prototype.
- if (util.isObjectProto(object)) {
- discoveredMethods.forEach(method => {
- if (method.minimalSelector <= index) {
- Method.setRankHierarchyPosition(method, index, hierarchyPosition)
- }
- })
- }
- }
- args.forEach((arg, index) => {
- getPrecedenceList(util.dispatchableObject(arg))
- .forEach((obj, hierarchyPosition) => {
- findAndRankRoles(obj, hierarchyPosition, index)
- })
- })
- let applicableMethods = discoveredMethods.filter(method => {
- return (args.length === method._rank.length &&
- Method.isFullySpecified(method))
- })
- applicableMethods.sort((a, b) => Method.score(a) - Method.score(b))
- if (genfun[kDefaultMethod]) {
- applicableMethods.push(genfun[kDefaultMethod])
- }
- return applicableMethods
-}
-
-/*
- * Helper function for getting an array representing the entire
- * inheritance/precedence chain for an object by navigating its
- * prototype pointers.
- */
-function getPrecedenceList (obj) {
- var precedenceList = []
- var nextObj = obj
- while (nextObj) {
- precedenceList.push(nextObj)
- nextObj = Object.getPrototypeOf(nextObj)
- }
- return precedenceList
-}
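The constructor in the removed genfun.js above also accepts a `default` method and a `name` option. A brief hedged sketch of that shortcut follows; the `log` genfun is hypothetical.

```js
// Hedged sketch of the opts handling above: a default method plus a name.
const genfun = require('genfun')

const log = genfun({
  name: 'log',                  // copied onto the function's `name` property
  default: x => `default: ${x}` // stored as the default method
})

console.log(log.name)        // 'log'
console.log(log('anything')) // 'default: anything' (no specialized methods added yet)
```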
diff --git a/node_modules/genfun/lib/method.js b/node_modules/genfun/lib/method.js
deleted file mode 100644
index eddb7d325..000000000
--- a/node_modules/genfun/lib/method.js
+++ /dev/null
@@ -1,82 +0,0 @@
-'use strict'
-
-/*
- * Method
- *
- * Methods are added, conceptually, to Genfuns, not to objects
- * themselves, although the Genfun object does not have any pointers to
- * method objects.
- *
- * The _rank vector is an internal datastructure used during dispatch
- * to figure out whether a method is applicable, and if so, how to
- * order multiple discovered methods.
- *
- * Right now, the score method on Method does not take into account any
- * ordering, and all arguments to a method are ranked equally for the
- * sake of ordering.
- *
- */
-const Role = require('./role')
-const util = require('./util')
-
-module.exports = Method
-function Method (genfun, selector, func) {
- var method = this
- method.genfun = genfun
- method.func = func
- method._rank = []
- method.minimalSelector = 0
-
- const tmpSelector = selector.length ? selector : [Object.prototype]
- for (var object, i = tmpSelector.length - 1; i >= 0; i--) {
- object = Object.hasOwnProperty.call(tmpSelector, i)
- ? tmpSelector[i]
- : Object.prototype
- object = util.dispatchableObject(object)
- if (
- typeof object === 'function' &&
- !object.isGenfun
- ) {
- object = object.prototype
- }
- if (i > 0 &&
- !method.minimalSelector &&
- util.isObjectProto(object)) {
- continue
- } else {
- method.minimalSelector++
- if (!Object.hasOwnProperty.call(object, Role.roleKeyName)) {
- // Object.defineProperty is JS 1.8.0+
- Object.defineProperty(
- object, Role.roleKeyName, {value: [], enumerable: false})
- }
- // XXX HACK - no method replacement now, so we just shove
- // it in a place where it'll always show up first. This
- // would probably seriously break method combination if we
- // had it.
- object[Role.roleKeyName].unshift(new Role(method, i))
- }
- }
-}
-
-Method.setRankHierarchyPosition = (method, index, hierarchyPosition) => {
- method._rank[index] = hierarchyPosition
-}
-
-Method.clearRank = method => {
- method._rank = []
-}
-
-Method.isFullySpecified = method => {
- for (var i = 0; i < method.minimalSelector; i++) {
- if (!method._rank.hasOwnProperty(i)) {
- return false
- }
- }
- return true
-}
-
-Method.score = method => {
- // TODO - this makes all items in the list equal
- return method._rank.reduce((a, b) => a + b, 0)
-}
diff --git a/node_modules/genfun/lib/role.js b/node_modules/genfun/lib/role.js
deleted file mode 100644
index 69e35c2e5..000000000
--- a/node_modules/genfun/lib/role.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-
-/*
- * Role
- *
- * A Role encapsulates a particular object's 'role' in a method's
- * dispatch. They are added directly to the selector for a method, and thus
- * do not prevent the objects a method was defined on from being garbage
- * collected.
- */
-module.exports = Role
-function Role (method, position) {
- this.method = method
- this.position = position
-}
-
-Role.roleKeyName = Symbol('roles')
diff --git a/node_modules/genfun/lib/util.js b/node_modules/genfun/lib/util.js
deleted file mode 100644
index 23770629d..000000000
--- a/node_modules/genfun/lib/util.js
+++ /dev/null
@@ -1,37 +0,0 @@
-'use strict'
-
-module.exports.isObjectProto = isObjectProto
-function isObjectProto (obj) {
- return obj === Object.prototype
-}
-
-const _null = {}
-const _undefined = {}
-const Bool = Boolean
-const Num = Number
-const Str = String
-const boolCache = {
- true: new Bool(true),
- false: new Bool(false)
-}
-const numCache = {}
-const strCache = {}
-
-/*
- * Returns a useful dispatch object for value using a process similar to
- * the ToObject operation specified in http://es5.github.com/#x9.9
- */
-module.exports.dispatchableObject = dispatchableObject
-function dispatchableObject (value) {
- // To shut up jshint, which doesn't let me turn off this warning.
- const Obj = Object
- if (value === null) { return _null }
- if (value === undefined) { return _undefined }
- switch (typeof value) {
- case 'object': return value
- case 'boolean': return boolCache[value]
- case 'number': return numCache[value] || (numCache[value] = new Num(value))
- case 'string': return strCache[value] || (strCache[value] = new Str(value))
- default: return new Obj(value)
- }
-}
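The `dispatchableObject` helper in the removed util.js above boxes primitives so dispatch can walk a prototype chain. A hedged sketch of what it does follows; this is an internal helper, not a public API.

```js
// Hedged sketch (internal helper, illustrative only): primitives are boxed per
// value, and null/undefined get stand-in objects instead of throwing.
const util = require('genfun/lib/util')

const boxed = util.dispatchableObject('hello')
console.log(typeof boxed)                                      // 'object'
console.log(Object.getPrototypeOf(boxed) === String.prototype) // true
console.log(util.dispatchableObject('hello') === boxed)        // true (cached per value)
console.log(util.dispatchableObject(null))                     // a stand-in object, no throw
```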
diff --git a/node_modules/genfun/package.json b/node_modules/genfun/package.json
deleted file mode 100644
index 4a557eb45..000000000
--- a/node_modules/genfun/package.json
+++ /dev/null
@@ -1,79 +0,0 @@
-{
- "_from": "genfun@^5.0.0",
- "_id": "genfun@5.0.0",
- "_inBundle": false,
- "_integrity": "sha512-KGDOARWVga7+rnB3z9Sd2Letx515owfk0hSxHGuqjANb1M+x2bGZGqHLiozPsYMdM2OubeMni/Hpwmjq6qIUhA==",
- "_location": "/genfun",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "genfun@^5.0.0",
- "name": "genfun",
- "escapedName": "genfun",
- "rawSpec": "^5.0.0",
- "saveSpec": null,
- "fetchSpec": "^5.0.0"
- },
- "_requiredBy": [
- "/protoduck"
- ],
- "_resolved": "https://registry.npmjs.org/genfun/-/genfun-5.0.0.tgz",
- "_shasum": "9dd9710a06900a5c4a5bf57aca5da4e52fe76537",
- "_spec": "genfun@^5.0.0",
- "_where": "/Users/zkat/Documents/code/work/npm/node_modules/protoduck",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/genfun/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Fast, prototype-friendly multimethods.",
- "devDependencies": {
- "mocha": "^3.2.0",
- "nyc": "^10.2.0",
- "standard": "^10.0.2",
- "standard-version": "^4.0.0",
- "weallbehave": "^1.0.3",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "lib/*.js"
- ],
- "homepage": "http://github.com/zkat/genfun",
- "keywords": [
- "clos",
- "functional",
- "oop",
- "util",
- "object oriented",
- "prototypes",
- "multimethod",
- "generic functions",
- "multiple dispatch",
- "polymorphism",
- "polymorphic",
- "protocols"
- ],
- "license": "MIT",
- "main": "lib/genfun.js",
- "name": "genfun",
- "repository": {
- "type": "git",
- "url": "git://github.com/zkat/genfun.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard lib",
- "release": "standard-version -s",
- "test": "nyc -- mocha --reporter spec",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "5.0.0"
-}
diff --git a/node_modules/http-cache-semantics/README.md b/node_modules/http-cache-semantics/README.md
deleted file mode 100644
index 99069fc8d..000000000
--- a/node_modules/http-cache-semantics/README.md
+++ /dev/null
@@ -1,177 +0,0 @@
-# Can I cache this? [![Build Status](https://travis-ci.org/pornel/http-cache-semantics.svg?branch=master)](https://travis-ci.org/pornel/http-cache-semantics)
-
-`CachePolicy` tells when responses can be reused from a cache, taking into account [HTTP RFC 7234](http://httpwg.org/specs/rfc7234.html) rules for user agents and shared caches. It's aware of many tricky details such as the `Vary` header, proxy revalidation, and authenticated responses.
-
-## Usage
-
-Cacheability of an HTTP response depends on how it was requested, so both `request` and `response` are required to create the policy.
-
-```js
-const policy = new CachePolicy(request, response, options);
-
-if (!policy.storable()) {
- // throw the response away, it's not usable at all
- return;
-}
-
-// Cache the data AND the policy object in your cache
-// (this is pseudocode, roll your own cache (lru-cache package works))
-letsPretendThisIsSomeCache.set(request.url, {policy, response}, policy.timeToLive());
-```
-
-```js
-// And later, when you receive a new request:
-const {policy, response} = letsPretendThisIsSomeCache.get(newRequest.url);
-
-// It's not enough that it exists in the cache, it has to match the new request, too:
-if (policy && policy.satisfiesWithoutRevalidation(newRequest)) {
- // OK, the previous response can be used to respond to the `newRequest`.
- // Response headers have to be updated, e.g. to add Age and remove uncacheable headers.
- response.headers = policy.responseHeaders();
- return response;
-}
-```
-
-It may be surprising, but it's not enough for an HTTP response to be [fresh](#yo-fresh) to satisfy a request. It may need to match request headers specified in `Vary`. Even a matching fresh response may still not be usable if the new request restricted cacheability, etc.
-
-The key method is `satisfiesWithoutRevalidation(newRequest)`, which checks whether the `newRequest` is compatible with the original request and whether all caching conditions are met.
-
-### Constructor options
-
-Request and response must have a `headers` property with all header names in lower case. `url`, `status` and `method` are optional (defaults are any URL, status `200`, and `GET` method).
-
-```js
-const request = {
- url: '/',
- method: 'GET',
- headers: {
- accept: '*/*',
- },
-};
-
-const response = {
- status: 200,
- headers: {
- 'cache-control': 'public, max-age=7234',
- },
-};
-
-const options = {
- shared: true,
- cacheHeuristic: 0.1,
- immutableMinTimeToLive: 24*3600*1000, // 24h
- ignoreCargoCult: false,
-};
-```
-
-If `options.shared` is `true` (default), then the response is evaluated from a perspective of a shared cache (i.e. `private` is not cacheable and `s-maxage` is respected). If `options.shared` is `false`, then the response is evaluated from a perspective of a single-user cache (i.e. `private` is cacheable and `s-maxage` is ignored).
-
-`options.cacheHeuristic` is a fraction of response's age that is used as a fallback cache duration. The default is 0.1 (10%), e.g. if a file hasn't been modified for 100 days, it'll be cached for 100*0.1 = 10 days.
-
-`options.immutableMinTimeToLive` is a number of milliseconds to assume as the default time to cache responses with `Cache-Control: immutable`. Note that [per RFC](http://httpwg.org/http-extensions/immutable.html) these can become stale, so `max-age` still overrides the default.
-
-If `options.ignoreCargoCult` is true, common anti-cache directives will be completely ignored if the non-standard `pre-check` and `post-check` directives are present. These two useless directives are most commonly found in bad StackOverflow answers and PHP's "session limiter" defaults.
-
-### `storable()`
-
-Returns `true` if the response can be stored in a cache. If it's `false` then you MUST NOT store either the request or the response.
-
-### `satisfiesWithoutRevalidation(newRequest)`
-
-This is the most important method. Use this method to check whether the cached response is still fresh in the context of the new request.
-
-If it returns `true`, then the given `request` matches the original response this cache policy has been created with, and the response can be reused without contacting the server. Note that the old response can't be returned without being updated, see `responseHeaders()`.
-
-If it returns `false`, then the response may not be matching at all (e.g. it's for a different URL or method), or may require to be refreshed first (see `revalidationHeaders()`).
-
-### `responseHeaders()`
-
-Returns updated, filtered set of response headers to return to clients receiving the cached response. This function is necessary, because proxies MUST always remove hop-by-hop headers (such as `TE` and `Connection`) and update response's `Age` to avoid doubling cache time.
-
-```js
-cachedResponse.headers = cachePolicy.responseHeaders(cachedResponse);
-```
-
-### `timeToLive()`
-
-Returns approximate time in *milliseconds* until the response becomes stale (i.e. not fresh).
-
-After that time (when `timeToLive() <= 0`) the response might not be usable without revalidation. However, there are exceptions, e.g. a client can explicitly allow stale responses, so always check with `satisfiesWithoutRevalidation()`.
-
-### `toObject()`/`fromObject(json)`
-
-Chances are you'll want to store the `CachePolicy` object along with the cached response. `obj = policy.toObject()` gives a plain JSON-serializable object. `policy = CachePolicy.fromObject(obj)` creates an instance from it.
-
-### Refreshing stale cache (revalidation)
-
-When a cached response has expired, it can be made fresh again by making a request to the origin server. The server may respond with status 304 (Not Modified) without sending the response body again, saving bandwidth.
-
-The following methods help perform the update efficiently and correctly.
-
-#### `revalidationHeaders(newRequest)`
-
-Returns updated, filtered set of request headers to send to the origin server to check if the cached response can be reused. These headers allow the origin server to return status 304 indicating the response is still fresh. All headers unrelated to caching are passed through as-is.
-
-Use this method when updating cache from the origin server.
-
-```js
-updateRequest.headers = cachePolicy.revalidationHeaders(updateRequest);
-```
-
-#### `revalidatedPolicy(revalidationRequest, revalidationResponse)`
-
-Use this method to update the cache after receiving a new response from the origin server. It returns an object with two keys:
-
-* `policy` — A new `CachePolicy` with HTTP headers updated from `revalidationResponse`. You can always replace the old cached `CachePolicy` with the new one.
-* `modified` — Boolean indicating whether the response body has changed.
- * If `false`, then a valid 304 Not Modified response has been received, and you can reuse the old cached response body.
- * If `true`, you should use new response's body (if present), or make another request to the origin server without any conditional headers (i.e. don't use `revalidationHeaders()` this time) to get the new resource.
-
-```js
-// When serving requests from cache:
-const {policy: oldPolicy, response: oldResponse} = letsPretendThisIsSomeCache.get(newRequest.url);
-
-if (!oldPolicy.satisfiesWithoutRevalidation(newRequest)) {
- // Change the request to ask the origin server if the cached response can be used
- newRequest.headers = oldPolicy.revalidationHeaders(newRequest);
-
- // Send request to the origin server. The server may respond with status 304
- const newResponse = await makeRequest(newRequest);
-
- // Create updated policy and combined response from the old and new data
- const {policy, modified} = oldPolicy.revalidatedPolicy(newRequest, newResponse);
- const response = modified ? newResponse : oldResponse;
-
- // Update the cache with the newer/fresher response
- letsPretendThisIsSomeCache.set(newRequest.url, {policy, response}, policy.timeToLive());
-
- // And proceed returning cached response as usual
- response.headers = policy.responseHeaders();
- return response;
-}
-```
-
-# Yo, FRESH
-
-![satisfiesWithoutRevalidation](fresh.jpg)
-
-## Used by
-
-* [ImageOptim API](https://imageoptim.com/api), [make-fetch-happen](https://github.com/zkat/make-fetch-happen), [cacheable-request](https://www.npmjs.com/package/cacheable-request), [npm/registry-fetch](https://github.com/npm/registry-fetch), [etc.](https://github.com/pornel/http-cache-semantics/network/dependents)
-
-## Implemented
-
-* `Cache-Control` response header with all the quirks.
-* `Expires` with check for bad clocks.
-* `Pragma` response header.
-* `Age` response header.
-* `Vary` response header.
-* Default cacheability of statuses and methods.
-* Requests for stale data.
-* Filtering of hop-by-hop headers.
-* Basic revalidation request
-
-## Unimplemented
-
-* Merging of range requests, If-Range (but correctly supports them as non-cacheable)
-* Revalidation of multiple representations
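The `toObject()`/`fromObject()` round trip described in the removed README above can be sketched as follows. The JSON storage shown here is hypothetical, and `request`/`response` reuse the constructor-options example.

```js
// Hedged sketch: persist a CachePolicy next to a cached body, then revive it.
const CachePolicy = require('http-cache-semantics');

const request = { url: '/', method: 'GET', headers: { accept: '*/*' } };
const response = { status: 200, headers: { 'cache-control': 'public, max-age=7234' } };

const policy = new CachePolicy(request, response, { shared: true });

// Store a plain-JSON snapshot of the policy alongside the body...
const stored = JSON.stringify({ policy: policy.toObject(), body: '<html>...</html>' });

// ...and later revive it to answer a matching request from the cache.
const revived = JSON.parse(stored);
const restoredPolicy = CachePolicy.fromObject(revived.policy);

if (restoredPolicy.satisfiesWithoutRevalidation(request)) {
  console.log(restoredPolicy.responseHeaders()); // updated headers, e.g. with Age
}
```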
diff --git a/node_modules/http-cache-semantics/node4/index.js b/node_modules/http-cache-semantics/node4/index.js
deleted file mode 100644
index bcdaebe80..000000000
--- a/node_modules/http-cache-semantics/node4/index.js
+++ /dev/null
@@ -1,559 +0,0 @@
-'use strict';
-// rfc7231 6.1
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-var statusCodeCacheableByDefault = [200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501];
-
-// This implementation does not understand partial responses (206)
-var understoodStatuses = [200, 203, 204, 300, 301, 302, 303, 307, 308, 404, 405, 410, 414, 501];
-
-var hopByHopHeaders = { 'connection': true, 'keep-alive': true, 'proxy-authenticate': true, 'proxy-authorization': true, 'te': true, 'trailer': true, 'transfer-encoding': true, 'upgrade': true };
-var excludedFromRevalidationUpdate = {
- // Since the old body is reused, it doesn't make sense to change properties of the body
- 'content-length': true, 'content-encoding': true, 'transfer-encoding': true,
- 'content-range': true
-};
-
-function parseCacheControl(header) {
- var cc = {};
- if (!header) return cc;
-
- // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),
- // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale
- var parts = header.trim().split(/\s*,\s*/); // TODO: lame parsing
- for (var _iterator = parts, _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? _iterator : _iterator[Symbol.iterator]();;) {
- var _ref;
-
- if (_isArray) {
- if (_i >= _iterator.length) break;
- _ref = _iterator[_i++];
- } else {
- _i = _iterator.next();
- if (_i.done) break;
- _ref = _i.value;
- }
-
- var part = _ref;
-
- var _part$split = part.split(/\s*=\s*/, 2),
- k = _part$split[0],
- v = _part$split[1];
-
- cc[k] = v === undefined ? true : v.replace(/^"|"$/g, ''); // TODO: lame unquoting
- }
-
- return cc;
-}
-
-function formatCacheControl(cc) {
- var parts = [];
- for (var k in cc) {
- var v = cc[k];
- parts.push(v === true ? k : k + '=' + v);
- }
- if (!parts.length) {
- return undefined;
- }
- return parts.join(', ');
-}
-
-module.exports = function () {
- function CachePolicy(req, res) {
- var _ref2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {},
- shared = _ref2.shared,
- cacheHeuristic = _ref2.cacheHeuristic,
- immutableMinTimeToLive = _ref2.immutableMinTimeToLive,
- ignoreCargoCult = _ref2.ignoreCargoCult,
- _fromObject = _ref2._fromObject;
-
- _classCallCheck(this, CachePolicy);
-
- if (_fromObject) {
- this._fromObject(_fromObject);
- return;
- }
-
- if (!res || !res.headers) {
- throw Error("Response headers missing");
- }
- this._assertRequestHasHeaders(req);
-
- this._responseTime = this.now();
- this._isShared = shared !== false;
- this._cacheHeuristic = undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
- this._immutableMinTtl = undefined !== immutableMinTimeToLive ? immutableMinTimeToLive : 24 * 3600 * 1000;
-
- this._status = 'status' in res ? res.status : 200;
- this._resHeaders = res.headers;
- this._rescc = parseCacheControl(res.headers['cache-control']);
- this._method = 'method' in req ? req.method : 'GET';
- this._url = req.url;
- this._host = req.headers.host;
- this._noAuthorization = !req.headers.authorization;
- this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
- this._reqcc = parseCacheControl(req.headers['cache-control']);
-
-        // Assume that if someone uses legacy, non-standard unnecessary options they don't understand caching,
-        // so there's no point strictly adhering to the blindly copy&pasted directives.
- if (ignoreCargoCult && "pre-check" in this._rescc && "post-check" in this._rescc) {
- delete this._rescc['pre-check'];
- delete this._rescc['post-check'];
- delete this._rescc['no-cache'];
- delete this._rescc['no-store'];
- delete this._rescc['must-revalidate'];
- this._resHeaders = Object.assign({}, this._resHeaders, { 'cache-control': formatCacheControl(this._rescc) });
- delete this._resHeaders.expires;
- delete this._resHeaders.pragma;
- }
-
- // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive
- // as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1).
- if (!res.headers['cache-control'] && /no-cache/.test(res.headers.pragma)) {
- this._rescc['no-cache'] = true;
- }
- }
-
- CachePolicy.prototype.now = function now() {
- return Date.now();
- };
-
- CachePolicy.prototype.storable = function storable() {
- // The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
- return !!(!this._reqcc['no-store'] && (
- // A cache MUST NOT store a response to any request, unless:
- // The request method is understood by the cache and defined as being cacheable, and
- 'GET' === this._method || 'HEAD' === this._method || 'POST' === this._method && this._hasExplicitExpiration()) &&
- // the response status code is understood by the cache, and
- understoodStatuses.indexOf(this._status) !== -1 &&
- // the "no-store" cache directive does not appear in request or response header fields, and
- !this._rescc['no-store'] && (
- // the "private" response directive does not appear in the response, if the cache is shared, and
- !this._isShared || !this._rescc.private) && (
- // the Authorization header field does not appear in the request, if the cache is shared,
- !this._isShared || this._noAuthorization || this._allowsStoringAuthenticated()) && (
- // the response either:
-
- // contains an Expires header field, or
- this._resHeaders.expires ||
- // contains a max-age response directive, or
- // contains a s-maxage response directive and the cache is shared, or
- // contains a public response directive.
- this._rescc.public || this._rescc['max-age'] || this._rescc['s-maxage'] ||
- // has a status code that is defined as cacheable by default
- statusCodeCacheableByDefault.indexOf(this._status) !== -1));
- };
-
- CachePolicy.prototype._hasExplicitExpiration = function _hasExplicitExpiration() {
- // 4.2.1 Calculating Freshness Lifetime
- return this._isShared && this._rescc['s-maxage'] || this._rescc['max-age'] || this._resHeaders.expires;
- };
-
- CachePolicy.prototype._assertRequestHasHeaders = function _assertRequestHasHeaders(req) {
- if (!req || !req.headers) {
- throw Error("Request headers missing");
- }
- };
-
- CachePolicy.prototype.satisfiesWithoutRevalidation = function satisfiesWithoutRevalidation(req) {
- this._assertRequestHasHeaders(req);
-
- // When presented with a request, a cache MUST NOT reuse a stored response, unless:
- // the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive,
- // unless the stored response is successfully validated (Section 4.3), and
- var requestCC = parseCacheControl(req.headers['cache-control']);
- if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) {
- return false;
- }
-
- if (requestCC['max-age'] && this.age() > requestCC['max-age']) {
- return false;
- }
-
- if (requestCC['min-fresh'] && this.timeToLive() < 1000 * requestCC['min-fresh']) {
- return false;
- }
-
- // the stored response is either:
- // fresh, or allowed to be served stale
- if (this.stale()) {
- var allowsStale = requestCC['max-stale'] && !this._rescc['must-revalidate'] && (true === requestCC['max-stale'] || requestCC['max-stale'] > this.age() - this.maxAge());
- if (!allowsStale) {
- return false;
- }
- }
-
- return this._requestMatches(req, false);
- };
-
- CachePolicy.prototype._requestMatches = function _requestMatches(req, allowHeadMethod) {
- // The presented effective request URI and that of the stored response match, and
- return (!this._url || this._url === req.url) && this._host === req.headers.host && (
- // the request method associated with the stored response allows it to be used for the presented request, and
- !req.method || this._method === req.method || allowHeadMethod && 'HEAD' === req.method) &&
- // selecting header fields nominated by the stored response (if any) match those presented, and
- this._varyMatches(req);
- };
-
- CachePolicy.prototype._allowsStoringAuthenticated = function _allowsStoringAuthenticated() {
- // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage.
- return this._rescc['must-revalidate'] || this._rescc.public || this._rescc['s-maxage'];
- };
-
- CachePolicy.prototype._varyMatches = function _varyMatches(req) {
- if (!this._resHeaders.vary) {
- return true;
- }
-
- // A Vary header field-value of "*" always fails to match
- if (this._resHeaders.vary === '*') {
- return false;
- }
-
- var fields = this._resHeaders.vary.trim().toLowerCase().split(/\s*,\s*/);
- for (var _iterator2 = fields, _isArray2 = Array.isArray(_iterator2), _i2 = 0, _iterator2 = _isArray2 ? _iterator2 : _iterator2[Symbol.iterator]();;) {
- var _ref3;
-
- if (_isArray2) {
- if (_i2 >= _iterator2.length) break;
- _ref3 = _iterator2[_i2++];
- } else {
- _i2 = _iterator2.next();
- if (_i2.done) break;
- _ref3 = _i2.value;
- }
-
- var name = _ref3;
-
- if (req.headers[name] !== this._reqHeaders[name]) return false;
- }
- return true;
- };
-
- CachePolicy.prototype._copyWithoutHopByHopHeaders = function _copyWithoutHopByHopHeaders(inHeaders) {
- var headers = {};
- for (var name in inHeaders) {
- if (hopByHopHeaders[name]) continue;
- headers[name] = inHeaders[name];
- }
- // 9.1. Connection
- if (inHeaders.connection) {
- var tokens = inHeaders.connection.trim().split(/\s*,\s*/);
- for (var _iterator3 = tokens, _isArray3 = Array.isArray(_iterator3), _i3 = 0, _iterator3 = _isArray3 ? _iterator3 : _iterator3[Symbol.iterator]();;) {
- var _ref4;
-
- if (_isArray3) {
- if (_i3 >= _iterator3.length) break;
- _ref4 = _iterator3[_i3++];
- } else {
- _i3 = _iterator3.next();
- if (_i3.done) break;
- _ref4 = _i3.value;
- }
-
- var _name = _ref4;
-
- delete headers[_name];
- }
- }
- if (headers.warning) {
- var warnings = headers.warning.split(/,/).filter(function (warning) {
- return !/^\s*1[0-9][0-9]/.test(warning);
- });
- if (!warnings.length) {
- delete headers.warning;
- } else {
- headers.warning = warnings.join(',').trim();
- }
- }
- return headers;
- };
-
- CachePolicy.prototype.responseHeaders = function responseHeaders() {
- var headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
- var age = this.age();
-
-        // A cache SHOULD generate a 113 warning if it heuristically chose a freshness
- // lifetime greater than 24 hours and the response's age is greater than 24 hours.
- if (age > 3600 * 24 && !this._hasExplicitExpiration() && this.maxAge() > 3600 * 24) {
- headers.warning = (headers.warning ? `${headers.warning}, ` : '') + '113 - "rfc7234 5.5.4"';
- }
- headers.age = `${Math.round(age)}`;
- return headers;
- };
-
- /**
-     * Value of the Date response header or current time if Date was deemed invalid
- * @return timestamp
- */
-
-
- CachePolicy.prototype.date = function date() {
- var dateValue = Date.parse(this._resHeaders.date);
- var maxClockDrift = 8 * 3600 * 1000;
- if (Number.isNaN(dateValue) || dateValue < this._responseTime - maxClockDrift || dateValue > this._responseTime + maxClockDrift) {
- return this._responseTime;
- }
- return dateValue;
- };
-
- /**
- * Value of the Age header, in seconds, updated for the current time.
- * May be fractional.
- *
- * @return Number
- */
-
-
- CachePolicy.prototype.age = function age() {
- var age = Math.max(0, (this._responseTime - this.date()) / 1000);
- if (this._resHeaders.age) {
- var ageValue = this._ageValue();
- if (ageValue > age) age = ageValue;
- }
-
- var residentTime = (this.now() - this._responseTime) / 1000;
- return age + residentTime;
- };
-
- CachePolicy.prototype._ageValue = function _ageValue() {
- var ageValue = parseInt(this._resHeaders.age);
- return isFinite(ageValue) ? ageValue : 0;
- };
-
- /**
- * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
- *
- * For an up-to-date value, see `timeToLive()`.
- *
- * @return Number
- */
-
-
- CachePolicy.prototype.maxAge = function maxAge() {
- if (!this.storable() || this._rescc['no-cache']) {
- return 0;
- }
-
- // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default
- // so this implementation requires explicit opt-in via public header
- if (this._isShared && this._resHeaders['set-cookie'] && !this._rescc.public && !this._rescc.immutable) {
- return 0;
- }
-
- if (this._resHeaders.vary === '*') {
- return 0;
- }
-
- if (this._isShared) {
- if (this._rescc['proxy-revalidate']) {
- return 0;
- }
- // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field.
- if (this._rescc['s-maxage']) {
- return parseInt(this._rescc['s-maxage'], 10);
- }
- }
-
- // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field.
- if (this._rescc['max-age']) {
- return parseInt(this._rescc['max-age'], 10);
- }
-
- var defaultMinTtl = this._rescc.immutable ? this._immutableMinTtl : 0;
-
- var dateValue = this.date();
- if (this._resHeaders.expires) {
- var expires = Date.parse(this._resHeaders.expires);
- // A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired").
- if (Number.isNaN(expires) || expires < dateValue) {
- return 0;
- }
- return Math.max(defaultMinTtl, (expires - dateValue) / 1000);
- }
-
- if (this._resHeaders['last-modified']) {
- var lastModified = Date.parse(this._resHeaders['last-modified']);
- if (isFinite(lastModified) && dateValue > lastModified) {
- return Math.max(defaultMinTtl, (dateValue - lastModified) / 1000 * this._cacheHeuristic);
- }
- }
-
- return defaultMinTtl;
- };
-
- CachePolicy.prototype.timeToLive = function timeToLive() {
- return Math.max(0, this.maxAge() - this.age()) * 1000;
- };
-
- CachePolicy.prototype.stale = function stale() {
- return this.maxAge() <= this.age();
- };
-
- CachePolicy.fromObject = function fromObject(obj) {
- return new this(undefined, undefined, { _fromObject: obj });
- };
-
- CachePolicy.prototype._fromObject = function _fromObject(obj) {
- if (this._responseTime) throw Error("Reinitialized");
- if (!obj || obj.v !== 1) throw Error("Invalid serialization");
-
- this._responseTime = obj.t;
- this._isShared = obj.sh;
- this._cacheHeuristic = obj.ch;
- this._immutableMinTtl = obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000;
- this._status = obj.st;
- this._resHeaders = obj.resh;
- this._rescc = obj.rescc;
- this._method = obj.m;
- this._url = obj.u;
- this._host = obj.h;
- this._noAuthorization = obj.a;
- this._reqHeaders = obj.reqh;
- this._reqcc = obj.reqcc;
- };
-
- CachePolicy.prototype.toObject = function toObject() {
- return {
- v: 1,
- t: this._responseTime,
- sh: this._isShared,
- ch: this._cacheHeuristic,
- imm: this._immutableMinTtl,
- st: this._status,
- resh: this._resHeaders,
- rescc: this._rescc,
- m: this._method,
- u: this._url,
- h: this._host,
- a: this._noAuthorization,
- reqh: this._reqHeaders,
- reqcc: this._reqcc
- };
- };
-
- /**
-     * Headers for sending to the origin server to revalidate a stale response.
-     * Allows the server to return 304 so that the previous response body can be reused.
- *
- * Hop by hop headers are always stripped.
- * Revalidation headers may be added or removed, depending on request.
- */
-
-
- CachePolicy.prototype.revalidationHeaders = function revalidationHeaders(incomingReq) {
- this._assertRequestHasHeaders(incomingReq);
- var headers = this._copyWithoutHopByHopHeaders(incomingReq.headers);
-
- // This implementation does not understand range requests
- delete headers['if-range'];
-
- if (!this._requestMatches(incomingReq, true) || !this.storable()) {
- // revalidation allowed via HEAD
- // not for the same resource, or wasn't allowed to be cached anyway
- delete headers['if-none-match'];
- delete headers['if-modified-since'];
- return headers;
- }
-
- /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */
- if (this._resHeaders.etag) {
- headers['if-none-match'] = headers['if-none-match'] ? `${headers['if-none-match']}, ${this._resHeaders.etag}` : this._resHeaders.etag;
- }
-
- // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
- var forbidsWeakValidators = headers['accept-ranges'] || headers['if-match'] || headers['if-unmodified-since'] || this._method && this._method != 'GET';
-
- /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.
- Note: This implementation does not understand partial responses (206) */
- if (forbidsWeakValidators) {
- delete headers['if-modified-since'];
-
- if (headers['if-none-match']) {
- var etags = headers['if-none-match'].split(/,/).filter(function (etag) {
- return !/^\s*W\//.test(etag);
- });
- if (!etags.length) {
- delete headers['if-none-match'];
- } else {
- headers['if-none-match'] = etags.join(',').trim();
- }
- }
- } else if (this._resHeaders['last-modified'] && !headers['if-modified-since']) {
- headers['if-modified-since'] = this._resHeaders['last-modified'];
- }
-
- return headers;
- };
-
- /**
-     * Creates a new CachePolicy with information combined from the previous response,
- * and the new revalidation response.
- *
- * Returns {policy, modified} where modified is a boolean indicating
- * whether the response body has been modified, and old cached body can't be used.
- *
- * @return {Object} {policy: CachePolicy, modified: Boolean}
- */
-
-
- CachePolicy.prototype.revalidatedPolicy = function revalidatedPolicy(request, response) {
- this._assertRequestHasHeaders(request);
- if (!response || !response.headers) {
- throw Error("Response headers missing");
- }
-
- // These aren't going to be supported exactly, since one CachePolicy object
- // doesn't know about all the other cached objects.
- var matches = false;
- if (response.status !== undefined && response.status != 304) {
- matches = false;
- } else if (response.headers.etag && !/^\s*W\//.test(response.headers.etag)) {
- // "All of the stored responses with the same strong validator are selected.
- // If none of the stored responses contain the same strong validator,
- // then the cache MUST NOT use the new response to update any stored responses."
- matches = this._resHeaders.etag && this._resHeaders.etag.replace(/^\s*W\//, '') === response.headers.etag;
- } else if (this._resHeaders.etag && response.headers.etag) {
- // "If the new response contains a weak validator and that validator corresponds
- // to one of the cache's stored responses,
- // then the most recent of those matching stored responses is selected for update."
- matches = this._resHeaders.etag.replace(/^\s*W\//, '') === response.headers.etag.replace(/^\s*W\//, '');
- } else if (this._resHeaders['last-modified']) {
- matches = this._resHeaders['last-modified'] === response.headers['last-modified'];
- } else {
- // If the new response does not include any form of validator (such as in the case where
- // a client generates an If-Modified-Since request from a source other than the Last-Modified
- // response header field), and there is only one stored response, and that stored response also
- // lacks a validator, then that stored response is selected for update.
- if (!this._resHeaders.etag && !this._resHeaders['last-modified'] && !response.headers.etag && !response.headers['last-modified']) {
- matches = true;
- }
- }
-
- if (!matches) {
- return {
- policy: new this.constructor(request, response),
- modified: true
- };
- }
-
- // use other header fields provided in the 304 (Not Modified) response to replace all instances
- // of the corresponding header fields in the stored response.
- var headers = {};
- for (var k in this._resHeaders) {
- headers[k] = k in response.headers && !excludedFromRevalidationUpdate[k] ? response.headers[k] : this._resHeaders[k];
- }
-
- var newResponse = Object.assign({}, response, {
- status: this._status,
- method: this._method,
- headers
- });
- return {
- policy: new this.constructor(request, newResponse),
- modified: false
- };
- };
-
- return CachePolicy;
-}(); \ No newline at end of file
diff --git a/node_modules/http-cache-semantics/package.json b/node_modules/http-cache-semantics/package.json
deleted file mode 100644
index 000f357a8..000000000
--- a/node_modules/http-cache-semantics/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "_from": "http-cache-semantics@^3.8.1",
- "_id": "http-cache-semantics@3.8.1",
- "_inBundle": false,
- "_integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==",
- "_location": "/http-cache-semantics",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "http-cache-semantics@^3.8.1",
- "name": "http-cache-semantics",
- "escapedName": "http-cache-semantics",
- "rawSpec": "^3.8.1",
- "saveSpec": null,
- "fetchSpec": "^3.8.1"
- },
- "_requiredBy": [
- "/make-fetch-happen",
- "/npm-profile/make-fetch-happen",
- "/npm-registry-fetch/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz",
- "_shasum": "39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2",
- "_spec": "http-cache-semantics@^3.8.1",
- "_where": "/Users/rebecca/code/npm/node_modules/make-fetch-happen",
- "author": {
- "name": "Kornel Lesiński",
- "email": "kornel@geekhood.net",
- "url": "https://kornel.ski/"
- },
- "bugs": {
- "url": "https://github.com/pornel/http-cache-semantics/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Parses Cache-Control and other headers. Helps building correct HTTP caches and proxies",
- "devDependencies": {
- "babel-cli": "^6.24.1",
- "babel-preset-env": "^1.6.1",
- "mocha": "^3.4.2"
- },
- "files": [
- "node4/index.js"
- ],
- "homepage": "https://github.com/pornel/http-cache-semantics#readme",
- "license": "BSD-2-Clause",
- "main": "node4/index.js",
- "name": "http-cache-semantics",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/pornel/http-cache-semantics.git"
- },
- "scripts": {
- "compile": "babel -d node4/ index.js; babel -d node4/test test",
- "prepublish": "npm run compile",
- "test": "npm run compile; mocha node4/test"
- },
- "version": "3.8.1"
-}
diff --git a/node_modules/http-proxy-agent/.travis.yml b/node_modules/http-proxy-agent/.travis.yml
deleted file mode 100644
index 805d3d50d..000000000
--- a/node_modules/http-proxy-agent/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-sudo: false
-
-language: node_js
-
-node_js:
- - "4"
- - "5"
- - "6"
- - "7"
- - "8"
-
-install:
- - PATH="`npm bin`:`npm bin -g`:$PATH"
- # Install dependencies and build
- - npm install
-
-script:
- # Output useful info for debugging
- - node --version
- - npm --version
- # Run tests
- - npm test
diff --git a/node_modules/http-proxy-agent/History.md b/node_modules/http-proxy-agent/History.md
deleted file mode 100644
index 7e3e1e783..000000000
--- a/node_modules/http-proxy-agent/History.md
+++ /dev/null
@@ -1,101 +0,0 @@
-
-2.1.0 / 2018-03-03
-==================
-
- * Add "engines" to package.json
- * Use `Buffer.from()`
- * Update package.json - outdated debug version (#7)
-
-2.0.0 / 2017-06-27
-==================
-
- * drop support for Node.js < v4
- * update "mocha" to v3
- * update to "agent-base" v4
- * rename http-proxy-agent.js to index.js
- * remove `extend` dependency
- * test Node.js 4, 5, 6, 7 and 8 on Travis-CI
-
-1.0.0 / 2015-07-10
-==================
-
- * http-proxy-agent: use %o debug() formatter
- * http-proxy-agent: remove `defaults` merging logic
- * package: update "agent-base" to v2
- * test: add an assert() call
- * test: use ssl-cert-snakeoil self-signed SSL certs
- * README: add note about node-https-proxy-agent
-
-0.2.7 / 2015-07-06
-==================
-
- * travis: ensure latest npm before testing
- * travis: test node v0.8, v0.10, and v0.12
- * README: use SVG for Travis-CI badge
- * package: update "extend" to v3
- * package: update "mocha" to v2
- * package: update "debug" to v2
-
-0.2.6 / 2014-06-11
-==================
-
- * package: update "debug" to v1.0.0
-
-0.2.5 / 2014-04-09
-==================
-
- * package: update outdated deps
-
-0.2.4 / 2014-01-12
-==================
-
- * http-proxy-agent: fix using the agent after the first tick of creating the ClientRequest
- * http-proxy-agent: use "debug" module
- * History: fix whitespace
-
-0.2.3 / 2013-11-18
-==================
-
- * https-proxy-agent: allow "https" without trailing colon
-
-0.2.2 / 2013-11-16
-==================
-
- * http-proxy-agent: delete the `port` if it matches default port
- * http-proxy-agent: don't mix in the `proxy` opts to the endpoint opts
- * http-proxy-agent: delete `pathname` from the proxy opts as well
-
-0.2.1 / 2013-10-28
-==================
-
- * http-proxy-agent: properly proxy the query-string on request URLs (GH-1)
-
-0.2.0 / 2013-09-16
-==================
-
- * http-proxy-agent: update to `agent-base` v1.0.0 API
- * http-proxy-agent: rename `secure` option to `secureProxy`
- * http-proxy-agent: default the "port" to 80 if not set
- * http-proxy-agent: use "extend" module
- * test: refactor tests
- * test: add 407 auth test
- * test: add bad proxy info test
- * test: add "secureProxy" option tests
-
-0.1.0 / 2013-09-03
-==================
-
- * Add initial "Proxy-Authorization" Basic authentication support
-
-0.0.2 / 2013-07-11
-==================
-
- * test: make tests pass, ensure valid IP addresses are returned
- * test: add tests
- * throw an Error when no proxy info is given
- * add support for passing options to net/tls .connect()
-
-0.0.1 / 2013-07-09
-==================
-
- * Initial release
diff --git a/node_modules/http-proxy-agent/README.md b/node_modules/http-proxy-agent/README.md
deleted file mode 100644
index 4f1fc372b..000000000
--- a/node_modules/http-proxy-agent/README.md
+++ /dev/null
@@ -1,74 +0,0 @@
-http-proxy-agent
-================
-### An HTTP(s) proxy `http.Agent` implementation for HTTP
-[![Build Status](https://travis-ci.org/TooTallNate/node-http-proxy-agent.svg?branch=master)](https://travis-ci.org/TooTallNate/node-http-proxy-agent)
-
-This module provides an `http.Agent` implementation that connects to a specified
-HTTP or HTTPS proxy server, and can be used with the built-in `http` module.
-
-__Note:__ For HTTP proxy usage with the `https` module, check out
-[`node-https-proxy-agent`](https://github.com/TooTallNate/node-https-proxy-agent).
-
-Installation
-------------
-
-Install with `npm`:
-
-``` bash
-$ npm install http-proxy-agent
-```
-
-
-Example
--------
-
-``` js
-var url = require('url');
-var http = require('http');
-var HttpProxyAgent = require('http-proxy-agent');
-
-// HTTP/HTTPS proxy to connect to
-var proxy = process.env.http_proxy || 'http://168.63.76.32:3128';
-console.log('using proxy server %j', proxy);
-
-// HTTP endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'http://nodejs.org/api/';
-console.log('attempting to GET %j', endpoint);
-var opts = url.parse(endpoint);
-
-// create an instance of the `HttpProxyAgent` class with the proxy server information
-var agent = new HttpProxyAgent(proxy);
-opts.agent = agent;
-
-http.get(opts, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/http-proxy-agent/index.js b/node_modules/http-proxy-agent/index.js
deleted file mode 100644
index f90a5297c..000000000
--- a/node_modules/http-proxy-agent/index.js
+++ /dev/null
@@ -1,111 +0,0 @@
-
-/**
- * Module dependencies.
- */
-
-var net = require('net');
-var tls = require('tls');
-var url = require('url');
-var Agent = require('agent-base');
-var inherits = require('util').inherits;
-var debug = require('debug')('http-proxy-agent');
-
-/**
- * Module exports.
- */
-
-module.exports = HttpProxyAgent;
-
-/**
- * The `HttpProxyAgent` implements an HTTP Agent subclass that connects to the
- * specified "HTTP proxy server" in order to proxy HTTP requests.
- *
- * @api public
- */
-
-function HttpProxyAgent (opts) {
- if (!(this instanceof HttpProxyAgent)) return new HttpProxyAgent(opts);
- if ('string' == typeof opts) opts = url.parse(opts);
- if (!opts) throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');
- debug('creating new HttpProxyAgent instance: %o', opts);
- Agent.call(this, opts);
-
- var proxy = Object.assign({}, opts);
-
- // if `true`, then connect to the proxy server over TLS. defaults to `false`.
- this.secureProxy = proxy.protocol ? /^https:?$/i.test(proxy.protocol) : false;
-
- // prefer `hostname` over `host`, and set the `port` if needed
- proxy.host = proxy.hostname || proxy.host;
- proxy.port = +proxy.port || (this.secureProxy ? 443 : 80);
-
- if (proxy.host && proxy.path) {
- // if both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete proxy.path;
- delete proxy.pathname;
- }
-
- this.proxy = proxy;
-}
-inherits(HttpProxyAgent, Agent);
-
-/**
- * Called when the node-core HTTP client library is creating a new HTTP request.
- *
- * @api public
- */
-
-HttpProxyAgent.prototype.callback = function connect (req, opts, fn) {
- var proxy = this.proxy;
-
- // change the `http.ClientRequest` instance's "path" field
- // to the absolute path of the URL that will be requested
- var parsed = url.parse(req.path);
- if (null == parsed.protocol) parsed.protocol = 'http:';
- if (null == parsed.hostname) parsed.hostname = opts.hostname || opts.host;
- if (null == parsed.port) parsed.port = opts.port;
- if (parsed.port == 80) {
- // if port is 80, then we can remove the port so that the
- // ":80" portion is not on the produced URL
- delete parsed.port;
- }
- var absolute = url.format(parsed);
- req.path = absolute;
-
- // inject the `Proxy-Authorization` header if necessary
- if (proxy.auth) {
- req.setHeader(
- 'Proxy-Authorization',
- 'Basic ' + Buffer.from(proxy.auth).toString('base64')
- );
- }
-
- // create a socket connection to the proxy server
- var socket;
- if (this.secureProxy) {
- socket = tls.connect(proxy);
- } else {
- socket = net.connect(proxy);
- }
-
- // at this point, the http ClientRequest's internal `_header` field might have
- // already been set. If this is the case then we'll need to re-generate the
- // string since we just changed the `req.path`
- if (req._header) {
- debug('regenerating stored HTTP header string for request');
- req._header = null;
- req._implicitHeader();
- if (req.output && req.output.length > 0) {
- debug('patching connection write() output buffer with updated header');
- // the _header has already been queued to be written to the socket
- var first = req.output[0];
- var endOfHeaders = first.indexOf('\r\n\r\n') + 4;
- req.output[0] = req._header + first.substring(endOfHeaders);
- debug('output buffer: %o', req.output);
- }
- }
-
- fn(null, socket);
-};
diff --git a/node_modules/http-proxy-agent/package.json b/node_modules/http-proxy-agent/package.json
deleted file mode 100644
index 99e28fdc6..000000000
--- a/node_modules/http-proxy-agent/package.json
+++ /dev/null
@@ -1,67 +0,0 @@
-{
- "_from": "http-proxy-agent@^2.1.0",
- "_id": "http-proxy-agent@2.1.0",
- "_inBundle": false,
- "_integrity": "sha512-qwHbBLV7WviBl0rQsOzH6o5lwyOIvwp/BdFnvVxXORldu5TmjFfjzBcWUWS5kWAZhmv+JtiDhSuQCp4sBfbIgg==",
- "_location": "/http-proxy-agent",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "http-proxy-agent@^2.1.0",
- "name": "http-proxy-agent",
- "escapedName": "http-proxy-agent",
- "rawSpec": "^2.1.0",
- "saveSpec": null,
- "fetchSpec": "^2.1.0"
- },
- "_requiredBy": [
- "/make-fetch-happen",
- "/npm-profile/make-fetch-happen",
- "/npm-registry-fetch/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-2.1.0.tgz",
- "_shasum": "e4821beef5b2142a2026bd73926fe537631c5405",
- "_spec": "http-proxy-agent@^2.1.0",
- "_where": "/Users/rebecca/code/npm/node_modules/make-fetch-happen",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://n8.io/"
- },
- "bugs": {
- "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "agent-base": "4",
- "debug": "3.1.0"
- },
- "deprecated": false,
- "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP",
- "devDependencies": {
- "mocha": "3",
- "proxy": "~0.2.3"
- },
- "engines": {
- "node": ">= 4.5.0"
- },
- "homepage": "https://github.com/TooTallNate/node-http-proxy-agent#readme",
- "keywords": [
- "http",
- "proxy",
- "endpoint",
- "agent"
- ],
- "license": "MIT",
- "main": "./index.js",
- "name": "http-proxy-agent",
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/node-http-proxy-agent.git"
- },
- "scripts": {
- "test": "mocha --reporter spec"
- },
- "version": "2.1.0"
-}
diff --git a/node_modules/http-proxy-agent/test/ssl-cert-snakeoil.key b/node_modules/http-proxy-agent/test/ssl-cert-snakeoil.key
deleted file mode 100644
index fd1250122..000000000
--- a/node_modules/http-proxy-agent/test/ssl-cert-snakeoil.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQCzURxIqzer0ACAbX/lHdsn4Gd9PLKrf7EeDYfIdV0HZKPD8WDr
-bBx2/fBu0OW2sjnzv/SVZbJ0DAuPE/p0+eT0qb2qC10iz9iTD7ribd7gxhirVb8y
-b3fBjXsxc8V8p4Ny1LcvNSqCjwUbJqdRogfoJeTiqPM58z5sNzuv5iq7iwIDAQAB
-AoGAPMQy4olrP0UotlzlJ36bowLP70ffgHCwU+/f4NWs5fF78c3du0oSx1w820Dd
-Z7E0JF8bgnlJJTxjumPZz0RUCugrEHBKJmzEz3cxF5E3+7NvteZcjKn9D67RrM5x
-1/uSZ9cqKE9cYvY4fSuHx18diyZ4axR/wB1Pea2utjjDM+ECQQDb9ZbmmaWMiRpQ
-5Up+loxP7BZNPsEVsm+DVJmEFbaFgGfncWBqSIqnPNjMwTwj0OigTwCAEGPkfRVW
-T0pbYWCxAkEA0LK7SCTwzyDmhASUalk0x+3uCAA6ryFdwJf/wd8TRAvVOmkTEldX
-uJ7ldLvfrONYO3v56uKTU/SoNdZYzKtO+wJAX2KM4ctXYy5BXztPpr2acz4qHa1N
-Bh+vBAC34fOYhyQ76r3b1btHhWZ5jbFuZwm9F2erC94Ps5IaoqcX07DSwQJAPKGw
-h2U0EPkd/3zVIZCJJQya+vgWFIs9EZcXVtvYXQyTBkVApTN66MhBIYjzkub5205J
-bVQmOV37AKklY1DhwQJAA1wos0cYxro02edzatxd0DIR2r4qqOqLkw6BhYHhq6HJ
-ZvIcQkHqdSXzdETFc01I1znDGGIrJHcnvKWgBPoEUg==
------END RSA PRIVATE KEY-----
diff --git a/node_modules/http-proxy-agent/test/ssl-cert-snakeoil.pem b/node_modules/http-proxy-agent/test/ssl-cert-snakeoil.pem
deleted file mode 100644
index b115a5e91..000000000
--- a/node_modules/http-proxy-agent/test/ssl-cert-snakeoil.pem
+++ /dev/null
@@ -1,12 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIB1TCCAT4CCQDV5mPlzm9+izANBgkqhkiG9w0BAQUFADAvMS0wKwYDVQQDEyQ3
-NTI3YmQ3Ny1hYjNlLTQ3NGItYWNlNy1lZWQ2MDUzOTMxZTcwHhcNMTUwNzA2MjI0
-NTA3WhcNMjUwNzAzMjI0NTA3WjAvMS0wKwYDVQQDEyQ3NTI3YmQ3Ny1hYjNlLTQ3
-NGItYWNlNy1lZWQ2MDUzOTMxZTcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
-ALNRHEirN6vQAIBtf+Ud2yfgZ308sqt/sR4Nh8h1XQdko8PxYOtsHHb98G7Q5bay
-OfO/9JVlsnQMC48T+nT55PSpvaoLXSLP2JMPuuJt3uDGGKtVvzJvd8GNezFzxXyn
-g3LUty81KoKPBRsmp1GiB+gl5OKo8znzPmw3O6/mKruLAgMBAAEwDQYJKoZIhvcN
-AQEFBQADgYEACzoHUF8UV2Z6541Q2wKEA0UFUzmUjf/E1XwBO+1P15ZZ64uw34B4
-1RwMPtAo9RY/PmICTWtNxWGxkzwb2JtDWtnxVER/lF8k2XcXPE76fxTHJF/BKk9J
-QU8OTD1dd9gHCBviQB9TqntRZ5X7axjtuWjb2umY+owBYzAHZkp1HKI=
------END CERTIFICATE-----
diff --git a/node_modules/http-proxy-agent/test/test.js b/node_modules/http-proxy-agent/test/test.js
deleted file mode 100644
index cc320c7fc..000000000
--- a/node_modules/http-proxy-agent/test/test.js
+++ /dev/null
@@ -1,303 +0,0 @@
-
-/**
- * Module dependencies.
- */
-
-var fs = require('fs');
-var url = require('url');
-var http = require('http');
-var https = require('https');
-var assert = require('assert');
-var Proxy = require('proxy');
-var HttpProxyAgent = require('../');
-
-describe('HttpProxyAgent', function () {
-
- var server;
- var serverPort;
-
- var proxy;
- var proxyPort;
-
- var sslProxy;
- var sslProxyPort;
-
- before(function (done) {
- // setup HTTP proxy server
- proxy = Proxy();
- proxy.listen(function () {
- proxyPort = proxy.address().port;
- done();
- });
- });
-
- before(function (done) {
- // setup target HTTP server
- server = http.createServer();
- server.listen(function () {
- serverPort = server.address().port;
- done();
- });
- });
-
- before(function (done) {
- // setup SSL HTTP proxy server
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- sslProxy = Proxy(https.createServer(options));
- sslProxy.listen(function () {
- sslProxyPort = sslProxy.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function (done) {
- proxy.once('close', function () { done(); });
- proxy.close();
- });
-
- after(function (done) {
- server.once('close', function () { done(); });
- server.close();
- });
-
- after(function (done) {
- sslProxy.once('close', function () { done(); });
- sslProxy.close();
- });
-
- describe('constructor', function () {
- it('should throw an Error if no "proxy" argument is given', function () {
- assert.throws(function () {
- new HttpProxyAgent();
- });
- });
- it('should accept a "string" proxy argument', function () {
- var agent = new HttpProxyAgent('http://127.0.0.1:' + proxyPort);
- assert.equal('127.0.0.1', agent.proxy.host);
- assert.equal(proxyPort, agent.proxy.port);
- });
- it('should accept a `url.parse()` result object argument', function () {
- var opts = url.parse('http://127.0.0.1:' + proxyPort);
- var agent = new HttpProxyAgent(opts);
- assert.equal('127.0.0.1', agent.proxy.host);
- assert.equal(proxyPort, agent.proxy.port);
- });
- describe('secureProxy', function () {
- it('should default to `false`', function () {
- var agent = new HttpProxyAgent({ port: proxyPort });
- assert.equal(false, agent.secureProxy);
- });
- it('should be `false` when "http:" protocol is used', function () {
- var agent = new HttpProxyAgent({ port: proxyPort, protocol: 'http:' });
- assert.equal(false, agent.secureProxy);
- });
- it('should be `true` when "https:" protocol is used', function () {
- var agent = new HttpProxyAgent({ port: proxyPort, protocol: 'https:' });
- assert.equal(true, agent.secureProxy);
- });
- it('should be `true` when "https" protocol is used', function () {
- var agent = new HttpProxyAgent({ port: proxyPort, protocol: 'https' });
- assert.equal(true, agent.secureProxy);
- });
- });
- });
-
- describe('"http" module', function () {
- it('should work over an HTTP proxy', function (done) {
- // set HTTP "request" event handler for this test
- server.once('request', function (req, res) {
- res.end(JSON.stringify(req.headers));
- });
-
- var proxy = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort;
- var agent = new HttpProxyAgent(proxy);
-
- var opts = url.parse('http://127.0.0.1:' + serverPort);
- opts.agent = agent;
-
- http.get(opts, function (res) {
- var data = '';
- res.setEncoding('utf8');
- res.on('data', function (b) {
- data += b;
- });
- res.on('end', function () {
- data = JSON.parse(data);
- assert.equal('127.0.0.1:' + serverPort, data.host);
- assert('via' in data);
- done();
- });
- });
- });
- it('should work over an HTTPS proxy', function (done) {
- // set HTTP "request" event handler for this test
- server.once('request', function (req, res) {
- res.end(JSON.stringify(req.headers));
- });
-
- var proxy = process.env.HTTPS_PROXY || process.env.https_proxy || 'https://127.0.0.1:' + sslProxyPort;
- proxy = url.parse(proxy);
- proxy.rejectUnauthorized = false;
- var agent = new HttpProxyAgent(proxy);
- assert.equal(true, agent.secureProxy);
-
- var opts = url.parse('http://127.0.0.1:' + serverPort);
- opts.agent = agent;
-
- http.get(opts, function (res) {
- var data = '';
- res.setEncoding('utf8');
- res.on('data', function (b) {
- data += b;
- });
- res.on('end', function () {
- data = JSON.parse(data);
- assert.equal('127.0.0.1:' + serverPort, data.host);
- assert('via' in data);
- done();
- });
- });
- });
- it('should proxy the query string of the request path', function (done) {
- // set HTTP "request" event handler for this test
- server.once('request', function (req, res) {
- res.end(JSON.stringify({
- url: req.url
- }));
- });
-
- var proxy = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort;
- var agent = new HttpProxyAgent(proxy);
-
- var opts = url.parse('http://127.0.0.1:' + serverPort + '/test?foo=bar&1=2');
- opts.agent = agent;
-
- http.get(opts, function (res) {
- var data = '';
- res.setEncoding('utf8');
- res.on('data', function (b) {
- data += b;
- });
- res.on('end', function () {
- data = JSON.parse(data);
- assert.equal('/test?foo=bar&1=2', data.url);
- done();
- });
- });
- });
- it('should receive the 407 authorization code on the `http.ClientResponse`', function (done) {
- // set a proxy authentication function for this test
- proxy.authenticate = function (req, fn) {
- // reject all requests
- fn(null, false);
- };
-
- var proxyUri = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort;
- var agent = new HttpProxyAgent(proxyUri);
-
- var opts = {};
- // `host` and `port` don't really matter since the proxy will reject anyways
- opts.host = '127.0.0.1';
- opts.port = 80;
- opts.agent = agent;
-
- http.get(opts, function (res) {
- assert.equal(407, res.statusCode);
- assert('proxy-authenticate' in res.headers);
- delete proxy.authenticate;
- done();
- });
- });
- it('should send the "Proxy-Authorization" request header', function (done) {
- // set a proxy authentication function for this test
- proxy.authenticate = function (req, fn) {
- // username:password is "foo:bar"
- fn(null, req.headers['proxy-authorization'] == 'Basic Zm9vOmJhcg==');
- };
-
- // set HTTP "request" event handler for this test
- server.once('request', function (req, res) {
- res.end(JSON.stringify(req.headers));
- });
-
- var proxyUri = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort;
- var proxyOpts = url.parse(proxyUri);
- proxyOpts.auth = 'foo:bar';
- var agent = new HttpProxyAgent(proxyOpts);
-
- var opts = url.parse('http://127.0.0.1:' + serverPort);
- opts.agent = agent;
-
- http.get(opts, function (res) {
- var data = '';
- res.setEncoding('utf8');
- res.on('data', function (b) {
- data += b;
- });
- res.on('end', function () {
- data = JSON.parse(data);
- assert.equal('127.0.0.1:' + serverPort, data.host);
- assert('via' in data);
- delete proxy.authenticate;
- done();
- });
- });
- });
- it('should emit an "error" event on the `http.ClientRequest` if the proxy does not exist', function (done) {
- // port 4 is a reserved, but "unassigned" port
- var proxyUri = 'http://127.0.0.1:4';
- var agent = new HttpProxyAgent(proxyUri);
-
- var opts = url.parse('http://nodejs.org');
- opts.agent = agent;
-
- var req = http.get(opts);
- req.once('error', function (err) {
- assert.equal('ECONNREFUSED', err.code);
- req.abort();
- done();
- });
- });
- it('should work after the first tick of the `http.ClientRequest` instance', function (done) {
- // set HTTP "request" event handler for this test
- server.once('request', function (req, res) {
- res.end(JSON.stringify(req.url));
- });
-
- var proxy = process.env.HTTP_PROXY || process.env.http_proxy || 'http://127.0.0.1:' + proxyPort;
- var agent = new HttpProxyAgent(proxy);
-
- var opts = url.parse('http://127.0.0.1:' + serverPort + '/test');
- opts.agent = agent;
-
- // defer the "connect()" function logic, since calling .end() before the
- // "socket" event can cause weirdness since the HTTP header will have been
- // cached and the HttpProxyAgent `req.path` patches won't be respected
- var callback = agent.callback;
- agent.callback = function (req, opts, fn) {
- setTimeout(function () {
- agent.callback = callback;
- agent.callback(req, opts, fn);
- }, 10);
- };
-
- http.get(opts, function (res) {
- var data = '';
- res.setEncoding('utf8');
- res.on('data', function (b) {
- data += b;
- });
- res.on('end', function () {
- data = JSON.parse(data);
- assert.equal('/test', data);
- done();
- });
- });
- });
- });
-
-});
diff --git a/node_modules/https-proxy-agent/.editorconfig b/node_modules/https-proxy-agent/.editorconfig
deleted file mode 100644
index 12b4b9a3b..000000000
--- a/node_modules/https-proxy-agent/.editorconfig
+++ /dev/null
@@ -1,37 +0,0 @@
-root = true
-
-[*]
-indent_style = tab
-indent_size = 4
-tab_width = 4
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
-
-[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}]
-indent_style = space
-indent_size = 2
-
-[{*.py,*.asm}]
-indent_style = space
-
-[*.py]
-indent_size = 4
-
-[*.asm]
-indent_size = 8
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Ideal settings - some plugins might support these.
-[*.js]
-quote_type = single
-
-[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
-curly_bracket_next_line = false
-spaces_around_operators = true
-spaces_around_brackets = outside
-# close enough to 1TB
-indent_brace_style = K&R
diff --git a/node_modules/https-proxy-agent/.eslintrc.js b/node_modules/https-proxy-agent/.eslintrc.js
deleted file mode 100644
index 62743f2c4..000000000
--- a/node_modules/https-proxy-agent/.eslintrc.js
+++ /dev/null
@@ -1,86 +0,0 @@
-module.exports = {
- 'extends': [
- 'airbnb',
- 'prettier'
- ],
- 'parser': '@typescript-eslint/parser',
- 'parserOptions': {
- 'ecmaVersion': 2018,
- 'sourceType': 'module',
- 'modules': true
- },
- 'plugins': [
- '@typescript-eslint'
- ],
- 'settings': {
- 'import/resolver': {
- 'typescript': {
- }
- }
- },
- 'rules': {
- 'quotes': [
- 2,
- 'single',
- {
- 'allowTemplateLiterals': true
- }
- ],
- 'class-methods-use-this': 0,
- 'consistent-return': 0,
- 'func-names': 0,
- 'global-require': 0,
- 'guard-for-in': 0,
- 'import/no-duplicates': 0,
- 'import/no-dynamic-require': 0,
- 'import/no-extraneous-dependencies': 0,
- 'import/prefer-default-export': 0,
- 'lines-between-class-members': 0,
- 'no-await-in-loop': 0,
- 'no-bitwise': 0,
- 'no-console': 0,
- 'no-continue': 0,
- 'no-control-regex': 0,
- 'no-empty': 0,
- 'no-loop-func': 0,
- 'no-nested-ternary': 0,
- 'no-param-reassign': 0,
- 'no-plusplus': 0,
- 'no-restricted-globals': 0,
- 'no-restricted-syntax': 0,
- 'no-shadow': 0,
- 'no-underscore-dangle': 0,
- 'no-use-before-define': 0,
- 'prefer-const': 0,
- 'prefer-destructuring': 0,
- 'camelcase': 0,
- 'no-unused-vars': 0, // in favor of '@typescript-eslint/no-unused-vars'
- // 'indent': 0 // in favor of '@typescript-eslint/indent'
- '@typescript-eslint/no-unused-vars': 'warn',
- // '@typescript-eslint/indent': ['error', 2] // this might conflict with a lot ongoing changes
- '@typescript-eslint/no-array-constructor': 'error',
- '@typescript-eslint/adjacent-overload-signatures': 'error',
- '@typescript-eslint/class-name-casing': 'error',
- '@typescript-eslint/interface-name-prefix': 'error',
- '@typescript-eslint/no-empty-interface': 'error',
- '@typescript-eslint/no-inferrable-types': 'error',
- '@typescript-eslint/no-misused-new': 'error',
- '@typescript-eslint/no-namespace': 'error',
- '@typescript-eslint/no-non-null-assertion': 'error',
- '@typescript-eslint/no-parameter-properties': 'error',
- '@typescript-eslint/no-triple-slash-reference': 'error',
- '@typescript-eslint/prefer-namespace-keyword': 'error',
- '@typescript-eslint/type-annotation-spacing': 'error',
- // '@typescript-eslint/array-type': 'error',
- // '@typescript-eslint/ban-types': 'error',
- // '@typescript-eslint/explicit-function-return-type': 'warn',
- // '@typescript-eslint/explicit-member-accessibility': 'error',
- // '@typescript-eslint/member-delimiter-style': 'error',
- // '@typescript-eslint/no-angle-bracket-type-assertion': 'error',
- // '@typescript-eslint/no-explicit-any': 'warn',
- // '@typescript-eslint/no-object-literal-type-assertion': 'error',
- // '@typescript-eslint/no-use-before-define': 'error',
- // '@typescript-eslint/no-var-requires': 'error',
- // '@typescript-eslint/prefer-interface': 'error'
- }
-}
diff --git a/node_modules/https-proxy-agent/History.md b/node_modules/https-proxy-agent/History.md
deleted file mode 100644
index f72381231..000000000
--- a/node_modules/https-proxy-agent/History.md
+++ /dev/null
@@ -1,124 +0,0 @@
-
-2.2.0 / 2018-03-03
-==================
-
- * Add "engines" to package.json - requires Node.js >= 4.5.0
- * Use `Buffer.from()`
-
-2.1.1 / 2017-11-28
-==================
-
- * Update `debug` - Security Problems with Previous Version (#38)
-
-2.1.0 / 2017-08-08
-==================
-
- * only include the port number in the Host header when non-default port (#22)
- * set ALPN to "http 1.1" by default when using tlsproxy (#25)
- * only set `ALPNProtocols` when the property does not already exist
- * support SNI (#14)
-
-2.0.0 / 2017-06-26
-==================
-
- * rename https-proxy-agent.js to index.js
- * update dependencies and remove semver-specific test case
- * update `agent-base` to v4
- * remove `extend` dependency
- * :arrow_up: update minimum version of debug dependency
- * opts/options
- * drop Node versions < v4 from Travis-CI
- * test Node.js 5, 6, 7 and 8 on Travis-CI
- * README: remove outdated `secureEndpoint` reference
- * README: remove `secureEndpoint` docs, add `headers`
- * https-proxy-agent: add support for proxy "headers"
-
-1.0.0 / 2015-07-10
-==================
-
- * upgrade to "agent-base" v2 API
- * test: test case is fixed
- * use %o debug() formatter
- * README: use SVG for Travis-CI badge
-
-0.3.6 / 2015-07-06
-==================
-
- * package: update "extend" to v3
- * package: update "mocha" to v2
- * package: update "debug" to v2
- * travis: test node v0.8, v0.10, and v0.12
- * test: use ssl-cert-snakeoil self-signed SSL certs
-
-0.3.5 / 2014-06-11
-==================
-
- * package: update "debug" to v1.0.0
-
-0.3.4 / 2014-04-09
-==================
-
- * gitignore: ignore root level ?.js files
- * package: update outdated dependencies
-
-0.3.3 / 2014-01-13
-==================
-
-  * https-proxy-agent: use debug() instead of console.error()
- * https-proxy-agent: fix debug() call
- * History: fix whitespace
-
-0.3.2 / 2013-11-18
-==================
-
- * https-proxy-agent: allow "https" without trailing colon
- * README: fix typo
-
-0.3.1 / 2013-11-16
-==================
-
- * test: enable the HTTPS over HTTPS test on node v0.11.8
- * https-proxy-agent: create the proxy socket connection first
- * https-proxy-agent: delete `pathname` from the proxy opts as well
- * https-proxy-agent: remove dead "end"-emitting code
-
-0.3.0 / 2013-09-16
-==================
-
- * https-proxy-agent: use "debug" module
- * https-proxy-agent: update to the "agent-base" v1 API
- * https-proxy-agent: default the "port" to 443 if not set
- * https-proxy-agent: augment the `opts` object for the `tls.connect` function
- * https-proxy-agent: use "extend" module
- * https-proxy-agent: remove use of `this` as much as possible
- * https-proxy-agent: listen for the "error" event of the socket
- * test: refactor of tests to use "proxy" module
- * test: add "error" event catching test
- * test: add 407 proxy response test
- * test: use "semver" module, disable the HTTPS over HTTPS test for node >= v0.11.3
-
-0.2.0 / 2013-09-03
-==================
-
- * Add initial "Proxy-Authorization" Basic authentication support
-
-0.1.0 / 2013-07-21
-==================
-
- * rename `secure` to `secureProxy`
- * added `secureEndpoint` option
- * various optimizations
- * README improvements
-
-0.0.2 / 2013-07-11
-==================
-
- * test: add mocha tests
- * don't use `socket.ondata`, use the official API instead
- * throw an Error when no proxy info is given
- * add support for passing options to net/tls .connect()
-
-0.0.1 / 2013-07-09
-==================
-
- * Initial release
diff --git a/node_modules/https-proxy-agent/README.md b/node_modules/https-proxy-agent/README.md
deleted file mode 100644
index 20fda1e24..000000000
--- a/node_modules/https-proxy-agent/README.md
+++ /dev/null
@@ -1,137 +0,0 @@
-https-proxy-agent
-================
-### An HTTP(s) proxy `http.Agent` implementation for HTTPS
-[![Build Status](https://travis-ci.org/TooTallNate/node-https-proxy-agent.svg?branch=master)](https://travis-ci.org/TooTallNate/node-https-proxy-agent)
-
-This module provides an `http.Agent` implementation that connects to a specified
-HTTP or HTTPS proxy server, and can be used with the built-in `https` module.
-
-Specifically, this `Agent` implementation connects to an intermediary "proxy"
-server and issues the [CONNECT HTTP method][CONNECT], which tells the proxy to
-open a direct TCP connection to the destination server.
-
-Since this agent implements the CONNECT HTTP method, it also works with other
-protocols that use this method when connecting over proxies (e.g. WebSockets).
-See the "Examples" section below for more.
-
-
-Installation
-------------
-
-Install with `npm`:
-
-``` bash
-$ npm install https-proxy-agent
-```
-
-
-Examples
---------
-
-#### `https` module example
-
-``` js
-var url = require('url');
-var https = require('https');
-var HttpsProxyAgent = require('https-proxy-agent');
-
-// HTTP/HTTPS proxy to connect to
-var proxy = process.env.http_proxy || 'http://168.63.76.32:3128';
-console.log('using proxy server %j', proxy);
-
-// HTTPS endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'https://graph.facebook.com/tootallnate';
-console.log('attempting to GET %j', endpoint);
-var options = url.parse(endpoint);
-
-// create an instance of the `HttpsProxyAgent` class with the proxy server information
-var agent = new HttpsProxyAgent(proxy);
-options.agent = agent;
-
-https.get(options, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-#### `ws` WebSocket connection example
-
-``` js
-var url = require('url');
-var WebSocket = require('ws');
-var HttpsProxyAgent = require('https-proxy-agent');
-
-// HTTP/HTTPS proxy to connect to
-var proxy = process.env.http_proxy || 'http://168.63.76.32:3128';
-console.log('using proxy server %j', proxy);
-
-// WebSocket endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'ws://echo.websocket.org';
-var parsed = url.parse(endpoint);
-console.log('attempting to connect to WebSocket %j', endpoint);
-
-// create an instance of the `HttpsProxyAgent` class with the proxy server information
-var options = url.parse(proxy);
-
-var agent = new HttpsProxyAgent(options);
-
-// finally, initiate the WebSocket connection
-var socket = new WebSocket(endpoint, { agent: agent });
-
-socket.on('open', function () {
- console.log('"open" event!');
- socket.send('hello world');
-});
-
-socket.on('message', function (data, flags) {
- console.log('"message" event! %j %j', data, flags);
- socket.close();
-});
-```
-
-API
----
-
-### new HttpsProxyAgent(Object options)
-
-The `HttpsProxyAgent` class implements an `http.Agent` subclass that connects
-to the specified "HTTP(s) proxy server" in order to proxy HTTPS and/or WebSocket
-requests. This is achieved by using the [HTTP `CONNECT` method][CONNECT].
-
-The `options` argument may either be a string URI of the proxy server to use, or an
-"options" object with more specific properties:
-
- * `host` - String - Proxy host to connect to (may use `hostname` as well). Required.
- * `port` - Number - Proxy port to connect to. Required.
- * `protocol` - String - If `https:`, then use TLS to connect to the proxy.
- * `headers` - Object - Additional HTTP headers to be sent on the HTTP CONNECT method.
- * Any other options given are passed to the `net.connect()`/`tls.connect()` functions.
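As a supplement to the string-URI examples above, here is a small sketch of the options-object form. The proxy host, port, and extra header are placeholders, and `rejectUnauthorized` is included only to illustrate that unrecognized options are forwarded to `tls.connect()`:

``` js
var https = require('https');
var HttpsProxyAgent = require('https-proxy-agent');

// placeholder proxy details; substitute a real proxy host and port
var agent = new HttpsProxyAgent({
  host: 'proxy.example.com',   // `hostname` is accepted as well
  port: 3128,
  protocol: 'https:',          // connect to the proxy itself over TLS
  headers: { 'Proxy-Connection': 'keep-alive' }, // sent with the HTTP CONNECT request
  rejectUnauthorized: false    // any other option is passed through to tls.connect()
});

https.get({ host: 'example.com', path: '/', agent: agent }, function (res) {
  console.log('got response: %d', res.statusCode);
  res.resume();
});
```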
-
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-[CONNECT]: http://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_Tunneling
diff --git a/node_modules/https-proxy-agent/index.d.ts b/node_modules/https-proxy-agent/index.d.ts
deleted file mode 100644
index cec35d85e..000000000
--- a/node_modules/https-proxy-agent/index.d.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-declare module 'https-proxy-agent' {
- import * as https from 'https';
-
- namespace HttpsProxyAgent {
- interface HttpsProxyAgentOptions {
- host: string;
- port: number | string;
- secureProxy?: boolean;
- headers?: {
- [key: string]: string;
- };
- [key: string]: any;
- }
- }
-
-  // HttpsProxyAgent doesn't *actually* extend https.Agent, but for my purposes I want it to pretend that it does
- class HttpsProxyAgent extends https.Agent {
- constructor(opts: HttpsProxyAgent.HttpsProxyAgentOptions | string);
- }
-
- export = HttpsProxyAgent;
-}
diff --git a/node_modules/https-proxy-agent/index.js b/node_modules/https-proxy-agent/index.js
deleted file mode 100644
index 817a0a923..000000000
--- a/node_modules/https-proxy-agent/index.js
+++ /dev/null
@@ -1,241 +0,0 @@
-/**
- * Module dependencies.
- */
-
-var net = require('net');
-var tls = require('tls');
-var url = require('url');
-var assert = require('assert');
-var Agent = require('agent-base');
-var inherits = require('util').inherits;
-var debug = require('debug')('https-proxy-agent');
-
-/**
- * Module exports.
- */
-
-module.exports = HttpsProxyAgent;
-
-/**
- * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to the
- * specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
- *
- * @api public
- */
-
-function HttpsProxyAgent(opts) {
- if (!(this instanceof HttpsProxyAgent)) return new HttpsProxyAgent(opts);
- if ('string' == typeof opts) opts = url.parse(opts);
- if (!opts)
- throw new Error(
- 'an HTTP(S) proxy server `host` and `port` must be specified!'
- );
- debug('creating new HttpsProxyAgent instance: %o', opts);
- Agent.call(this, opts);
-
- var proxy = Object.assign({}, opts);
-
- // if `true`, then connect to the proxy server over TLS. defaults to `false`.
- this.secureProxy = proxy.protocol
- ? /^https:?$/i.test(proxy.protocol)
- : false;
-
- // prefer `hostname` over `host`, and set the `port` if needed
- proxy.host = proxy.hostname || proxy.host;
- proxy.port = +proxy.port || (this.secureProxy ? 443 : 80);
-
- // ALPN is supported by Node.js >= v5.
- // attempt to negotiate http/1.1 for proxy servers that support http/2
- if (this.secureProxy && !('ALPNProtocols' in proxy)) {
- proxy.ALPNProtocols = ['http 1.1'];
- }
-
- if (proxy.host && proxy.path) {
- // if both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete proxy.path;
- delete proxy.pathname;
- }
-
- this.proxy = proxy;
- this.defaultPort = 443;
-}
-inherits(HttpsProxyAgent, Agent);
-
-/**
- * Called when the node-core HTTP client library is creating a new HTTP request.
- *
- * @api public
- */
-
-HttpsProxyAgent.prototype.callback = function connect(req, opts, fn) {
- var proxy = this.proxy;
-
- // create a socket connection to the proxy server
- var socket;
- if (this.secureProxy) {
- socket = tls.connect(proxy);
- } else {
- socket = net.connect(proxy);
- }
-
- // we need to buffer any HTTP traffic that happens with the proxy before we get
- // the CONNECT response, so that if the response is anything other than an "200"
- // response code, then we can re-play the "data" events on the socket once the
- // HTTP parser is hooked up...
- var buffers = [];
- var buffersLength = 0;
-
- function read() {
- var b = socket.read();
- if (b) ondata(b);
- else socket.once('readable', read);
- }
-
- function cleanup() {
- socket.removeListener('end', onend);
- socket.removeListener('error', onerror);
- socket.removeListener('close', onclose);
- socket.removeListener('readable', read);
- }
-
- function onclose(err) {
- debug('onclose had error %o', err);
- }
-
- function onend() {
- debug('onend');
- }
-
- function onerror(err) {
- cleanup();
- fn(err);
- }
-
- function ondata(b) {
- buffers.push(b);
- buffersLength += b.length;
- var buffered = Buffer.concat(buffers, buffersLength);
- var str = buffered.toString('ascii');
-
- if (!~str.indexOf('\r\n\r\n')) {
- // keep buffering
- debug('have not received end of HTTP headers yet...');
- read();
- return;
- }
-
- var firstLine = str.substring(0, str.indexOf('\r\n'));
- var statusCode = +firstLine.split(' ')[1];
- debug('got proxy server response: %o', firstLine);
-
- if (200 == statusCode) {
- // 200 Connected status code!
- var sock = socket;
-
- // nullify the buffered data since we won't be needing it
- buffers = buffered = null;
-
- if (opts.secureEndpoint) {
- // since the proxy is connecting to an SSL server, we have
- // to upgrade this socket connection to an SSL connection
- debug(
- 'upgrading proxy-connected socket to TLS connection: %o',
- opts.host
- );
- opts.socket = socket;
- opts.servername = opts.servername || opts.host;
- opts.host = null;
- opts.hostname = null;
- opts.port = null;
- sock = tls.connect(opts);
- }
-
- cleanup();
- req.once('socket', resume);
- fn(null, sock);
- } else {
- // some other status code that's not 200... need to re-play the HTTP header
- // "data" events onto the socket once the HTTP machinery is attached so
- // that the node core `http` can parse and handle the error status code
- cleanup();
-
- // the original socket is closed, and a new closed socket is
- // returned instead, so that the proxy doesn't get the HTTP request
- // written to it (which may contain `Authorization` headers or other
- // sensitive data).
- //
- // See: https://hackerone.com/reports/541502
- socket.destroy();
- socket = new net.Socket();
- socket.readable = true;
-
-
- // save a reference to the concat'd Buffer for the `onsocket` callback
- buffers = buffered;
-
- // need to wait for the "socket" event to re-play the "data" events
- req.once('socket', onsocket);
-
- fn(null, socket);
- }
- }
-
- function onsocket(socket) {
- debug('replaying proxy buffer for failed request');
- assert(socket.listenerCount('data') > 0);
-
- // replay the "buffers" Buffer onto the `socket`, since at this point
- // the HTTP module machinery has been hooked up for the user
- socket.push(buffers);
-
- // nullify the cached Buffer instance
- buffers = null;
- }
-
- socket.on('error', onerror);
- socket.on('close', onclose);
- socket.on('end', onend);
-
- read();
-
- var hostname = opts.host + ':' + opts.port;
- var msg = 'CONNECT ' + hostname + ' HTTP/1.1\r\n';
-
- var headers = Object.assign({}, proxy.headers);
- if (proxy.auth) {
- headers['Proxy-Authorization'] =
- 'Basic ' + Buffer.from(proxy.auth).toString('base64');
- }
-
- // the Host header should only include the port
- // number when it is a non-standard port
- var host = opts.host;
- if (!isDefaultPort(opts.port, opts.secureEndpoint)) {
- host += ':' + opts.port;
- }
- headers['Host'] = host;
-
- headers['Connection'] = 'close';
- Object.keys(headers).forEach(function(name) {
- msg += name + ': ' + headers[name] + '\r\n';
- });
-
- socket.write(msg + '\r\n');
-};
-
-/**
- * Resumes a socket.
- *
- * @param {(net.Socket|tls.Socket)} socket The socket to resume
- * @api public
- */
-
-function resume(socket) {
- socket.resume();
-}
-
-function isDefaultPort(port, secure) {
- return Boolean((!secure && port === 80) || (secure && port === 443));
-}
diff --git a/node_modules/https-proxy-agent/package.json b/node_modules/https-proxy-agent/package.json
deleted file mode 100644
index 274df864b..000000000
--- a/node_modules/https-proxy-agent/package.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "_from": "https-proxy-agent@^2.2.3",
- "_id": "https-proxy-agent@2.2.4",
- "_inBundle": false,
- "_integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==",
- "_location": "/https-proxy-agent",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "https-proxy-agent@^2.2.3",
- "name": "https-proxy-agent",
- "escapedName": "https-proxy-agent",
- "rawSpec": "^2.2.3",
- "saveSpec": null,
- "fetchSpec": "^2.2.3"
- },
- "_requiredBy": [
- "/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz",
- "_shasum": "4ee7a737abd92678a293d9b34a1af4d0d08c787b",
- "_spec": "https-proxy-agent@^2.2.3",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/make-fetch-happen",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://n8.io/"
- },
- "bugs": {
- "url": "https://github.com/TooTallNate/node-https-proxy-agent/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "agent-base": "^4.3.0",
- "debug": "^3.1.0"
- },
- "deprecated": false,
- "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS",
- "devDependencies": {
- "mocha": "^6.2.0",
- "proxy": "1"
- },
- "engines": {
- "node": ">= 4.5.0"
- },
- "homepage": "https://github.com/TooTallNate/node-https-proxy-agent#readme",
- "keywords": [
- "https",
- "proxy",
- "endpoint",
- "agent"
- ],
- "license": "MIT",
- "main": "./index.js",
- "name": "https-proxy-agent",
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/node-https-proxy-agent.git"
- },
- "scripts": {
- "test": "mocha --reporter spec"
- },
- "types": "./index.d.ts",
- "version": "2.2.4"
-}
diff --git a/node_modules/libcipm/CHANGELOG.md b/node_modules/libcipm/CHANGELOG.md
deleted file mode 100644
index 80411f300..000000000
--- a/node_modules/libcipm/CHANGELOG.md
+++ /dev/null
@@ -1,508 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="4.0.7"></a>
-## [4.0.7](https://github.com/npm/libcipm/compare/v4.0.4...v4.0.7) (2019-10-09)
-
-
-### Bug Fixes
-
-* delete node_modules contents but keep the dir itself ([f668181](https://github.com/npm/libcipm/commit/f668181)), closes [#3](https://github.com/npm/libcipm/issues/3)
-
-<a name="4.0.4"></a>
-## [4.0.4](https://github.com/npm/libcipm/compare/v4.0.3...v4.0.4) (2019-09-24)
-
-
-### Bug Fixes
-
-* pack git directories properly ([576ab36](https://github.com/npm/libcipm/commit/576ab36)), closes [#4](https://github.com/npm/libcipm/issues/4)
-
-
-
-<a name="4.0.3"></a>
-## [4.0.3](https://github.com/npm/libcipm/compare/v4.0.2...v4.0.3) (2019-08-12)
-
-
-### Bug Fixes
-
-* do not pass opts.log to lifecycle ([46b2101](https://github.com/npm/libcipm/commit/46b2101))
-
-
-
-<a name="4.0.2"></a>
-## [4.0.2](https://github.com/npm/libcipm/compare/v4.0.1...v4.0.2) (2019-08-12)
-
-
-
-<a name="4.0.1"></a>
-## [4.0.1](https://github.com/npm/libcipm/compare/v4.0.0...v4.0.1) (2019-08-12)
-
-
-### Bug Fixes
-
-* respect and retain all configs passed in ([20b7372](https://github.com/npm/libcipm/commit/20b7372))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/npm/libcipm/compare/v3.0.3...v4.0.0) (2019-07-10)
-
-
-* npm-lifecycle@3.0.0 ([84b8d7e](https://github.com/npm/libcipm/commit/84b8d7e))
-
-
-### Bug Fixes
-
-* **lifecycle:** remove warning from bluebird ([#59](https://github.com/npm/libcipm/issues/59)) ([7af39e6](https://github.com/npm/libcipm/commit/7af39e6)), closes [#58](https://github.com/npm/libcipm/issues/58)
-
-
-### BREAKING CHANGES
-
-* requires updating node-gyp in npm/cli
-
-
-
-<a name="3.0.3"></a>
-## [3.0.3](https://github.com/npm/libcipm/compare/v3.0.2...v3.0.3) (2019-01-22)
-
-
-### Bug Fixes
-
-* **scripts:** pass in opts.dir directly ([018df27](https://github.com/npm/libcipm/commit/018df27))
-
-
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/npm/libcipm/compare/v3.0.1...v3.0.2) (2018-08-31)
-
-
-### Bug Fixes
-
-* **worker:** missed a spot ([4371558](https://github.com/npm/libcipm/commit/4371558))
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/npm/libcipm/compare/v3.0.0...v3.0.1) (2018-08-31)
-
-
-### Bug Fixes
-
-* **workers:** disable workers for now ([64db490](https://github.com/npm/libcipm/commit/64db490))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/npm/libcipm/compare/v2.0.2...v3.0.0) (2018-08-31)
-
-
-### Features
-
-* **config:** switch to modern, figgy-pudding configuration ([#57](https://github.com/npm/libcipm/issues/57)) ([161f6b2](https://github.com/npm/libcipm/commit/161f6b2))
-
-
-### BREAKING CHANGES
-
-* **config:** this updates cipm to use pacote@9, which consumes npm-style config objects, not pacoteOpts()-style objects.
-
-
-
-<a name="2.0.2"></a>
-## [2.0.2](https://github.com/npm/libcipm/compare/v2.0.1...v2.0.2) (2018-08-10)
-
-
-### Bug Fixes
-
-* **child:** only override dirPacker if opts.dirPacker is defined ([#55](https://github.com/npm/libcipm/issues/55)) ([13ab2f0](https://github.com/npm/libcipm/commit/13ab2f0))
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/npm/libcipm/compare/v2.0.0...v2.0.1) (2018-07-27)
-
-
-### Bug Fixes
-
-* **deps:** move mkdirp to prod deps ([6878f39](https://github.com/npm/libcipm/commit/6878f39))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/npm/libcipm/compare/v1.6.3...v2.0.0) (2018-05-24)
-
-
-### meta
-
-* update node version support ([694b4d3](https://github.com/npm/libcipm/commit/694b4d3))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="1.6.3"></a>
-## [1.6.3](https://github.com/npm/libcipm/compare/v1.6.2...v1.6.3) (2018-05-24)
-
-
-
-<a name="1.6.2"></a>
-## [1.6.2](https://github.com/npm/libcipm/compare/v1.6.1...v1.6.2) (2018-04-08)
-
-
-### Bug Fixes
-
-* **lifecycle:** detect binding.gyp for default install lifecycle ([#46](https://github.com/npm/libcipm/issues/46)) ([9149631](https://github.com/npm/libcipm/commit/9149631)), closes [#45](https://github.com/npm/libcipm/issues/45)
-
-
-
-<a name="1.6.1"></a>
-## [1.6.1](https://github.com/npm/libcipm/compare/v1.6.0...v1.6.1) (2018-03-13)
-
-
-### Bug Fixes
-
-* **bin:** Set non-zero exit code on error ([#41](https://github.com/npm/libcipm/issues/41)) ([54d0106](https://github.com/npm/libcipm/commit/54d0106))
-* **lifecycle:** defer to lifecycle’s internal logic as to whether or not to execute a run-script ([#42](https://github.com/npm/libcipm/issues/42)) ([7f27a52](https://github.com/npm/libcipm/commit/7f27a52)), closes [npm/npm#19258](https://github.com/npm/npm/issues/19258)
-* **prefix:** don't reference prefix before computing it ([#40](https://github.com/npm/libcipm/issues/40)) ([08ed1cc](https://github.com/npm/libcipm/commit/08ed1cc))
-* **prefix:** Resolve to promise when passing --prefix to npm ci ([#43](https://github.com/npm/libcipm/issues/43)) ([401d466](https://github.com/npm/libcipm/commit/401d466))
-
-
-
-<a name="1.6.0"></a>
-# [1.6.0](https://github.com/npm/libcipm/compare/v1.5.1...v1.6.0) (2018-03-01)
-
-
-### Bug Fixes
-
-* **bin:** cli.js was being excluded ([d62668e](https://github.com/npm/libcipm/commit/d62668e))
-
-
-### Features
-
-* **libcipm:** working standalone cipm release! ([a3383fd](https://github.com/npm/libcipm/commit/a3383fd))
-
-
-
-<a name="1.5.1"></a>
-## [1.5.1](https://github.com/npm/libcipm/compare/v1.5.0...v1.5.1) (2018-03-01)
-
-
-### Bug Fixes
-
-* **_from:** do not add _from to directory deps ([7405360](https://github.com/npm/libcipm/commit/7405360))
-
-
-
-<a name="1.5.0"></a>
-# [1.5.0](https://github.com/npm/libcipm/compare/v1.4.1...v1.5.0) (2018-03-01)
-
-
-### Bug Fixes
-
-* **errors:** handle aggregate errors better ([6239499](https://github.com/npm/libcipm/commit/6239499))
-
-
-### Features
-
-* **logger:** rudimentary progress bar update ([c5d9dc7](https://github.com/npm/libcipm/commit/c5d9dc7))
-
-
-
-<a name="1.4.1"></a>
-## [1.4.1](https://github.com/npm/libcipm/compare/v1.4.0...v1.4.1) (2018-02-27)
-
-
-### Bug Fixes
-
-* **buildTree:** linking in parallel causes hoist-clobbering ([5ffbc0e](https://github.com/npm/libcipm/commit/5ffbc0e)), closes [#39](https://github.com/npm/libcipm/issues/39)
-* **buildTree:** use checkDepEnv here too ([41a4634](https://github.com/npm/libcipm/commit/41a4634))
-* **perf:** split up updateJson and buildTree ([df5aba0](https://github.com/npm/libcipm/commit/df5aba0))
-* **perf:** stop using the readPackageJson version to update packages ([8da3d5a](https://github.com/npm/libcipm/commit/8da3d5a))
-
-
-
-<a name="1.4.0"></a>
-# [1.4.0](https://github.com/npm/libcipm/compare/v1.3.3...v1.4.0) (2018-02-21)
-
-
-### Features
-
-* **extract:** add support for --only and --also ([ad143ae](https://github.com/npm/libcipm/commit/ad143ae))
-
-
-
-<a name="1.3.3"></a>
-## [1.3.3](https://github.com/npm/libcipm/compare/v1.3.2...v1.3.3) (2018-02-21)
-
-
-### Bug Fixes
-
-* **extract:** stop extracting deps before parent :\ ([c6847dc](https://github.com/npm/libcipm/commit/c6847dc))
-
-
-
-<a name="1.3.2"></a>
-## [1.3.2](https://github.com/npm/libcipm/compare/v1.3.1...v1.3.2) (2018-02-15)
-
-
-
-<a name="1.3.1"></a>
-## [1.3.1](https://github.com/npm/libcipm/compare/v1.3.0...v1.3.1) (2018-02-15)
-
-
-
-<a name="1.3.0"></a>
-# [1.3.0](https://github.com/npm/libcipm/compare/v1.2.0...v1.3.0) (2018-02-13)
-
-
-### Features
-
-* **extract:** link directory deps and install missing bundle deps ([8334e9e](https://github.com/npm/libcipm/commit/8334e9e))
-
-
-
-<a name="1.2.0"></a>
-# [1.2.0](https://github.com/npm/libcipm/compare/v1.1.2...v1.2.0) (2018-02-07)
-
-
-### Features
-
-* **metadata:** add _resolved, _integrity, and _from on install ([36642dc](https://github.com/npm/libcipm/commit/36642dc))
-
-
-
-<a name="1.1.2"></a>
-## [1.1.2](https://github.com/npm/libcipm/compare/v1.1.1...v1.1.2) (2018-01-19)
-
-
-
-<a name="1.1.1"></a>
-## [1.1.1](https://github.com/npm/libcipm/compare/v1.1.0...v1.1.1) (2018-01-19)
-
-
-
-<a name="1.1.0"></a>
-# [1.1.0](https://github.com/npm/libcipm/compare/v1.0.1...v1.1.0) (2018-01-07)
-
-
-### Features
-
-* **log:** add some helpful log output ([f443f03](https://github.com/npm/libcipm/commit/f443f03))
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/npm/libcipm/compare/v1.0.0...v1.0.1) (2018-01-07)
-
-
-### Bug Fixes
-
-* **deps:** added protoduck to pkgjson ([ecbe719](https://github.com/npm/libcipm/commit/ecbe719))
-
-
-
-<a name="1.0.0"></a>
-# [1.0.0](https://github.com/npm/libcipm/compare/v0.9.1...v1.0.0) (2018-01-07)
-
-
-### Features
-
-* **cli:** splitting off CLI into a separate tool ([cff65c1](https://github.com/npm/libcipm/commit/cff65c1))
-
-
-### BREAKING CHANGES
-
-* **cli:** libcipm is its own library now,
-
-
-
-<a name="0.9.1"></a>
-## [0.9.1](https://github.com/npm/libcipm/compare/v0.9.0...v0.9.1) (2018-01-07)
-
-
-### Bug Fixes
-
-* **prefix:** oops @ prefix ([cc5adac](https://github.com/npm/libcipm/commit/cc5adac))
-
-
-
-<a name="0.9.0"></a>
-# [0.9.0](https://github.com/npm/libcipm/compare/v0.8.0...v0.9.0) (2018-01-07)
-
-
-### Bug Fixes
-
-* **package:** add pacote to bundleDependencies ([#36](https://github.com/npm/libcipm/issues/36)) ([a69742e](https://github.com/npm/libcipm/commit/a69742e))
-
-
-### Features
-
-* **config:** allow injection of npm configs ([#35](https://github.com/npm/libcipm/issues/35)) ([1f5694b](https://github.com/npm/libcipm/commit/1f5694b))
-
-
-
-<a name="0.8.0"></a>
-# [0.8.0](https://github.com/npm/libcipm/compare/v0.7.2...v0.8.0) (2017-11-28)
-
-
-### Features
-
-* **gyp:** new npm-lifecycle@2 with included node-gyp ([a4ed938](https://github.com/npm/libcipm/commit/a4ed938))
-
-
-
-<a name="0.7.2"></a>
-## [0.7.2](https://github.com/npm/libcipm/compare/v0.7.1...v0.7.2) (2017-10-13)
-
-
-### Bug Fixes
-
-* **extract:** idk why this was breaking. Seriously. ([433a2be](https://github.com/npm/libcipm/commit/433a2be))
-* **tree:** pass through a custom Promise to logiTree ([2d29efb](https://github.com/npm/libcipm/commit/2d29efb))
-
-
-### Performance Improvements
-
-* zoomzoom. Even more concurrency! ([db9c2e0](https://github.com/npm/libcipm/commit/db9c2e0))
-
-
-
-<a name="0.7.1"></a>
-## [0.7.1](https://github.com/npm/libcipm/compare/v0.7.0...v0.7.1) (2017-10-13)
-
-
-### Bug Fixes
-
-* **scripts:** separate extract and build and fix ordering ([eb072a5](https://github.com/npm/libcipm/commit/eb072a5))
-
-
-
-<a name="0.7.0"></a>
-# [0.7.0](https://github.com/npm/libcipm/compare/v0.6.0...v0.7.0) (2017-10-12)
-
-
-### Bug Fixes
-
-* **lockfile:** npm-shrinkwrap takes precedence over package-lock (#28) ([3b98fb3](https://github.com/npm/libcipm/commit/3b98fb3))
-
-
-### Features
-
-* **optional:** ignore failed optional deps (#27) ([a654629](https://github.com/npm/libcipm/commit/a654629))
-
-
-
-<a name="0.6.0"></a>
-# [0.6.0](https://github.com/npm/libcipm/compare/v0.5.1...v0.6.0) (2017-10-09)
-
-
-### Features
-
-* **scripts:** run prepare and prepublish scripts in the root (#26) ([e0e35a3](https://github.com/npm/libcipm/commit/e0e35a3))
-
-
-
-<a name="0.5.1"></a>
-## [0.5.1](https://github.com/npm/libcipm/compare/v0.5.0...v0.5.1) (2017-10-09)
-
-
-
-<a name="0.5.0"></a>
-# [0.5.0](https://github.com/npm/libcipm/compare/v0.4.0...v0.5.0) (2017-10-09)
-
-
-### Bug Fixes
-
-* **output:** npm does not punctuate this ([e7ba976](https://github.com/npm/libcipm/commit/e7ba976))
-* **shutdown:** make sure workers close ([7ab57d0](https://github.com/npm/libcipm/commit/7ab57d0))
-
-
-### Features
-
-* **bin:** link bins and run scripts (#25) ([fab74bf](https://github.com/npm/libcipm/commit/fab74bf))
-* **lifecycle:** run scripts in dep order (#23) ([68ecfac](https://github.com/npm/libcipm/commit/68ecfac))
-
-
-
-<a name="0.4.0"></a>
-# [0.4.0](https://github.com/npm/libcipm/compare/v0.3.2...v0.4.0) (2017-10-04)
-
-
-### Features
-
-* **opts:** support full range of relevant CLI opts (#19) ([6f2bd51](https://github.com/npm/libcipm/commit/6f2bd51))
-
-
-
-<a name="0.3.2"></a>
-## [0.3.2](https://github.com/npm/libcipm/compare/v0.3.1...v0.3.2) (2017-09-06)
-
-
-### Bug Fixes
-
-* **bin:** make cli executable by default (#13) ([14a9a5f](https://github.com/npm/libcipm/commit/14a9a5f))
-* **config:** use npm.cmd on win32 and fix tests (#12) ([d912d16](https://github.com/npm/libcipm/commit/d912d16)), closes [#12](https://github.com/npm/libcipm/issues/12)
-* **json:** strip BOM when reading JSON files (#8) ([2529149](https://github.com/npm/libcipm/commit/2529149))
-
-
-
-<a name="0.3.1"></a>
-## [0.3.1](https://github.com/npm/libcipm/compare/v0.3.0...v0.3.1) (2017-09-05)
-
-
-
-<a name="0.3.0"></a>
-# [0.3.0](https://github.com/npm/libcipm/compare/v0.2.0...v0.3.0) (2017-09-05)
-
-
-### Features
-
-* **lockfile:** verify that lockfile matches package.json (#5) ([f631203](https://github.com/npm/libcipm/commit/f631203))
-* **scripts:** support --ignore-scripts option (#9) ([213ca02](https://github.com/npm/libcipm/commit/213ca02))
-
-
-
-<a name="0.2.0"></a>
-# [0.2.0](https://github.com/npm/libcipm/compare/v0.1.1...v0.2.0) (2017-09-01)
-
-
-### Bug Fixes
-
-* **main:** default --prefix ([ff06a31](https://github.com/npm/libcipm/commit/ff06a31))
-
-
-### Features
-
-* **lifecycle:** actually run lifecycle scripts correctly ([7f8933e](https://github.com/npm/libcipm/commit/7f8933e))
-
-
-
-<a name="0.1.1"></a>
-## [0.1.1](https://github.com/npm/libcipm/compare/v0.1.0...v0.1.1) (2017-08-30)
-
-
-### Bug Fixes
-
-* **files:** oops. forgot to include new files in tarball ([1ee85c9](https://github.com/npm/libcipm/commit/1ee85c9))
-
-
-
-<a name="0.1.0"></a>
-# 0.1.0 (2017-08-30)
-
-
-### Bug Fixes
-
-* **config:** pipe stdout ([08e6af8](https://github.com/npm/libcipm/commit/08e6af8))
-* **extract:** make sure to extract properly ([9643583](https://github.com/npm/libcipm/commit/9643583))
-* **license:** switch to MIT ([0d10d0d](https://github.com/npm/libcipm/commit/0d10d0d))
-
-
-### Features
-
-* **impl:** rough prototype ([2970e43](https://github.com/npm/libcipm/commit/2970e43))
-* **lifecycle:** Run lifecycle events, implement prefix option, add unit tests (#1) ([d6629be](https://github.com/npm/libcipm/commit/d6629be)), closes [#1](https://github.com/npm/libcipm/issues/1)
-* **opts:** add usage string and --help ([efcc48d](https://github.com/npm/libcipm/commit/efcc48d))
diff --git a/node_modules/libcipm/LICENSE.md b/node_modules/libcipm/LICENSE.md
deleted file mode 100644
index 2ed9c0311..000000000
--- a/node_modules/libcipm/LICENSE.md
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright npm, Inc., Kat Marchán, and Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/libcipm/README.md b/node_modules/libcipm/README.md
deleted file mode 100644
index 3dd9cbe14..000000000
--- a/node_modules/libcipm/README.md
+++ /dev/null
@@ -1,37 +0,0 @@
-[![npm](https://img.shields.io/npm/v/libcipm.svg)](https://npm.im/libcipm) [![license](https://img.shields.io/npm/l/libcipm.svg)](https://npm.im/libcipm) [![Travis](https://img.shields.io/travis/npm/libcipm.svg)](https://travis-ci.org/npm/libcipm) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/libcipm?svg=true)](https://ci.appveyor.com/project/npm/libcipm) [![Coverage Status](https://coveralls.io/repos/github/npm/libcipm/badge.svg?branch=latest)](https://coveralls.io/github/npm/libcipm?branch=latest)
-
-[`libcipm`](https://github.com/npm/libcipm) installs npm projects in a way that's
-optimized for continuous integration/deployment/etc scenarios. It gives up
-the ability to build its own trees or install packages individually, as well
-as other user-oriented features, in exchange for speed and stricter
-guarantees about project state.
-
-For documentation about the associated command-line tool, see
-[`cipm`](https://npm.im/cipm).
-
-## Install
-
-`$ npm install libcipm`
-
-## Table of Contents
-
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
-
-### Features
-
-* npm-compatible project installation
-* lifecycle script support
-* blazing fast
-* npm-compatible caching
-* errors if `package.json` and `package-lock.json` are out of sync, instead of fixing it like npm does. Essentially provides a `--frozen` install.
-
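As a rough sketch (not from the original README) of driving the library programmatically: the package's `index.js`, included further down in this diff, exports an `Installer` class whose `run()` method performs the frozen install. Real consumers such as the `cipm` CLI feed it npm's full configuration (see `lib/config/npm-config.js`); the minimal options below, including the choice of `npmlog` as the logger, are assumptions for illustration only.

```js
'use strict'

const Installer = require('libcipm')
const log = require('npmlog') // assumed logger; the progress bar calls log.newItem()

// Runs a lockfile-driven install in the current project. package.json and
// package-lock.json (or npm-shrinkwrap.json) must already exist and be in sync,
// otherwise run() rejects instead of repairing the lockfile.
new Installer({ log: log, loglevel: 'warn' })
  .run()
  .then(installer => {
    console.log(`extracted ${installer.pkgCount} packages in ${installer.runTime / 1000}s`)
  })
  .catch(err => {
    console.error(err.message)
    process.exit(1)
  })
```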
-### Contributing
-
-The libcipm team enthusiastically welcomes contributions and project
-participation! There's a bunch of things you can do if you want to contribute!
-The [Contributor Guide](CONTRIBUTING.md) has all the information you need for
-everything from reporting bugs to contributing entire new features. Please don't
-hesitate to jump in if you'd like to, or even ask us questions if something
-isn't clear.
diff --git a/node_modules/libcipm/index.js b/node_modules/libcipm/index.js
deleted file mode 100644
index 42d05e1f8..000000000
--- a/node_modules/libcipm/index.js
+++ /dev/null
@@ -1,429 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const binLink = require('bin-links')
-const buildLogicalTree = require('npm-logical-tree')
-const extract = require('./lib/extract.js')
-const figgyPudding = require('figgy-pudding')
-const fs = require('graceful-fs')
-const getPrefix = require('find-npm-prefix')
-const lifecycle = require('npm-lifecycle')
-const lockVerify = require('lock-verify')
-const mkdirp = BB.promisify(require('mkdirp'))
-const npa = require('npm-package-arg')
-const path = require('path')
-const readPkgJson = BB.promisify(require('read-package-json'))
-const rimraf = BB.promisify(require('rimraf'))
-
-const readFileAsync = BB.promisify(fs.readFile)
-const statAsync = BB.promisify(fs.stat)
-const symlinkAsync = BB.promisify(fs.symlink)
-const writeFileAsync = BB.promisify(fs.writeFile)
-
-const LifecycleOpts = figgyPudding({
- config: {},
- 'script-shell': {},
- scriptShell: 'script-shell',
- 'ignore-scripts': {},
- ignoreScripts: 'ignore-scripts',
- 'ignore-prepublish': {},
- ignorePrepublish: 'ignore-prepublish',
- 'scripts-prepend-node-path': {},
- scriptsPrependNodePath: 'scripts-prepend-node-path',
- 'unsafe-perm': {},
- unsafePerm: 'unsafe-perm',
- prefix: {},
- dir: 'prefix',
- failOk: { default: false }
-}, { other () { return true } })
-
-class Installer {
- constructor (opts) {
- this.opts = opts
-
- // Stats
- this.startTime = Date.now()
- this.runTime = 0
- this.timings = { scripts: 0 }
- this.pkgCount = 0
-
- // Misc
- this.log = this.opts.log || require('./lib/silentlog.js')
- this.pkg = null
- this.tree = null
- this.failedDeps = new Set()
- }
-
- timedStage (name) {
- const start = Date.now()
- return BB.resolve(this[name].apply(this, [].slice.call(arguments, 1)))
- .tap(() => {
- this.timings[name] = Date.now() - start
- this.log.info(name, `Done in ${this.timings[name] / 1000}s`)
- })
- }
-
- run () {
- return this.timedStage('prepare')
- .then(() => this.timedStage('extractTree', this.tree))
- .then(() => this.timedStage('updateJson', this.tree))
- .then(pkgJsons => this.timedStage('buildTree', this.tree, pkgJsons))
- .then(() => this.timedStage('garbageCollect', this.tree))
- .then(() => this.timedStage('runScript', 'prepublish', this.pkg, this.prefix))
- .then(() => this.timedStage('runScript', 'prepare', this.pkg, this.prefix))
- .then(() => this.timedStage('teardown'))
- .then(() => {
- this.runTime = Date.now() - this.startTime
- this.log.info(
- 'run-scripts',
- `total script time: ${this.timings.scripts / 1000}s`
- )
- this.log.info(
- 'run-time',
- `total run time: ${this.runTime / 1000}s`
- )
- })
- .catch(err => {
- this.timedStage('teardown')
- if (err.message.match(/aggregate error/)) {
- throw err[0]
- } else {
- throw err
- }
- })
- .then(() => this)
- }
-
- prepare () {
- this.log.info('prepare', 'initializing installer')
- this.log.level = this.opts.loglevel
- this.log.verbose('prepare', 'starting workers')
- extract.startWorkers()
-
- return (
- this.opts.prefix && this.opts.global
- ? BB.resolve(this.opts.prefix)
- // There's some Special™ logic around the `--prefix` config when it
- // comes from a config file or env vs when it comes from the CLI
- : process.argv.some(arg => arg.match(/^\s*--prefix\s*/i))
- ? BB.resolve(this.opts.prefix)
- : getPrefix(process.cwd())
- )
- .then(prefix => {
- this.prefix = prefix
- this.log.verbose('prepare', 'installation prefix: ' + prefix)
- return BB.join(
- readJson(prefix, 'package.json'),
- readJson(prefix, 'package-lock.json', true),
- readJson(prefix, 'npm-shrinkwrap.json', true),
- (pkg, lock, shrink) => {
- if (shrink) {
- this.log.verbose('prepare', 'using npm-shrinkwrap.json')
- } else if (lock) {
- this.log.verbose('prepare', 'using package-lock.json')
- }
- pkg._shrinkwrap = shrink || lock
- this.pkg = pkg
- }
- )
- })
- .then(() => statAsync(
- path.join(this.prefix, 'node_modules')
- ).catch(err => { if (err.code !== 'ENOENT') { throw err } }))
- .then(stat => {
- stat && this.log.warn(
- 'prepare', 'removing existing node_modules/ before installation'
- )
- return BB.join(
- this.checkLock(),
- stat && rimraf(path.join(this.prefix, 'node_modules/*'))
- )
- }).then(() => {
- // This needs to happen -after- we've done checkLock()
- this.tree = buildLogicalTree(this.pkg, this.pkg._shrinkwrap)
- this.log.silly('tree', this.tree)
- this.expectedTotal = 0
- this.tree.forEach((dep, next) => {
- this.expectedTotal++
- next()
- })
- })
- }
-
- teardown () {
- this.log.verbose('teardown', 'shutting down workers.')
- return extract.stopWorkers()
- }
-
- checkLock () {
- this.log.verbose('checkLock', 'verifying package-lock data')
- const pkg = this.pkg
- const prefix = this.prefix
- if (!pkg._shrinkwrap || !pkg._shrinkwrap.lockfileVersion) {
- return BB.reject(
- new Error(`cipm can only install packages with an existing package-lock.json or npm-shrinkwrap.json with lockfileVersion >= 1. Run an install with npm@5 or later to generate it, then try again.`)
- )
- }
- return lockVerify(prefix).then(result => {
- if (result.status) {
- result.warnings.forEach(w => this.log.warn('lockfile', w))
- } else {
- throw new Error(
- 'cipm can only install packages when your package.json and package-lock.json or ' +
- 'npm-shrinkwrap.json are in sync. Please update your lock file with `npm install` ' +
- 'before continuing.\n\n' +
- result.warnings.map(w => 'Warning: ' + w).join('\n') + '\n' +
- result.errors.join('\n') + '\n'
- )
- }
- }).catch(err => {
- throw err
- })
- }
-
- extractTree (tree) {
- this.log.verbose('extractTree', 'extracting dependencies to node_modules/')
- const cg = this.log.newItem('extractTree', this.expectedTotal)
- return tree.forEachAsync((dep, next) => {
- if (!this.checkDepEnv(dep)) { return }
- const depPath = dep.path(this.prefix)
- const spec = npa.resolve(dep.name, dep.version, this.prefix)
- if (dep.isRoot) {
- return next()
- } else if (spec.type === 'directory') {
- const relative = path.relative(path.dirname(depPath), spec.fetchSpec)
- this.log.silly('extractTree', `${dep.name}@${spec.fetchSpec} -> ${depPath} (symlink)`)
- return mkdirp(path.dirname(depPath))
- .then(() => symlinkAsync(relative, depPath, 'junction'))
- .catch(
- () => rimraf(depPath)
- .then(() => symlinkAsync(relative, depPath, 'junction'))
- ).then(() => next())
- .then(() => {
- this.pkgCount++
- cg.completeWork(1)
- })
- } else {
- this.log.silly('extractTree', `${dep.name}@${dep.version} -> ${depPath}`)
- return (
- dep.bundled
- ? statAsync(path.join(depPath, 'package.json')).catch(err => {
- if (err.code !== 'ENOENT') { throw err }
- })
- : BB.resolve(false)
- )
- .then(wasBundled => {
- // Don't extract if a bundled dep is actually present
- if (wasBundled) {
- cg.completeWork(1)
- return next()
- } else {
- return BB.resolve(extract.child(
- dep.name, dep, depPath, this.opts
- ))
- .then(() => cg.completeWork(1))
- .then(() => { this.pkgCount++ })
- .then(next)
- }
- })
- }
- }, {concurrency: 50, Promise: BB})
- .then(() => cg.finish())
- }
-
- checkDepEnv (dep) {
- const includeDev = (
- // Covers --dev and --development (from npm config itself)
- this.opts.dev ||
- (
- !/^prod(uction)?$/.test(this.opts.only) &&
- !this.opts.production
- ) ||
- /^dev(elopment)?$/.test(this.opts.only) ||
- /^dev(elopment)?$/.test(this.opts.also)
- )
- const includeProd = !/^dev(elopment)?$/.test(this.opts.only)
- const includeOptional = includeProd && this.opts.optional
- return (dep.dev && includeDev) ||
- (dep.optional && includeOptional) ||
- (!dep.dev && !dep.optional && includeProd)
- }
-
- updateJson (tree) {
- this.log.verbose('updateJson', 'updating json deps to include _from')
- const pkgJsons = new Map()
- return tree.forEachAsync((dep, next) => {
- if (!this.checkDepEnv(dep)) { return }
- const spec = npa.resolve(dep.name, dep.version)
- const depPath = dep.path(this.prefix)
- return next()
- .then(() => readJson(depPath, 'package.json'))
- .then(pkg => (spec.registry || spec.type === 'directory')
- ? pkg
- : this.updateFromField(dep, pkg).then(() => pkg)
- )
- .then(pkg => (pkg.scripts && pkg.scripts.install)
- ? pkg
- : this.updateInstallScript(dep, pkg).then(() => pkg)
- )
- .tap(pkg => { pkgJsons.set(dep, pkg) })
- }, {concurrency: 100, Promise: BB})
- .then(() => pkgJsons)
- }
-
- buildTree (tree, pkgJsons) {
- this.log.verbose('buildTree', 'finalizing tree and running scripts')
- return tree.forEachAsync((dep, next) => {
- if (!this.checkDepEnv(dep)) { return }
- const spec = npa.resolve(dep.name, dep.version)
- const depPath = dep.path(this.prefix)
- const pkg = pkgJsons.get(dep)
- this.log.silly('buildTree', `linking ${spec}`)
- return this.runScript('preinstall', pkg, depPath)
- .then(next) // build children between preinstall and binLink
- // Don't link root bins
- .then(() => {
- if (
- dep.isRoot ||
- !(pkg.bin || pkg.man || (pkg.directories && pkg.directories.bin))
- ) {
- // We skip the relatively expensive readPkgJson if there's no way
- // we'll actually be linking any bins or mans
- return
- }
- return readPkgJson(path.join(depPath, 'package.json'))
- .then(pkg => binLink(pkg, depPath, false, {
- force: this.opts.force,
- ignoreScripts: this.opts['ignore-scripts'],
- log: Object.assign({}, this.log, { info: () => {} }),
- name: pkg.name,
- pkgId: pkg.name + '@' + pkg.version,
- prefix: this.prefix,
- prefixes: [this.prefix],
- umask: this.opts.umask
- }), e => {
- this.log.verbose('buildTree', `error linking ${spec}: ${e.message} ${e.stack}`)
- })
- })
- .then(() => this.runScript('install', pkg, depPath))
- .then(() => this.runScript('postinstall', pkg, depPath))
- .then(() => this)
- .catch(e => {
- if (dep.optional) {
- this.failedDeps.add(dep)
- } else {
- throw e
- }
- })
- }, {concurrency: 1, Promise: BB})
- }
-
- updateFromField (dep, pkg) {
- const depPath = dep.path(this.prefix)
- const depPkgPath = path.join(depPath, 'package.json')
- const parent = dep.requiredBy.values().next().value
- return readJson(parent.path(this.prefix), 'package.json')
- .then(ppkg =>
- (ppkg.dependencies && ppkg.dependencies[dep.name]) ||
- (ppkg.devDependencies && ppkg.devDependencies[dep.name]) ||
- (ppkg.optionalDependencies && ppkg.optionalDependencies[dep.name])
- )
- .then(from => npa.resolve(dep.name, from))
- .then(from => { pkg._from = from.toString() })
- .then(() => writeFileAsync(depPkgPath, JSON.stringify(pkg, null, 2)))
- .then(() => pkg)
- }
-
- updateInstallScript (dep, pkg) {
- const depPath = dep.path(this.prefix)
- return statAsync(path.join(depPath, 'binding.gyp'))
- .catch(err => { if (err.code !== 'ENOENT') { throw err } })
- .then(stat => {
- if (stat) {
- if (!pkg.scripts) {
- pkg.scripts = {}
- }
- pkg.scripts.install = 'node-gyp rebuild'
- }
- })
- .then(() => pkg)
- }
-
- // A cute little mark-and-sweep collector!
- garbageCollect (tree) {
- if (!this.failedDeps.size) { return }
- return sweep(
- tree,
- this.prefix,
- mark(tree, this.failedDeps)
- )
- .then(purged => {
- this.purgedDeps = purged
- this.pkgCount -= purged.size
- })
- }
-
- runScript (stage, pkg, pkgPath) {
- const start = Date.now()
- if (!this.opts['ignore-scripts']) {
- // TODO(mikesherov): remove pkg._id when npm-lifecycle no longer relies on it
- pkg._id = pkg.name + '@' + pkg.version
- return BB.resolve(lifecycle(
- pkg, stage, pkgPath, LifecycleOpts(this.opts).concat({
- // TODO: can be removed once npm-lifecycle is updated to modern
- // config practices.
- config: Object.assign({}, this.opts, {
- log: null,
- dirPacker: null
- }),
- dir: this.prefix
- }))
- ).tap(() => { this.timings.scripts += Date.now() - start })
- }
- return BB.resolve()
- }
-}
-module.exports = Installer
-
-function mark (tree, failed) {
- const liveDeps = new Set()
- tree.forEach((dep, next) => {
- if (!failed.has(dep)) {
- liveDeps.add(dep)
- next()
- }
- })
- return liveDeps
-}
-
-function sweep (tree, prefix, liveDeps) {
- const purged = new Set()
- return tree.forEachAsync((dep, next) => {
- return next().then(() => {
- if (
- !dep.isRoot && // never purge root! 🙈
- !liveDeps.has(dep) &&
- !purged.has(dep)
- ) {
- purged.add(dep)
- return rimraf(dep.path(prefix))
- }
- })
- }, {concurrency: 50, Promise: BB}).then(() => purged)
-}
-
-function stripBOM (str) {
- return str.replace(/^\uFEFF/, '')
-}
-
-module.exports._readJson = readJson
-function readJson (jsonPath, name, ignoreMissing) {
- return readFileAsync(path.join(jsonPath, name), 'utf8')
- .then(str => JSON.parse(stripBOM(str)))
- .catch({code: 'ENOENT'}, err => {
- if (!ignoreMissing) {
- throw err
- }
- })
-}
diff --git a/node_modules/libcipm/lib/config/npm-config.js b/node_modules/libcipm/lib/config/npm-config.js
deleted file mode 100644
index a05119061..000000000
--- a/node_modules/libcipm/lib/config/npm-config.js
+++ /dev/null
@@ -1,84 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const fs = require('fs')
-const figgyPudding = require('figgy-pudding')
-const ini = require('ini')
-const path = require('path')
-const spawn = require('child_process').spawn
-
-const readFileAsync = BB.promisify(fs.readFile)
-
-const NpmConfig = figgyPudding({
- cache: { default: '' },
- then: {},
- userconfig: {}
-})
-
-module.exports = NpmConfig
-
-module.exports.fromNpm = getNpmConfig
-function getNpmConfig (argv) {
- return new BB((resolve, reject) => {
- const npmBin = process.platform === 'win32' ? 'npm.cmd' : 'npm'
- const child = spawn(npmBin, [
- 'config', 'ls', '--json', '-l'
- // We add argv here to get npm to parse those options for us :D
- ].concat(argv || []), {
- env: process.env,
- cwd: process.cwd(),
- stdio: [0, 'pipe', 2]
- })
-
- let stdout = ''
- if (child.stdout) {
- child.stdout.on('data', (chunk) => {
- stdout += chunk
- })
- }
-
- child.on('error', reject)
- child.on('close', (code) => {
- if (code === 127) {
- reject(new Error('`npm` command not found. Please ensure you have npm@5.4.0 or later installed.'))
- } else {
- try {
- resolve(JSON.parse(stdout))
- } catch (e) {
- reject(new Error('`npm config ls --json` failed to output json. Please ensure you have npm@5.4.0 or later installed.'))
- }
- }
- })
- }).then(opts => {
- return BB.all(
- process.cwd().split(path.sep).reduce((acc, next) => {
- acc.path = path.join(acc.path, next)
- acc.promises.push(maybeReadIni(path.join(acc.path, '.npmrc')))
- acc.promises.push(maybeReadIni(path.join(acc.path, 'npmrc')))
- return acc
- }, {
- path: '',
- promises: []
- }).promises.concat(
- opts.userconfig ? maybeReadIni(opts.userconfig) : {}
- )
- ).then(configs => NpmConfig(...configs, opts))
- }).then(opts => {
- if (opts.cache) {
- return opts.concat({ cache: path.join(opts.cache, '_cacache') })
- } else {
- return opts
- }
- })
-}
-
-function maybeReadIni (f) {
- return readFileAsync(f, 'utf8').catch(err => {
- if (err.code === 'ENOENT') {
- return ''
- } else {
- throw err
- }
- }).then(ini.parse)
-}
diff --git a/node_modules/libcipm/lib/extract.js b/node_modules/libcipm/lib/extract.js
deleted file mode 100644
index f87d2c791..000000000
--- a/node_modules/libcipm/lib/extract.js
+++ /dev/null
@@ -1,67 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const extractionWorker = require('./worker.js')
-const figgyPudding = require('figgy-pudding')
-const npa = require('npm-package-arg')
-const WORKER_PATH = require.resolve('./worker.js')
-let workerFarm
-
-// Broken for now, cause too many issues on some systems.
-const ENABLE_WORKERS = false
-
-const ExtractOpts = figgyPudding({
- log: {},
- dirPacker: {}
-})
-
-module.exports = {
- startWorkers () {
- if (ENABLE_WORKERS) {
- if (!workerFarm) { workerFarm = require('worker-farm') }
- this._workers = workerFarm({
- maxConcurrentCallsPerWorker: 20,
- maxRetries: 1
- }, WORKER_PATH)
- }
- },
-
- stopWorkers () {
- if (ENABLE_WORKERS) {
- if (!workerFarm) { workerFarm = require('worker-farm') }
- workerFarm.end(this._workers)
- }
- },
-
- child (name, child, childPath, opts) {
- opts = ExtractOpts(opts)
- const spec = npa.resolve(name, child.version)
- let childOpts = opts.concat({
- integrity: child.integrity,
- resolved: child.resolved
- })
- const args = [spec, childPath, childOpts]
- return BB.fromNode((cb) => {
- let launcher = extractionWorker
- let msg = args
- const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0]
- if (ENABLE_WORKERS && (spec.registry || spec.type === 'remote')) {
- if (!workerFarm) { workerFarm = require('worker-farm') }
- // We can't serialize these options
- childOpts = childOpts.concat({
- log: null,
- dirPacker: null
- })
- // workers will run things in parallel!
- launcher = this._workers
- try {
- msg = JSON.stringify(msg)
- } catch (e) {
- return cb(e)
- }
- }
- launcher(msg, cb)
- })
- }
-}
diff --git a/node_modules/libcipm/lib/silentlog.js b/node_modules/libcipm/lib/silentlog.js
deleted file mode 100644
index 4c9d6c57e..000000000
--- a/node_modules/libcipm/lib/silentlog.js
+++ /dev/null
@@ -1,13 +0,0 @@
-'use strict'
-
-const noop = Function.prototype
-module.exports = {
- error: noop,
- warn: noop,
- info: noop,
- verbose: noop,
- silly: noop,
- http: noop,
- pause: noop,
- resume: noop
-}
diff --git a/node_modules/libcipm/lib/worker.js b/node_modules/libcipm/lib/worker.js
deleted file mode 100644
index bab607e52..000000000
--- a/node_modules/libcipm/lib/worker.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-// const log = require('npmlog')
-const pacote = require('pacote')
-
-module.exports = (args, cb) => {
- const parsed = typeof args === 'string' ? JSON.parse(args) : args
- const spec = parsed[0]
- const extractTo = parsed[1]
- const opts = parsed[2]
- // opts.log = log
- // log.level = opts.loglevel
- return BB.resolve(pacote.extract(spec, extractTo, opts)).nodeify(cb)
-}
diff --git a/node_modules/libcipm/node_modules/.bin/which b/node_modules/libcipm/node_modules/.bin/which
deleted file mode 120000
index f62471c85..000000000
--- a/node_modules/libcipm/node_modules/.bin/which
+++ /dev/null
@@ -1 +0,0 @@
-../which/bin/which \ No newline at end of file
diff --git a/node_modules/libcipm/node_modules/cacache/CHANGELOG.md b/node_modules/libcipm/node_modules/cacache/CHANGELOG.md
deleted file mode 100644
index f67fbc8b4..000000000
--- a/node_modules/libcipm/node_modules/cacache/CHANGELOG.md
+++ /dev/null
@@ -1,657 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-### [12.0.3](https://github.com/npm/cacache/compare/v12.0.2...v12.0.3) (2019-08-19)
-
-
-### Bug Fixes
-
-* do not chown if not running as root ([2d80af9](https://github.com/npm/cacache/commit/2d80af9))
-
-
-
-### [12.0.2](https://github.com/npm/cacache/compare/v12.0.1...v12.0.2) (2019-07-19)
-
-
-
-### [12.0.1](https://github.com/npm/cacache/compare/v12.0.0...v12.0.1) (2019-07-19)
-
-* **deps** Abstracted out `lib/util/infer-owner.js` to
- [@npmcli/infer-owner](https://www.npmjs.com/package/@npmcli/infer-owner)
- so that it could be more easily used in other parts of the npm CLI.
-
-
-## [12.0.0](https://github.com/npm/cacache/compare/v11.3.3...v12.0.0) (2019-07-15)
-
-
-### Features
-
-* infer uid/gid instead of accepting as options ([ac84d14](https://github.com/npm/cacache/commit/ac84d14))
-* **i18n:** add another error message ([676cb32](https://github.com/npm/cacache/commit/676cb32))
-
-
-### BREAKING CHANGES
-
-* the uid gid options are no longer respected or
-necessary. As of this change, cacache will always match the cache
-contents to the ownership of the cache directory (or its parent
-directory), regardless of what the caller passes in.
-
-Reasoning:
-
-The number one reason to use a uid or gid option was to keep root-owned
-files from causing problems in the cache. In npm's case, this meant
-that CLI's ./lib/command.js had to work out the appropriate uid and gid,
-then pass it to the libnpmcommand module, which had to in turn pass the
-uid and gid to npm-registry-fetch, which then passed it to
-make-fetch-happen, which passed it to cacache. (For package fetching,
-pacote would be in that mix as well.)
-
-Added to that, `cacache.rm()` will actually _write_ a file into the
-cache index, but has no way to accept an option so that its call to
-entry-index.js will write the index with the appropriate uid/gid.
-Little ownership bugs were all over the place, and tricky to trace
-through. (Why should make-fetch-happen even care about accepting or
-passing uids and gids? It's an http library.)
-
-This change allows us to keep the cache from having mixed ownership in
-any situation.
-
-Of course, this _does_ mean that if you have a root-owned but
-user-writable folder (for example, `/tmp`), then the cache will try to
-chown everything to root.
-
-The solution is for the user to create a folder, make it user-owned, and
-use that, rather than relying on cacache to create the root cache folder.
-
-If we decide to restore the uid/gid opts, and use ownership inference
-only when uid/gid are unset, then take care to also make rm take an
-option object, and pass it through to entry-index.js.
-
-
-
-### [11.3.3](https://github.com/npm/cacache/compare/v11.3.2...v11.3.3) (2019-06-17)
-
-
-### Bug Fixes
-
-* **audit:** npm audit fix ([200a6d5](https://github.com/npm/cacache/commit/200a6d5))
-* **config:** Add ssri config 'error' option ([#146](https://github.com/npm/cacache/issues/146)) ([47de8f5](https://github.com/npm/cacache/commit/47de8f5))
-* **deps:** npm audit fix ([481a7dc](https://github.com/npm/cacache/commit/481a7dc))
-* **standard:** standard --fix ([7799149](https://github.com/npm/cacache/commit/7799149))
-* **write:** avoid another cb never called situation ([5156561](https://github.com/npm/cacache/commit/5156561))
-
-
-
-<a name="11.3.2"></a>
-## [11.3.2](https://github.com/npm/cacache/compare/v11.3.1...v11.3.2) (2018-12-21)
-
-
-### Bug Fixes
-
-* **get:** make sure to handle errors in the .then ([b10bcd0](https://github.com/npm/cacache/commit/b10bcd0))
-
-
-
-<a name="11.3.1"></a>
-## [11.3.1](https://github.com/npm/cacache/compare/v11.3.0...v11.3.1) (2018-11-05)
-
-
-### Bug Fixes
-
-* **get:** export hasContent.sync properly ([d76c920](https://github.com/npm/cacache/commit/d76c920))
-
-
-
-<a name="11.3.0"></a>
-# [11.3.0](https://github.com/npm/cacache/compare/v11.2.0...v11.3.0) (2018-11-05)
-
-
-### Features
-
-* **get:** add sync API for reading ([db1e094](https://github.com/npm/cacache/commit/db1e094))
-
-
-
-<a name="11.2.0"></a>
-# [11.2.0](https://github.com/npm/cacache/compare/v11.1.0...v11.2.0) (2018-08-08)
-
-
-### Features
-
-* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/npm/cacache/commit/fe638b6))
-
-
-
-<a name="11.1.0"></a>
-# [11.1.0](https://github.com/npm/cacache/compare/v11.0.3...v11.1.0) (2018-08-01)
-
-
-### Features
-
-* **read:** add sync support for low-level content read ([b43af83](https://github.com/npm/cacache/commit/b43af83))
-
-
-
-<a name="11.0.3"></a>
-## [11.0.3](https://github.com/npm/cacache/compare/v11.0.2...v11.0.3) (2018-08-01)
-
-
-### Bug Fixes
-
-* **config:** add ssri config options ([#136](https://github.com/npm/cacache/issues/136)) ([10d5d9a](https://github.com/npm/cacache/commit/10d5d9a))
-* **perf:** refactor content.read to avoid lstats ([c5ac10e](https://github.com/npm/cacache/commit/c5ac10e))
-* **test:** oops when removing safe-buffer ([1950490](https://github.com/npm/cacache/commit/1950490))
-
-
-
-<a name="11.0.2"></a>
-## [11.0.2](https://github.com/npm/cacache/compare/v11.0.1...v11.0.2) (2018-05-07)
-
-
-### Bug Fixes
-
-* **verify:** size param no longer lost in a verify ([#131](https://github.com/npm/cacache/issues/131)) ([c614a19](https://github.com/npm/cacache/commit/c614a19)), closes [#130](https://github.com/npm/cacache/issues/130)
-
-
-
-<a name="11.0.1"></a>
-## [11.0.1](https://github.com/npm/cacache/compare/v11.0.0...v11.0.1) (2018-04-10)
-
-
-
-<a name="11.0.0"></a>
-# [11.0.0](https://github.com/npm/cacache/compare/v10.0.4...v11.0.0) (2018-04-09)
-
-
-### Features
-
-* **opts:** use figgy-pudding for opts ([#128](https://github.com/npm/cacache/issues/128)) ([33d4eed](https://github.com/npm/cacache/commit/33d4eed))
-
-
-### meta
-
-* drop support for node@4 ([529f347](https://github.com/npm/cacache/commit/529f347))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="10.0.4"></a>
-## [10.0.4](https://github.com/npm/cacache/compare/v10.0.3...v10.0.4) (2018-02-16)
-
-
-
-<a name="10.0.3"></a>
-## [10.0.3](https://github.com/npm/cacache/compare/v10.0.2...v10.0.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/npm/cacache/commit/fa918f5))
-
-
-
-<a name="10.0.2"></a>
-## [10.0.2](https://github.com/npm/cacache/compare/v10.0.1...v10.0.2) (2018-01-07)
-
-
-### Bug Fixes
-
-* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/npm/cacache/commit/347dc36))
-
-
-
-<a name="10.0.1"></a>
-## [10.0.1](https://github.com/npm/cacache/compare/v10.0.0...v10.0.1) (2017-11-15)
-
-
-### Bug Fixes
-
-* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/npm/cacache/commit/073fbe1))
-
-
-
-<a name="10.0.0"></a>
-# [10.0.0](https://github.com/npm/cacache/compare/v9.3.0...v10.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/npm/cacache/commit/fdbb4e5))
-
-
-### Performance Improvements
-
-* more copyFile benchmarks ([63787bb](https://github.com/npm/cacache/commit/63787bb))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="9.3.0"></a>
-# [9.3.0](https://github.com/npm/cacache/compare/v9.2.9...v9.3.0) (2017-10-07)
-
-
-### Features
-
-* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/npm/cacache/commit/067b5f6))
-
-
-
-<a name="9.2.9"></a>
-## [9.2.9](https://github.com/npm/cacache/compare/v9.2.8...v9.2.9) (2017-06-17)
-
-
-
-<a name="9.2.8"></a>
-## [9.2.8](https://github.com/npm/cacache/compare/v9.2.7...v9.2.8) (2017-06-05)
-
-
-### Bug Fixes
-
-* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/npm/cacache/commit/c3232ea))
-
-
-
-<a name="9.2.7"></a>
-## [9.2.7](https://github.com/npm/cacache/compare/v9.2.6...v9.2.7) (2017-06-05)
-
-
-### Bug Fixes
-
-* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/npm/cacache/commit/4131196))
-
-
-
-<a name="9.2.6"></a>
-## [9.2.6](https://github.com/npm/cacache/compare/v9.2.5...v9.2.6) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/npm/cacache/commit/5209ffe))
-
-
-
-<a name="9.2.5"></a>
-## [9.2.5](https://github.com/npm/cacache/compare/v9.2.4...v9.2.5) (2017-05-25)
-
-
-### Bug Fixes
-
-* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/npm/cacache/commit/84e1d7e))
-
-
-
-<a name="9.2.4"></a>
-## [9.2.4](https://github.com/npm/cacache/compare/v9.2.3...v9.2.4) (2017-05-24)
-
-
-### Bug Fixes
-
-* **deps:** bumping deps ([bbccb12](https://github.com/npm/cacache/commit/bbccb12))
-
-
-
-<a name="9.2.3"></a>
-## [9.2.3](https://github.com/npm/cacache/compare/v9.2.2...v9.2.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/npm/cacache/commit/ac90bc0))
-
-
-
-<a name="9.2.2"></a>
-## [9.2.2](https://github.com/npm/cacache/compare/v9.2.1...v9.2.2) (2017-05-14)
-
-
-### Bug Fixes
-
-* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/npm/cacache/commit/519b4ee))
-
-
-
-<a name="9.2.1"></a>
-## [9.2.1](https://github.com/npm/cacache/compare/v9.2.0...v9.2.1) (2017-05-14)
-
-
-### Bug Fixes
-
-* **docs:** fixing translation messup ([bb9e4f9](https://github.com/npm/cacache/commit/bb9e4f9))
-
-
-
-<a name="9.2.0"></a>
-# [9.2.0](https://github.com/npm/cacache/compare/v9.1.0...v9.2.0) (2017-05-14)
-
-
-### Features
-
-* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/npm/cacache/commit/531f9a4))
-
-
-
-<a name="9.1.0"></a>
-# [9.1.0](https://github.com/npm/cacache/compare/v9.0.0...v9.1.0) (2017-05-14)
-
-
-### Features
-
-* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/npm/cacache/commit/323b90c))
-
-
-
-<a name="9.0.0"></a>
-# [9.0.0](https://github.com/npm/cacache/compare/v8.0.0...v9.0.0) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoization:** actually use the LRU ([0e55dc9](https://github.com/npm/cacache/commit/0e55dc9))
-
-
-### Features
-
-* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/npm/cacache/commit/e5614c7))
-
-
-### BREAKING CHANGES
-
-* **memoization:** If you were passing an object to opts.memoize, it will now be used as an injected memoization object. If you were only passing booleans and other non-objects through that option, no changes are needed.
-
-
-
-<a name="8.0.0"></a>
-# [8.0.0](https://github.com/npm/cacache/compare/v7.1.0...v8.0.0) (2017-04-22)
-
-
-### Features
-
-* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/npm/cacache/commit/bad6c49)), closes [#87](https://github.com/npm/cacache/issues/87)
-
-
-### BREAKING CHANGES
-
-* **read:** hasContent now returns an object with `{sri, size}` instead of `sri`. Use `result.sri` anywhere that needed the old return value.
-
-
-
-<a name="7.1.0"></a>
-# [7.1.0](https://github.com/npm/cacache/compare/v7.0.5...v7.1.0) (2017-04-20)
-
-
-### Features
-
-* **size:** handle content size info (#49) ([91230af](https://github.com/npm/cacache/commit/91230af))
-
-
-
-<a name="7.0.5"></a>
-## [7.0.5](https://github.com/npm/cacache/compare/v7.0.4...v7.0.5) (2017-04-18)
-
-
-### Bug Fixes
-
-* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/npm/cacache/commit/6d13e8e))
-* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/npm/cacache/commit/3624fc5))
-
-
-
-<a name="7.0.4"></a>
-## [7.0.4](https://github.com/npm/cacache/compare/v7.0.3...v7.0.4) (2017-04-15)
-
-
-### Bug Fixes
-
-* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/npm/cacache/commit/d49bbcd))
-
-
-
-<a name="7.0.3"></a>
-## [7.0.3](https://github.com/npm/cacache/compare/v7.0.2...v7.0.3) (2017-04-05)
-
-
-### Bug Fixes
-
-* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/npm/cacache/commit/9d4f0a5))
-
-
-
-<a name="7.0.2"></a>
-## [7.0.2](https://github.com/npm/cacache/compare/v7.0.1...v7.0.2) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/npm/cacache/commit/8dc2e62))
-
-
-
-<a name="7.0.1"></a>
-## [7.0.1](https://github.com/npm/cacache/compare/v7.0.0...v7.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **docs:** fix header name conflict in readme ([afcd456](https://github.com/npm/cacache/commit/afcd456))
-
-
-
-<a name="7.0.0"></a>
-# [7.0.0](https://github.com/npm/cacache/compare/v6.3.0...v7.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/npm/cacache/commit/d2e9b6a))
-
-
-### Features
-
-* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/npm/cacache/commit/b1e731f))
-
-
-### BREAKING CHANGES
-
-* **integrity:** The entire API has been overhauled to use SRI hashes instead of digest/hashAlgorithm pairs. SRI hashes follow the Subresource Integrity standard and support strings and objects compatible with [`ssri`](https://npm.im/ssri).
-
-* This change bumps the index version, which will invalidate all previous index entries. Content entries will remain intact, and existing caches will automatically reuse any content from before this breaking change.
-
-* `cacache.get.info()`, `cacache.ls()`, and `cacache.ls.stream()` will now return objects that look like this:
-
-```
-{
- key: String,
- integrity: '<algorithm>-<base64hash>',
- path: ContentPath,
- time: Date<ms>,
- metadata: Any
-}
-```
-
-* `opts.digest` and `opts.hashAlgorithm` are obsolete for any API calls that used them.
-
-* Anywhere `opts.digest` was accepted, `opts.integrity` is now an option. Any valid SRI hash is accepted here -- multiple hash entries will be resolved according to the standard: first, the "strongest" hash algorithm will be picked, and then each of the entries for that algorithm will be matched against the content. Content will be validated if *any* of the entries match (so, a single integrity string can be used for multiple "versions" of the same document/data).
-
-* `put.byDigest()`, `put.stream.byDigest`, `get.byDigest()` and `get.stream.byDigest()` now expect an SRI instead of a `digest` + `opts.hashAlgorithm` pairing (see the sketch below).
-
-* `get.hasContent()` now expects an integrity hash instead of a digest. If content exists, it will return the specific single integrity hash that was found in the cache.
-
-* `verify()` has learned to handle integrity-based caches, and forgotten how to handle old-style cache indices due to the format change.
-
-* `cacache.rm.content()` now expects an integrity hash instead of a hex digest.
-
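-A minimal migration sketch for the by-digest calls (hypothetical cache path; the sha1 values are illustrative):
-
-```javascript
-const cacache = require('cacache')
-const cachePath = '/tmp/my-toy-cache' // hypothetical
-
-// cacache@6 and earlier: hex digest plus a separate hashAlgorithm option
-// cacache.get.byDigest(cachePath, '5f5513f8822fdbe5145af33b64d8d970dcf95c6e', { hashAlgorithm: 'sha1' })
-
-// cacache@7 and later: a single Subresource Integrity string
-cacache.get.byDigest(cachePath, 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=').then(data => {
-  // `data` is the raw cached Buffer for that content address
-})
-```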
-
-
-<a name="6.3.0"></a>
-# [6.3.0](https://github.com/npm/cacache/compare/v6.2.0...v6.3.0) (2017-04-01)
-
-
-### Bug Fixes
-
-* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/npm/cacache/commit/4670e9b))
-* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/npm/cacache/commit/b9d2fa2))
-* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/npm/cacache/commit/d8ac5aa))
-
-
-### Features
-
-* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/npm/cacache/commit/dc6482d))
-
-
-
-<a name="6.2.0"></a>
-# [6.2.0](https://github.com/npm/cacache/compare/v6.1.2...v6.2.0) (2017-03-15)
-
-
-### Bug Fixes
-
-* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/npm/cacache/commit/f8e0f25))
-* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/npm/cacache/commit/6818521))
-
-
-### Features
-
-* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/npm/cacache/commit/c42da71))
-
-
-
-<a name="6.1.2"></a>
-## [6.1.2](https://github.com/npm/cacache/compare/v6.1.1...v6.1.2) (2017-03-13)
-
-
-### Bug Fixes
-
-* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/npm/cacache/commit/d6eb2f0))
-
-
-
-<a name="6.1.1"></a>
-## [6.1.1](https://github.com/npm/cacache/compare/v6.1.0...v6.1.1) (2017-03-13)
-
-
-### Bug Fixes
-
-* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/npm/cacache/commit/0b7faf6))
-* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/npm/cacache/commit/0640bc4))
-
-
-
-<a name="6.1.0"></a>
-# [6.1.0](https://github.com/npm/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
-
-
-### Bug Fixes
-
-* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/npm/cacache/commit/ef4f70a))
-* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/npm/cacache/commit/6ab8132))
-
-
-### Features
-
-* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/npm/cacache/commit/d5d25ba)), closes [#66](https://github.com/npm/cacache/issues/66)
-* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/npm/cacache/commit/45ad77a))
-
-
-
-<a name="6.0.2"></a>
-## [6.0.2](https://github.com/npm/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)
-
-
-### Bug Fixes
-
-* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/npm/cacache/commit/c3644e5))
-
-
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/npm/cacache/compare/v6.0.0...v6.0.1) (2017-03-05)
-
-
-### Bug Fixes
-
-* **docs:** Missed spots in README ([8ffb7fa](https://github.com/npm/cacache/commit/8ffb7fa))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/npm/cacache/compare/v5.0.3...v6.0.0) (2017-03-05)
-
-
-### Bug Fixes
-
-* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/npm/cacache/commit/2f72d0a))
-* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/npm/cacache/commit/fa8f3c3))
-* **deps:** removed `format-number@2.0.2` ([1187791](https://github.com/npm/cacache/commit/1187791))
-* **deps:** removed inflight@1.0.6 ([0d1819c](https://github.com/npm/cacache/commit/0d1819c))
-* **deps:** rimraf@2.6.1 ([9efab6b](https://github.com/npm/cacache/commit/9efab6b))
-* **deps:** standard@9.0.0 ([4202cba](https://github.com/npm/cacache/commit/4202cba))
-* **deps:** tap@10.3.0 ([aa03088](https://github.com/npm/cacache/commit/aa03088))
-* **deps:** weallcontribute@1.0.8 ([ad4f4dc](https://github.com/npm/cacache/commit/ad4f4dc))
-* **docs:** add security note to hashKey ([03f81ba](https://github.com/npm/cacache/commit/03f81ba))
-* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/npm/cacache/commit/ea00ba6))
-* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/npm/cacache/commit/45997d8))
-* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/npm/cacache/commit/fb8cb4d))
-* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/npm/cacache/commit/bbc5fca))
-* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/npm/cacache/commit/b1d3888))
-* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/npm/cacache/commit/d26cdf9))
-* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/npm/cacache/commit/79a8891))
-* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/npm/cacache/commit/65f6632))
-* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/npm/cacache/commit/daf9e08))
-* **put-stream:** use new promise API for moves ([1d36013](https://github.com/npm/cacache/commit/1d36013))
-* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/npm/cacache/commit/c60a2fa))
-* **verify:** tiny typo fix ([db22d05](https://github.com/npm/cacache/commit/db22d05))
-
-
-### Features
-
-* **api:** converted external api ([7bf032f](https://github.com/npm/cacache/commit/7bf032f))
-* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/npm/cacache/commit/8d2c5b6))
-* **cache:** add versioning to content and index ([31bc549](https://github.com/npm/cacache/commit/31bc549))
-* **content:** collate content files into subdirs ([c094d9f](https://github.com/npm/cacache/commit/c094d9f))
-* **deps:** `@npmcorp/move@1.0.0` ([bdd00bf](https://github.com/npm/cacache/commit/bdd00bf))
-* **deps:** `bluebird@3.4.7` ([3a17aff](https://github.com/npm/cacache/commit/3a17aff))
-* **deps:** `promise-inflight@1.0.1` ([a004fe6](https://github.com/npm/cacache/commit/a004fe6))
-* **get:** added memoization support for get ([c77d794](https://github.com/npm/cacache/commit/c77d794))
-* **get:** export hasContent ([2956ec3](https://github.com/npm/cacache/commit/2956ec3))
-* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/npm/cacache/commit/b639746))
-* **index:** collate index files into subdirs ([e8402a5](https://github.com/npm/cacache/commit/e8402a5))
-* **index:** promisify entry index ([cda3335](https://github.com/npm/cacache/commit/cda3335))
-* **memo:** added memoization lib ([da07b92](https://github.com/npm/cacache/commit/da07b92))
-* **memo:** export memoization api ([954b1b3](https://github.com/npm/cacache/commit/954b1b3))
-* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/npm/cacache/commit/5cf4616))
-* **perf:** bulk content write api ([51b536e](https://github.com/npm/cacache/commit/51b536e))
-* **put:** added memoization support to put ([b613a70](https://github.com/npm/cacache/commit/b613a70))
-* **read:** switched to promises ([a869362](https://github.com/npm/cacache/commit/a869362))
-* **rm:** added memoization support to rm ([4205cf0](https://github.com/npm/cacache/commit/4205cf0))
-* **rm:** switched to promises ([a000d24](https://github.com/npm/cacache/commit/a000d24))
-* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/npm/cacache/commit/9517cd7))
-* **util:** use promises for api ([ae204bb](https://github.com/npm/cacache/commit/ae204bb))
-* **verify:** converted to Promises ([f0b3974](https://github.com/npm/cacache/commit/f0b3974))
-
-
-### BREAKING CHANGES
-
-* cache: index/content directories are now versioned. Previous caches are no longer compatible and cannot be migrated.
-* util: fix-owner now uses Promises instead of callbacks
-* index: Previously-generated index entries are no longer compatible and the index must be regenerated.
-* index: The index format has changed and previous caches are no longer compatible. Existing caches will need to be regenerated.
-* hashes: Default hashAlgorithm changed from sha1 to sha512. If you
-rely on the prior setting, pass `opts.hashAlgorithm` in explicitly.
-* content: Previously-generated content directories are no longer compatible
-and must be regenerated.
-* verify: API is now promise-based
-* read: Switches to a Promise-based API and removes callback stuff
-* rm: Switches to a Promise-based API and removes callback stuff
-* index: this changes the API to work off promises instead of callbacks
-* api: this means we are going all in on promises now
diff --git a/node_modules/libcipm/node_modules/cacache/LICENSE.md b/node_modules/libcipm/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/libcipm/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/cacache/README.es.md b/node_modules/libcipm/node_modules/cacache/README.es.md
deleted file mode 100644
index 55007e20d..000000000
--- a/node_modules/libcipm/node_modules/cacache/README.es.md
+++ /dev/null
@@ -1,628 +0,0 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
-
-[`cacache`](https://github.com/zkat/cacache) es una librería de Node.js para
-manejar caches locales en disco, con acceso tanto con claves únicas como
-direcciones de contenido (hashes/hacheos). Es súper rápida, excelente con el
-acceso concurrente, y jamás te dará datos incorrectos, aún si se corrompen o
-manipulan directamente los ficheros del cache.
-
-El propósito original era reemplazar el caché local de
-[npm](https://npm.im/npm), pero se puede usar por su propia cuenta.
-
-_Traducciones: [English](README.md)_
-
-## Instalación
-
-`$ npm install --save cacache`
-
-## Índice
-
-* [Ejemplo](#ejemplo)
-* [Características](#características)
-* [Cómo Contribuir](#cómo-contribuir)
-* [API](#api)
- * [Usando el API en español](#localized-api)
- * Leer
- * [`ls`](#ls)
- * [`ls.flujo`](#ls-stream)
- * [`saca`](#get-data)
- * [`saca.flujo`](#get-stream)
- * [`saca.info`](#get-info)
- * [`saca.tieneDatos`](#get-hasContent)
- * Escribir
- * [`mete`](#put-data)
- * [`mete.flujo`](#put-stream)
- * [opciones para `mete*`](#put-options)
- * [`rm.todo`](#rm-all)
- * [`rm.entrada`](#rm-entry)
- * [`rm.datos`](#rm-content)
- * Utilidades
- * [`ponLenguaje`](#set-locale)
- * [`limpiaMemoizado`](#clear-memoized)
- * [`tmp.hazdir`](#tmp-mkdir)
- * [`tmp.conTmp`](#with-tmp)
- * Integridad
- * [Subresource Integrity](#integrity)
- * [`verifica`](#verify)
- * [`verifica.ultimaVez`](#verify-last-run)
-
-### Ejemplo
-
-```javascript
-const cacache = require('cacache/es')
-const fs = require('fs')
-
-const tarbol = '/ruta/a/mi-tar.tgz'
-const rutaCache = '/tmp/my-toy-cache'
-const clave = 'mi-clave-única-1234'
-
-// ¡Añádelo al caché! Usa `rutaCache` como raíz del caché.
-cacache.mete(rutaCache, clave, '10293801983029384').then(integrity => {
- console.log(`Saved content to ${rutaCache}.`)
-})
-
-const destino = '/tmp/mytar.tgz'
-
-// Copia el contenido del caché a otro fichero, pero esta vez con flujos.
-cacache.saca.flujo(
- rutaCache, clave
-).pipe(
- fs.createWriteStream(destino)
-).on('finish', () => {
- console.log('extracción completada')
-})
-
-// La misma cosa, pero accesando el contenido directamente, sin tocar el índice.
-cacache.saca.porHacheo(rutaCache, integridad).then(datos => {
- fs.writeFile(destino, datos, err => {
- console.log('datos del tarbol sacados basado en su sha512, y escrito a otro fichero')
- })
-})
-```
-
-### Características
-
-* Extracción por clave o por dirección de contenido (shasum, etc)
-* Usa el estándar web [Subresource Integrity](#integrity)
-* Compatible con múltiples algoritmos - usa sha1, sha512, etc, en el mismo caché sin problema
-* Entradas con contenido idéntico comparten ficheros
-* Tolerancia de fallas (inmune a corrupción, ficheros parciales, carreras de proceso, etc)
-* Verificación completa de datos al leer y escribir
-* Concurrencia rápida, segura y "lockless"
-* Compatible con `stream`s (flujos)
-* Compatible con `Promise`s (promesas)
-* Bastante rápida -- acceso, incluyendo verificación, en microsegundos
-* Almacenaje de metadatos arbitrarios
-* Colección de basura y verificación adicional fuera de banda
-* Cobertura rigurosa de pruebas
-* Probablemente hay un "Bloom filter" por ahí en algún lado. Eso le mola a la gente, ¿verdad? 🤔
-
-### Cómo Contribuir
-
-El equipo de cacache felizmente acepta contribuciones de código y otras maneras de participación. ¡Hay muchas formas diferentes de contribuir! La [Guía de Colaboradores](CONTRIBUTING.md) (en inglés) tiene toda la información que necesitas para cualquier tipo de contribución: todo desde cómo reportar errores hasta cómo someter parches con nuevas características. Con todo y eso, no te preocupes por si lo que haces está exactamente correcto: no hay ningún problema en hacer preguntas si algo no está claro, o no lo encuentras.
-
-El equipo de cacache tiene miembros hispanohablantes: es completamente aceptable crear `issues` y `pull requests` en español/castellano.
-
-Todos los participantes en este proyecto deben obedecer el [Código de Conducta](CODE_OF_CONDUCT.md) (en inglés), y en general actuar de forma amable y respetuosa mientras participan en esta comunidad.
-
-Por favor refiérete al [Historial de Cambios](CHANGELOG.md) (en inglés) para detalles sobre cambios importantes incluidos en cada versión.
-
-Finalmente, cacache tiene un sistema de localización de lenguaje. Si te interesa añadir lenguajes o mejorar los que existen, mira en el directorio `./locales` para comenzar.
-
-Happy hacking!
-
-### API
-
-#### <a name="localized-api"></a> Usando el API en español
-
-cacache incluye una traducción completa de su API al castellano, con las mismas
-características. Para usar el API como está documentado en este documento, usa
-`require('cacache/es')`
-
-cacache también tiene otros lenguajes: encuéntralos bajo `./locales`, y podrás
-usar el API en ese lenguaje con `require('cacache/<lenguaje>')`
-
-#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
-
-Enumera todas las entradas en el caché, dentro de un solo objeto. Cada entrada
-en el objeto tendrá como clave la clave única usada para el índice, y su valor
-será un objeto de [`saca.info`](#get-info).
-
-##### Ejemplo
-
-```javascript
-cacache.ls(rutaCache).then(console.log)
-// Salida
-{
- 'my-thing': {
- key: 'my-thing',
- integrity: 'sha512-BaSe64/EnCoDED+HAsh=='
- path: '.testcache/content/deadbeef', // unido con `rutaCache`
- time: 12345698490,
- size: 4023948,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
- },
- 'other-thing': {
- key: 'other-thing',
- integrity: 'sha1-ANothER+hasH=',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 111112
- }
-}
-```
-
-#### <a name="ls-stream"></a> `> cacache.ls.flujo(cache) -> Readable`
-
-Enumera todas las entradas en el caché, emitiendo un objeto de
-[`saca.info`](#get-info) por cada evento de `data` en el flujo.
-
-##### Ejemplo
-
-```javascript
-cacache.ls.flujo(rutaCache).on('data', console.log)
-// Salida
-{
- key: 'my-thing',
- integrity: 'sha512-BaSe64HaSh',
- path: '.testcache/content/deadbeef', // unido con `rutaCache`
- time: 12345698490,
- size: 13423,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-
-{
- key: 'other-thing',
- integrity: 'whirlpool-WoWSoMuchSupport',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 498023984029
-}
-
-{
- ...
-}
-```
-
-#### <a name="get-data"></a> `> cacache.saca(cache, clave, [ops]) -> Promise({data, metadata, integrity})`
-
-Devuelve un objeto con los datos, hacheo de integridad y metadatos identificados
-por la `clave`. La propiedad `data` de este objeto será una instancia de
-`Buffer` con los datos almacenados en el caché.
-
-`integrity` es un `string` de [Subresource Integrity](#integrity). Dígase, un
-`string` que puede ser usado para verificar a la `data`, que tiene como formato
-`<algoritmo>-<hacheo-integridad-base64>`.
-
-Si no existe ninguna entrada identificada por `clave`, o si los datos
-almacenados localmente fallan la verificación, el `Promise` fallará.
-
-Una sub-función, `saca.porHacheo`, tiene casi el mismo comportamiento, excepto
-que busca entradas usando el hacheo de integridad, sin tocar el índice general.
-Esta versión *sólo* devuelve `data`, sin ningún objeto conteniéndola.
-
-##### Nota
-
-Esta función carga la entrada completa en memoria antes de devolverla. Si estás
-manejando datos Muy Grandes, es posible que [`saca.flujo`](#get-stream) sea
-una mejor solución.
-
-##### Ejemplo
-
-```javascript
-// Busca por clave
-cache.saca(rutaCache, 'my-thing').then(console.log)
-// Salida:
-{
- metadata: {
- thingName: 'my'
- },
- integrity: 'sha512-BaSe64HaSh',
- data: Buffer#<deadbeef>,
- size: 9320
-}
-
-// Busca por hacheo
-cache.saca.porHacheo(rutaCache, 'sha512-BaSe64HaSh').then(console.log)
-// Salida:
-Buffer#<deadbeef>
-```
-
-#### <a name="get-stream"></a> `> cacache.saca.flujo(cache, clave, [ops]) -> Readable`
-
-Devuelve un [Readable
-Stream](https://nodejs.org/api/stream.html#stream_readable_streams) de los datos
-almacenados bajo `clave`.
-
-Si no existe ninguna entrada identificada por `clave`, o si los datos
-almacenados localmente fallan la verificación, el flujo emitirá un error.
-
-`metadata` y `integrity` serán emitidos como eventos antes de que el flujo
-cierre.
-
-Una sub-función, `saca.flujo.porHacheo`, tiene casi el mismo comportamiento,
-excepto que busca entradas usando el hacheo de integridad, sin tocar el índice
-general. Esta versión no emite eventos de `metadata` o `integrity`.
-
-##### Ejemplo
-
-```javascript
-// Busca por clave
-cache.saca.flujo(
- rutaCache, 'my-thing'
-).on('metadata', metadata => {
- console.log('metadata:', metadata)
-}).on('integrity', integrity => {
- console.log('integrity:', integrity)
-}).pipe(
- fs.createWriteStream('./x.tgz')
-)
-// Salidas:
-metadata: { ... }
-integrity: 'sha512-SoMeDIGest+64=='
-
-// Busca por hacheo
-cache.saca.flujo.porHacheo(
- rutaCache, 'sha512-SoMeDIGest+64=='
-).pipe(
- fs.createWriteStream('./x.tgz')
-)
-```
-
-#### <a name="get-info"></a> `> cacache.saca.info(cache, clave) -> Promise`
-
-Busca la `clave` en el índice del caché, devolviendo información sobre la
-entrada si existe.
-
-##### Campos
-
-* `key` - Clave de la entrada. Igual al argumento `clave`.
-* `integrity` - [hacheo de Subresource Integrity](#integrity) del contenido al que se refiere esta entrada.
-* `path` - Dirección del fichero de datos almacenados, unida al argumento `cache`.
-* `time` - Hora de creación de la entrada
-* `metadata` - Metadatos asignados a esta entrada por el usuario
-
-##### Ejemplo
-
-```javascript
-cacache.saca.info(rutaCache, 'my-thing').then(console.log)
-
-// Salida
-{
- key: 'my-thing',
- integrity: 'sha256-MUSTVERIFY+ALL/THINGS=='
- path: '.testcache/content/deadbeef',
- time: 12345698490,
- size: 849234,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-```
-
-#### <a name="get-hasContent"></a> `> cacache.saca.tieneDatos(cache, integrity) -> Promise`
-
-Busca un [hacheo Subresource Integrity](#integrity) en el caché. Si existe el
-contenido asociado con `integrity`, devuelve un objeto con dos campos: el hacheo
-_específico_ que se usó para la búsqueda, `sri`, y el tamaño total del
-contenido, `size`. Si no existe ningún contenido asociado con `integrity`,
-devuelve `false`.
-
-##### Ejemplo
-
-```javascript
-cacache.saca.tieneDatos(rutaCache, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
-
-// Salida
-{
- sri: {
- source: 'sha256-MUSTVERIFY+ALL/THINGS==',
- algorithm: 'sha256',
- digest: 'MUSTVERIFY+ALL/THINGS==',
- options: []
- },
- size: 9001
-}
-
-cacache.saca.tieneDatos(rutaCache, 'sha521-NOT+IN/CACHE==').then(console.log)
-
-// Salida
-false
-```
-
-#### <a name="put-data"></a> `> cacache.mete(cache, clave, datos, [ops]) -> Promise`
-
-Inserta `datos` en el caché. El `Promise` devuelto se resuelve con un hacheo
-(generado conforme a [`ops.algorithms`](#optsalgorithms)) después de que la entrada
-haya sido escrita por completo.
-
-##### Ejemplo
-
-```javascript
-fetch(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).then(datos => {
- return cacache.mete(rutaCache, 'registry.npmjs.org|cacache@1.0.0', datos)
-}).then(integridad => {
- console.log('el hacheo de integridad es', integridad)
-})
-```
-
-#### <a name="put-stream"></a> `> cacache.mete.flujo(cache, clave, [ops]) -> Writable`
-
-Devuelve un [Writable
-Stream](https://nodejs.org/api/stream.html#stream_writable_streams) que inserta
-al caché los datos escritos a él. Emite un evento `integrity` con el hacheo del
-contenido escrito, cuando completa.
-
-##### Ejemplo
-
-```javascript
-request.get(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).pipe(
- cacache.mete.flujo(
- rutaCache, 'registry.npmjs.org|cacache@1.0.0'
- ).on('integrity', d => console.log(`integrity digest is ${d}`))
-)
-```
-
-#### <a name="put-options"></a> `> opciones para cacache.mete`
-
-Las funciones `cacache.mete` tienen un número de opciones en común.
-
-##### `ops.metadata`
-
-Metadatos del usuario que se almacenarán con la entrada.
-
-##### `ops.size`
-
-El tamaño declarado de los datos que se van a insertar. Si es proveído, cacache
-verificará que los datos escritos sean de ese tamaño, o si no, fallará con un
-error con código `EBADSIZE`.
-
-##### `ops.integrity`
-
-El hacheo de integridad de los datos siendo escritos.
-
-Si es proveído, y los datos escritos no le corresponden, la operación fallará
-con un error con código `EINTEGRITY`.
-
-`ops.algorithms` no tiene ningún efecto si esta opción está presente.
-
-##### `ops.algorithms`
-
-Por Defecto: `['sha512']`
-
-Algoritmos que se deben usar cuando se calcule el hacheo de [subresource
-integrity](#integrity) para los datos insertados. Puedes usar cualquier algoritmo
-enumerado en `crypto.getHashes()`.
-
-Por el momento, sólo se acepta un algoritmo (dígase, un array con exactamente un
-valor). No tiene ningún efecto si `ops.integrity` también ha sido proveído.
-
-##### `ops.uid`/`ops.gid`
-
-Si están presentes, cacache hará todo lo posible para asegurarse de que todos los
-ficheros creados en el proceso de sus operaciones en el caché usen esta
-combinación en particular.
-
-##### `ops.memoize`
-
-Por Defecto: `null`
-
-Si es verdad, cacache tratará de memoizar los datos de la entrada en memoria. La
-próxima vez que el proceso corriente trate de accesar los datos o entrada,
-cacache buscará en memoria antes de buscar en disco.
-
-Si `ops.memoize` es un objeto regular o un objeto como `Map` (es decir, un
-objeto con métodos `get()` y `set()`), este objeto en sí será usado en vez del
-caché de memoria global. Esto permite tener lógica específica a tu aplicación
-en cuanto al almacenaje en memoria de tus datos.
-
-Si quieres asegurarte de que los datos se lean del disco en vez de memoria, usa
-`memoize: false` cuando uses funciones de `cacache.saca`.
-
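-Un boceto mínimo (ruta y clave hipotéticas) de un objeto de memoización inyectado:
-
-```javascript
-const cacache = require('cacache/es')
-const rutaCache = '/tmp/my-toy-cache' // hipotética
-const memoize = new Map() // cualquier objeto con métodos get() y set() sirve
-
-cacache.mete(rutaCache, 'mi-clave', 'unos datos', { memoize }).then(() => {
-  // lecturas posteriores que pasen el mismo objeto se sirven desde él,
-  // en vez del caché de memoria global
-  return cacache.saca(rutaCache, 'mi-clave', { memoize })
-})
-```
-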
-#### <a name="rm-all"></a> `> cacache.rm.todo(cache) -> Promise`
-
-Borra el caché completo, incluyendo ficheros temporeros, ficheros de datos, y el
-índice del caché.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.todo(rutaCache).then(() => {
- console.log('THE APOCALYPSE IS UPON US 😱')
-})
-```
-
-#### <a name="rm-entry"></a> `> cacache.rm.entrada(cache, clave) -> Promise`
-
-Alias: `cacache.rm`
-
-Borra la entrada `clave` del índice. El contenido asociado con esta entrada
-seguirá siendo accesible por hacheo usando
-[`saca.flujo.porHacheo`](#get-stream).
-
-Para borrar el contenido en sí, usa [`rm.datos`](#rm-content). Si quieres hacer
-esto de manera más segura (pues ficheros de contenido pueden ser usados por
-múltiples entradas), usa [`verifica`](#verify) para borrar huérfanos.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.entrada(rutaCache, 'my-thing').then(() => {
- console.log('I did not like it anyway')
-})
-```
-
-#### <a name="rm-content"></a> `> cacache.rm.datos(cache, integrity) -> Promise`
-
-Borra el contenido identificado por `integrity`. Cualquier entrada que se
-refiera a este contenido quedará huérfana y se invalidará si se trata de
-accesar, a menos que contenido idéntico sea añadido bajo `integrity`.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.datos(rutaCache, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
- console.log('los datos para `mi-cosa` se borraron')
-})
-```
-
-#### <a name="set-locale"></a> `> cacache.ponLenguaje(locale)`
-
-Configura el lenguaje usado para mensajes y errores de cacache. La lista de
-lenguajes disponibles está en el directorio `./locales` del proyecto.
-
-_¿Te interesa añadir más lenguajes? ¡[Somete un PR](CONTRIBUTING.md)!_
-
-#### <a name="clear-memoized"></a> `> cacache.limpiaMemoizado()`
-
-Reinicializa completamente el caché de memoria interno. Si estás usando tu
-propio objeto con `ops.memoize`, tendrás que reinicializarlo por tu cuenta.
-
-#### <a name="tmp-mkdir"></a> `> tmp.hazdir(cache, ops) -> Promise<Path>`
-
-Alias: `tmp.mkdir`
-
-Devuelve un directorio único dentro del directorio `tmp` del caché.
-
-Una vez tengas el directorio, es responsabilidad tuya asegurarte de que todos los
-ficheros escritos en él sean creados usando los permisos y `uid`/`gid` concordantes
-con el caché. Si no, puedes pedirle a cacache que lo haga llamando a
-[`cacache.tmp.fix()`](#tmp-fix). Esta función arreglará todos los permisos en el
-directorio tmp.
-
-Si quieres que cacache limpie el directorio automáticamente cuando termines, usa
-[`cacache.tmp.conTmp()`](#with-tmp).
-
-##### Ejemplo
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-})
-```
-
-#### <a name="with-tmp"></a> `> tmp.conTmp(cache, ops, cb) -> Promise`
-
-Crea un directorio temporero con [`tmp.mkdir()`](#tmp-mkdir) y ejecuta `cb` con
-él como primer argumento. El directorio creado será removido automáticamente
-cuando el valor devuelto por `cb()` se resuelva.
-
-Las mismas advertencias aplican en cuanto al manejo de permisos de los ficheros
-dentro del directorio.
-
-##### Ejemplo
-
-```javascript
-cacache.tmp.conTmp(cache, dir => {
- return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-}).then(() => {
- // `dir` no longer exists
-})
-```
-
-#### <a name="integrity"></a> Hacheos de Subresource Integrity
-
-cacache usa strings que siguen la especificación de [Subresource Integrity
-spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
-
-Es decir, dondequiera que cacache espere un argumento u opción `integrity`, ese
-string debería usar el formato `<algoritmo>-<hacheo-base64>`.
-
-Una variación importante respecto a la especificación es que cacache acepta el
-nombre de cualquier algoritmo soportado por el proceso de Node.js donde se usa.
-Puedes usar `crypto.getHashes()` para ver cuáles están disponibles.
-
-##### Generando tus propios hacheos
-
-Si tienes un `shasum`, en general va a estar en formato de string hexadecimal
-(es decir, un `sha1` se vería como algo así:
-`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`).
-
-Para ser compatible con cacache, necesitas convertir esto a su equivalente en
-subresource integrity. Por ejemplo, el hacheo correspondiente al ejemplo
-anterior sería: `sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
-
-Puedes usar código así para generarlo por tu cuenta:
-
-```javascript
-const crypto = require('crypto')
-const algoritmo = 'sha512'
-const datos = 'foobarbaz'
-
-const integrity = (
-  algoritmo +
- '-' +
- crypto.createHash(algoritmo).update(datos).digest('base64')
-)
-```
-
-También puedes usar [`ssri`](https://npm.im/ssri) para delegar el trabajo a otra
-librería que garantiza que todo esté correcto, pues maneja probablemente todas
-las operaciones que tendrías que hacer con SRIs, incluyendo la conversión entre
-hexadecimal y el formato SRI.
-
-#### <a name="verify"></a> `> cacache.verifica(cache, ops) -> Promise`
-
-Examina y arregla tu caché:
-
-* Limpia entradas inválidas, huérfanas y corrompidas
-* Te deja filtrar cuales entradas retener, con tu propio filtro
-* Reclama cualquier fichero de contenido sin referencias en el índice
-* Verifica integridad de todos los ficheros de contenido y remueve los malos
-* Arregla permisos del caché
-* Remueve el directorio `tmp` del caché, y todo su contenido.
-
-Cuando termine, devuelve un objeto con varias estadísticas sobre el proceso de
-verificación, por ejemplo la cantidad de espacio de disco reclamado, el número
-de entradas válidas, número de entradas removidas, etc.
-
-##### Opciones
-
-* `ops.uid` - uid para asignarle al caché y su contenido
-* `ops.gid` - gid para asignarle al caché y su contenido
-* `ops.filter` - recibe una entrada como argumento. Devuelve falso para removerla. Nota: es posible que esta función sea invocada con la misma entrada más de una vez.
-
-##### Ejemplo
-
-```sh
-echo somegarbage >> $RUTACACHE/content/deadbeef
-```
-
-```javascript
-cacache.verifica(rutaCache).then(stats => {
- // deadbeef collected, because of invalid checksum.
- console.log('cache is much nicer now! stats:', stats)
-})
-```
-
-#### <a name="verify-last-run"></a> `> cacache.verifica.ultimaVez(cache) -> Promise`
-
-Alias: `últimaVez`
-
-Devuelve un `Date` que representa la última vez que `cacache.verifica` fue
-ejecutada en `cache`.
-
-##### Ejemplo
-
-```javascript
-cacache.verifica(rutaCache).then(() => {
- cacache.verifica.ultimaVez(rutaCache).then(última => {
- console.log('La última vez que se usó cacache.verifica() fue ' + última)
- })
-})
-```
diff --git a/node_modules/libcipm/node_modules/cacache/README.md b/node_modules/libcipm/node_modules/cacache/README.md
deleted file mode 100644
index 7f8ec5eec..000000000
--- a/node_modules/libcipm/node_modules/cacache/README.md
+++ /dev/null
@@ -1,641 +0,0 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest)
-
-[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing
-local key and content address caches. It's really fast, really good at
-concurrency, and it will never give you corrupted data, even if cache files
-get corrupted or manipulated.
-
-On systems that support user and group settings on files, cacache will
-match the `uid` and `gid` values to the folder where the cache lives, even
-when running as `root`.
-
-It was written to be used as [npm](https://npm.im)'s local cache, but can
-just as easily be used on its own.
-
-_Translations: [español](README.es.md)_
-
-## Install
-
-`$ npm install --save cacache`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [Using localized APIs](#localized-api)
- * Reading
- * [`ls`](#ls)
- * [`ls.stream`](#ls-stream)
- * [`get`](#get-data)
- * [`get.stream`](#get-stream)
- * [`get.info`](#get-info)
- * [`get.hasContent`](#get-hasContent)
- * Writing
- * [`put`](#put-data)
- * [`put.stream`](#put-stream)
- * [`put*` opts](#put-options)
- * [`rm.all`](#rm-all)
- * [`rm.entry`](#rm-entry)
- * [`rm.content`](#rm-content)
- * Utilities
- * [`setLocale`](#set-locale)
- * [`clearMemoized`](#clear-memoized)
- * [`tmp.mkdir`](#tmp-mkdir)
- * [`tmp.withTmp`](#with-tmp)
- * Integrity
- * [Subresource Integrity](#integrity)
- * [`verify`](#verify)
- * [`verify.lastRun`](#verify-last-run)
-
-### Example
-
-```javascript
-const cacache = require('cacache/en')
-const fs = require('fs')
-
-const tarball = '/path/to/mytar.tgz'
-const cachePath = '/tmp/my-toy-cache'
-const key = 'my-unique-key-1234'
-
-// Cache it! Use `cachePath` as the root of the content cache
-cacache.put(cachePath, key, '10293801983029384').then(integrity => {
- console.log(`Saved content to ${cachePath}.`)
-})
-
-const destination = '/tmp/mytar.tgz'
-
-// Copy the contents out of the cache and into their destination!
-// But this time, use stream instead!
-cacache.get.stream(
- cachePath, key
-).pipe(
- fs.createWriteStream(destination)
-).on('finish', () => {
- console.log('done extracting!')
-})
-
-// The same thing, but skip the key index.
-cacache.get.byDigest(cachePath, integrityHash).then(data => {
- fs.writeFile(destination, data, err => {
- console.log('tarball data fetched based on its sha512sum and written out!')
- })
-})
-```
-
-### Features
-
-* Extraction by key or by content address (shasum, etc)
-* [Subresource Integrity](#integrity) web standard support
-* Multi-hash support - safely host sha1, sha512, etc, in a single cache
-* Automatic content deduplication
-* Fault tolerance (immune to corruption, partial writes, process races, etc)
-* Consistency guarantees on read and write (full data verification)
-* Lockless, high-concurrency cache access
-* Streaming support
-* Promise support
-* Pretty darn fast -- sub-millisecond reads and writes including verification
-* Arbitrary metadata storage
-* Garbage collection and additional offline verification
-* Thorough test coverage
-* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔
-
-### Contributing
-
-The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### <a name="localized-api"></a> Using localized APIs
-
-cacache includes a complete API in English, with the same features as other
-translations. To use the English API as documented in this README, use
-`require('cacache/en')`. This is also currently the default if you do
-`require('cacache')`, but may change in the future.
-
-cacache also supports other languages! You can find the list of currently
-supported ones by looking in `./locales` in the source directory. You can use
-the API in that language with `require('cacache/<lang>')`.
-
-Want to add support for a new language? Please go ahead! You should be able to
-copy `./locales/en.js` and `./locales/en.json` and fill them in. Translating the
-`README.md` is a bit more work, but also appreciated if you get around to it. 👍🏼
-
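-As a rough illustration, both locale bindings expose the same cache; only the exported names and messages differ (the `es` binding is the one documented in `README.es.md`):
-
-```javascript
-// Same underlying cache API under two sets of localized names.
-const cacache = require('cacache/en')   // get, put, rm, verify, ...
-const cacacheEs = require('cacache/es') // saca, mete, rm, verifica, ...
-```
-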
-#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
-
-Lists info for all entries currently in the cache as a single large object. Each
-entry in the object will be keyed by the unique index key, with corresponding
-[`get.info`](#get-info) objects as the values.
-
-##### Example
-
-```javascript
-cacache.ls(cachePath).then(console.log)
-// Output
-{
- 'my-thing': {
- key: 'my-thing',
- integrity: 'sha512-BaSe64/EnCoDED+HAsh=='
- path: '.testcache/content/deadbeef', // joined with `cachePath`
- time: 12345698490,
- size: 4023948,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
- },
- 'other-thing': {
- key: 'other-thing',
- integrity: 'sha1-ANothER+hasH=',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 111112
- }
-}
-```
-
-#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`
-
-Lists info for all entries currently in the cache.
-
-This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
-returned as `'data'` events on the returned stream.
-
-##### Example
-
-```javascript
-cacache.ls.stream(cachePath).on('data', console.log)
-// Output
-{
- key: 'my-thing',
- integrity: 'sha512-BaSe64HaSh',
- path: '.testcache/content/deadbeef', // joined with `cachePath`
- time: 12345698490,
- size: 13423,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-
-{
- key: 'other-thing',
- integrity: 'whirlpool-WoWSoMuchSupport',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 498023984029
-}
-
-{
- ...
-}
-```
-
-#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
-
-Returns an object with the cached data, digest, and metadata identified by
-`key`. The `data` property of this object will be a `Buffer` instance that
-presumably holds some data that means something to you. I'm sure you know what
-to do with it! cacache just won't care.
-
-`integrity` is a [Subresource
-Integrity](#integrity)
-string. That is, a string that can be used to verify `data`, which looks like
-`<hash-algorithm>-<base64-integrity-hash>`.
-
-If there is no content identified by `key`, or if the locally-stored data does
-not pass the validity checksum, the promise will be rejected.
-
-A sub-function, `get.byDigest` may be used for identical behavior, except lookup
-will happen by integrity hash, bypassing the index entirely. This version of the
-function *only* returns `data` itself, without any wrapper.
-
-##### Note
-
-This function loads the entire cache entry into memory before returning it. If
-you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
-instead.
-
-##### Example
-
-```javascript
-// Look up by key
-cache.get(cachePath, 'my-thing').then(console.log)
-// Output:
-{
- metadata: {
- thingName: 'my'
- },
- integrity: 'sha512-BaSe64HaSh',
- data: Buffer#<deadbeef>,
- size: 9320
-}
-
-// Look up by digest
-cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
-// Output:
-Buffer#<deadbeef>
-```
-
-#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
-
-Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
-
-If there is no content identified by `key`, or if the locally-stored data does
-not pass the validity checksum, an error will be emitted.
-
-`metadata` and `integrity` events will be emitted before the stream closes, if
-you need to collect that extra data about the cached entry.
-
-A sub-function, `get.stream.byDigest` may be used for identical behavior,
-except lookup will happen by integrity hash, bypassing the index entirely. This
-version does not emit the `metadata` and `integrity` events at all.
-
-##### Example
-
-```javascript
-// Look up by key
-cache.get.stream(
- cachePath, 'my-thing'
-).on('metadata', metadata => {
- console.log('metadata:', metadata)
-}).on('integrity', integrity => {
- console.log('integrity:', integrity)
-}).pipe(
- fs.createWriteStream('./x.tgz')
-)
-// Outputs:
-metadata: { ... }
-integrity: 'sha512-SoMeDIGest+64=='
-
-// Look up by digest
-cache.get.stream.byDigest(
- cachePath, 'sha512-SoMeDIGest+64=='
-).pipe(
- fs.createWriteStream('./x.tgz')
-)
-```
-
-#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
-
-Looks up `key` in the cache index, returning information about the entry if
-one exists.
-
-##### Fields
-
-* `key` - Key the entry was looked up under. Matches the `key` argument.
-* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
-* `path` - Filesystem path where content is stored, joined with `cache` argument.
-* `time` - Timestamp the entry was first added on.
-* `metadata` - User-assigned metadata associated with the entry/content.
-
-##### Example
-
-```javascript
-cacache.get.info(cachePath, 'my-thing').then(console.log)
-
-// Output
-{
- key: 'my-thing',
- integrity: 'sha256-MUSTVERIFY+ALL/THINGS=='
- path: '.testcache/content/deadbeef',
- time: 12345698490,
- size: 849234,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-```
-
-#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
-
-Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
-exists for this `integrity`, it will return an object with two fields: the specific single
-integrity hash that was found, as `sri`, and the size of the found content, as `size`. If no content exists for this integrity, it will return `false`.
-
-##### Example
-
-```javascript
-cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
-
-// Output
-{
- sri: {
- source: 'sha256-MUSTVERIFY+ALL/THINGS==',
- algorithm: 'sha256',
- digest: 'MUSTVERIFY+ALL/THINGS==',
- options: []
- },
- size: 9001
-}
-
-cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log)
-
-// Output
-false
-```
-
-#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
-
-Inserts data passed to it into the cache. The returned Promise resolves with a
-digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
-cache entry has been successfully written.
-
-##### Example
-
-```javascript
-fetch(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).then(data => {
- return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
-}).then(integrity => {
- console.log('integrity hash is', integrity)
-})
-```
-
-#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
-
-Returns a [Writable
-Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
-data written to it into the cache. Emits an `integrity` event with the digest of
-written contents when it succeeds.
-
-##### Example
-
-```javascript
-request.get(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).pipe(
- cacache.put.stream(
- cachePath, 'registry.npmjs.org|cacache@1.0.0'
- ).on('integrity', d => console.log(`integrity digest is ${d}`))
-)
-```
-
-#### <a name="put-options"></a> `> cacache.put options`
-
-`cacache.put` functions have a number of options in common.
-
-##### `opts.metadata`
-
-Arbitrary metadata to be attached to the inserted key.
-
-##### `opts.size`
-
-If provided, the data stream will be verified to check that enough data was
-passed through. If there's more or less data than expected, insertion will fail
-with an `EBADSIZE` error.
-
-##### `opts.integrity`
-
-If present, the pre-calculated digest for the inserted content. If this option
-is provided and does not match the post-insertion digest, insertion will fail
-with an `EINTEGRITY` error.
-
-`algorithms` has no effect if this option is present.
-
-##### `opts.algorithms`
-
-Default: ['sha512']
-
-Hashing algorithms to use when calculating the [subresource integrity
-digest](#integrity)
-for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
-`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
-may also use any anagram of `'modnar'` to use this feature.
-
-Currently only supports one algorithm at a time (i.e., an array length of
-exactly `1`). Has no effect if `opts.integrity` is present.
-
-##### `opts.memoize`
-
-Default: null
-
-If provided, cacache will memoize the given cache insertion in memory, bypassing
-any filesystem checks for that key or digest in future cache fetches. Nothing
-will be written to the in-memory cache unless this option is explicitly truthy.
-
-If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
-and `set` methods), it will be written to instead of the global memoization
-cache.
-
-Reading data from disk can be forced by explicitly passing `memoize: false` to
-the reader functions, but their default will be to read from memory.
-
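-As a rough sketch (hypothetical cache path and key), an injected `Map` used as the memoization target:
-
-```javascript
-const cacache = require('cacache')
-const cachePath = '/tmp/my-toy-cache' // hypothetical
-const memoize = new Map() // any object with get() and set() works
-
-cacache.put(cachePath, 'my-key', 'some data', { memoize }).then(() => {
-  // later reads that pass the same object are served from it instead of the
-  // global in-memory cache
-  return cacache.get(cachePath, 'my-key', { memoize })
-})
-```
-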
-#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`
-
-Clears the entire cache. Mainly by blowing away the cache directory itself.
-
-##### Example
-
-```javascript
-cacache.rm.all(cachePath).then(() => {
- console.log('THE APOCALYPSE IS UPON US 😱')
-})
-```
-
-#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise`
-
-Alias: `cacache.rm`
-
-Removes the index entry for `key`. Content will still be accessible if
-requested directly by content address ([`get.stream.byDigest`](#get-stream)).
-
-To remove the content itself (which might still be used by other entries), use
-[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
-[`verify`](#verify).
-
-##### Example
-
-```javascript
-cacache.rm.entry(cachePath, 'my-thing').then(() => {
- console.log('I did not like it anyway')
-})
-```
-
-#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`
-
-Removes the content identified by `integrity`. Any index entries referring to it
-will not be usable again until the content is re-added to the cache with an
-identical digest.
-
-##### Example
-
-```javascript
-cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
- console.log('data for my-thing is gone!')
-})
-```
-
-#### <a name="set-locale"></a> `> cacache.setLocale(locale)`
-
-Configure the language/locale used for messages and errors coming from cacache.
-The list of available locales is in the `./locales` directory in the project
-root.
-
-_Interested in contributing more languages? [Submit a PR](CONTRIBUTING.md)!_
-
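-A minimal sketch, assuming the `es` locale shipped under `./locales`:
-
-```javascript
-// Switch cacache's messages and error strings to Spanish
-const cacache = require('cacache')
-cacache.setLocale('es')
-```
-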
-#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
-
-Completely resets the in-memory entry cache.
-
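-A minimal sketch (hypothetical cache path and key) of resetting memoization between reads:
-
-```javascript
-const cacache = require('cacache')
-const cachePath = '/tmp/my-toy-cache' // hypothetical
-
-cacache.get(cachePath, 'my-key', { memoize: true }).then(() => {
-  cacache.clearMemoized()
-  // the next read for 'my-key' goes back to the filesystem
-  return cacache.get(cachePath, 'my-key')
-})
-```
-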
-#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
-
-Returns a unique temporary directory inside the cache's `tmp` dir. This
-directory will use the same safe user assignment that all the other stuff uses.
-
-Once the directory is made, it's the user's responsibility that all files
-within are given the appropriate `gid`/`uid` ownership settings to match
-the rest of the cache. If not, you can ask cacache to do it for you by
-calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
-permissions.
-
-If you want automatic cleanup of this directory, use
-[`tmp.withTmp()`](#with-tmp).
-
-##### Example
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-})
-```
-
-#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
-
-Sets the `uid` and `gid` properties on all files and folders within the tmp
-folder to match the rest of the cache.
-
-Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
-[`tmp.withTmp`](#with-tmp).
-
-##### Example
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- writeFile(path.join(dir, 'file'), someData).then(() => {
- // make sure we didn't just put a root-owned file in the cache
- cacache.tmp.fix().then(() => {
- // all uids and gids match now
- })
- })
-})
-```
-
-#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
-
-Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
-with it. The created temporary directory will be removed when the return value
-of `cb()` resolves -- that is, if you return a Promise from `cb()`, the tmp
-directory will be automatically deleted once that promise completes.
-
-The same caveats apply when it comes to managing permissions for the tmp dir's
-contents.
-
-##### Example
-
-```javascript
-cacache.tmp.withTmp(cache, dir => {
- return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-}).then(() => {
- // `dir` no longer exists
-})
-```
-
-#### <a name="integrity"></a> Subresource Integrity Digests
-
-For content verification and addressing, cacache uses strings following the
-[Subresource
-Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
-That is, any time cacache expects an `integrity` argument or option, it
-should be in the format `<hashAlgorithm>-<base64-hash>`.
-
-One deviation from the current spec is that cacache will support any hash
-algorithms supported by the underlying Node.js process. You can use
-`crypto.getHashes()` to see which ones you can use.
-
-##### Generating Digests Yourself
-
-If you have an existing content shasum, it is generally formatted as a
-hexadecimal string (that is, a sha1 would look like:
-`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
-cacache, you'll need to convert this to an equivalent subresource integrity
-string. For this example, the corresponding hash would be:
-`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
-
-If you want to generate an integrity string yourself for existing data, you can
-use something like this:
-
-```javascript
-const crypto = require('crypto')
-const hashAlgorithm = 'sha512'
-const data = 'foobarbaz'
-
-const integrity = (
- hashAlgorithm +
- '-' +
- crypto.createHash(hashAlgorithm).update(data).digest('base64')
-)
-```
-
-You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
-around SRI strings, including generation, parsing, and translating from existing
-hex-formatted strings.
-
-#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
-
-Checks out and fixes up your cache:
-
-* Cleans up corrupted or invalid index entries.
-* Custom entry filtering options.
-* Garbage collects any content entries not referenced by the index.
-* Checks integrity for all content entries and removes invalid content.
-* Fixes cache ownership.
-* Removes the `tmp` directory in the cache and all its contents.
-
-When it's done, it'll return an object with various stats about the verification
-process, including amount of storage reclaimed, number of valid entries, number
-of entries removed, etc.
-
-##### Options
-
-* `opts.filter` - receives a formatted entry. Return false to remove it.
- Note: might be called more than once on the same entry.
-
-##### Example
-
-```sh
-echo somegarbage >> $CACHEPATH/content/deadbeef
-```
-
-```javascript
-cacache.verify(cachePath).then(stats => {
- // deadbeef collected, because of invalid checksum.
- console.log('cache is much nicer now! stats:', stats)
-})
-```
-
-#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
-
-Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
-
-##### Example
-
-```javascript
-cacache.verify(cachePath).then(() => {
- cacache.verify.lastRun(cachePath).then(lastTime => {
-    console.log('cacache.verify was last called on ' + lastTime)
- })
-})
-```
diff --git a/node_modules/libcipm/node_modules/cacache/en.js b/node_modules/libcipm/node_modules/cacache/en.js
deleted file mode 100644
index a3db581c9..000000000
--- a/node_modules/libcipm/node_modules/cacache/en.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/en.js')
diff --git a/node_modules/libcipm/node_modules/cacache/es.js b/node_modules/libcipm/node_modules/cacache/es.js
deleted file mode 100644
index 6282363c3..000000000
--- a/node_modules/libcipm/node_modules/cacache/es.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/es.js')
diff --git a/node_modules/libcipm/node_modules/cacache/get.js b/node_modules/libcipm/node_modules/cacache/get.js
deleted file mode 100644
index 008cb83a9..000000000
--- a/node_modules/libcipm/node_modules/cacache/get.js
+++ /dev/null
@@ -1,247 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const figgyPudding = require('figgy-pudding')
-const fs = require('fs')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const pipe = require('mississippi').pipe
-const pipeline = require('mississippi').pipeline
-const read = require('./lib/content/read')
-const through = require('mississippi').through
-
-const GetOpts = figgyPudding({
- integrity: {},
- memoize: {},
- size: {}
-})
-
-module.exports = function get (cache, key, opts) {
- return getData(false, cache, key, opts)
-}
-module.exports.byDigest = function getByDigest (cache, digest, opts) {
- return getData(true, cache, digest, opts)
-}
-function getData (byDigest, cache, key, opts) {
- opts = GetOpts(opts)
- const memoized = (
- byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
- )
- if (memoized && opts.memoize !== false) {
- return BB.resolve(byDigest ? memoized : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size
- })
- }
- return (
- byDigest ? BB.resolve(null) : index.find(cache, key, opts)
- ).then(entry => {
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- return read(cache, byDigest ? key : entry.integrity, {
- integrity: opts.integrity,
- size: opts.size
- }).then(data => byDigest ? data : {
- metadata: entry.metadata,
- data: data,
- size: entry.size,
- integrity: entry.integrity
- }).then(res => {
- if (opts.memoize && byDigest) {
- memo.put.byDigest(cache, key, res, opts)
- } else if (opts.memoize) {
- memo.put(cache, entry, res.data, opts)
- }
- return res
- })
- })
-}
-
-module.exports.sync = function get (cache, key, opts) {
- return getDataSync(false, cache, key, opts)
-}
-module.exports.sync.byDigest = function getByDigest (cache, digest, opts) {
- return getDataSync(true, cache, digest, opts)
-}
-function getDataSync (byDigest, cache, key, opts) {
- opts = GetOpts(opts)
- const memoized = (
- byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
- )
- if (memoized && opts.memoize !== false) {
- return byDigest ? memoized : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size
- }
- }
- const entry = !byDigest && index.find.sync(cache, key, opts)
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- const data = read.sync(
- cache,
- byDigest ? key : entry.integrity,
- {
- integrity: opts.integrity,
- size: opts.size
- }
- )
- const res = byDigest
- ? data
- : {
- metadata: entry.metadata,
- data: data,
- size: entry.size,
- integrity: entry.integrity
- }
- if (opts.memoize && byDigest) {
- memo.put.byDigest(cache, key, res, opts)
- } else if (opts.memoize) {
- memo.put(cache, entry, res.data, opts)
- }
- return res
-}
-
-module.exports.stream = getStream
-function getStream (cache, key, opts) {
- opts = GetOpts(opts)
- let stream = through()
- const memoized = memo.get(cache, key, opts)
- if (memoized && opts.memoize !== false) {
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(memoized.entry.metadata)
- ev === 'integrity' && cb(memoized.entry.integrity)
- ev === 'size' && cb(memoized.entry.size)
- })
- stream.write(memoized.data, () => stream.end())
- return stream
- }
- index.find(cache, key).then(entry => {
- if (!entry) {
- return stream.emit(
- 'error', new index.NotFoundError(cache, key)
- )
- }
- let memoStream
- if (opts.memoize) {
- let memoData = []
- let memoLength = 0
- memoStream = through((c, en, cb) => {
- memoData && memoData.push(c)
- memoLength += c.length
- cb(null, c, en)
- }, cb => {
- memoData && memo.put(cache, entry, Buffer.concat(memoData, memoLength), opts)
- cb()
- })
- } else {
- memoStream = through()
- }
- stream.emit('metadata', entry.metadata)
- stream.emit('integrity', entry.integrity)
- stream.emit('size', entry.size)
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(entry.metadata)
- ev === 'integrity' && cb(entry.integrity)
- ev === 'size' && cb(entry.size)
- })
- pipe(
- read.readStream(cache, entry.integrity, opts.concat({
- size: opts.size == null ? entry.size : opts.size
- })),
- memoStream,
- stream
- )
- }).catch(err => stream.emit('error', err))
- return stream
-}
-
-module.exports.stream.byDigest = getStreamDigest
-function getStreamDigest (cache, integrity, opts) {
- opts = GetOpts(opts)
- const memoized = memo.get.byDigest(cache, integrity, opts)
- if (memoized && opts.memoize !== false) {
- const stream = through()
- stream.write(memoized, () => stream.end())
- return stream
- } else {
- let stream = read.readStream(cache, integrity, opts)
- if (opts.memoize) {
- let memoData = []
- let memoLength = 0
- const memoStream = through((c, en, cb) => {
- memoData && memoData.push(c)
- memoLength += c.length
- cb(null, c, en)
- }, cb => {
- memoData && memo.put.byDigest(
- cache,
- integrity,
- Buffer.concat(memoData, memoLength),
- opts
- )
- cb()
- })
- stream = pipeline(stream, memoStream)
- }
- return stream
- }
-}
-
-module.exports.info = info
-function info (cache, key, opts) {
- opts = GetOpts(opts)
- const memoized = memo.get(cache, key, opts)
- if (memoized && opts.memoize !== false) {
- return BB.resolve(memoized.entry)
- } else {
- return index.find(cache, key)
- }
-}
-
-module.exports.hasContent = read.hasContent
-
-module.exports.copy = function cp (cache, key, dest, opts) {
- return copy(false, cache, key, dest, opts)
-}
-module.exports.copy.byDigest = function cpDigest (cache, digest, dest, opts) {
- return copy(true, cache, digest, dest, opts)
-}
-function copy (byDigest, cache, key, dest, opts) {
- opts = GetOpts(opts)
- if (read.copy) {
- return (
- byDigest ? BB.resolve(null) : index.find(cache, key, opts)
- ).then(entry => {
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- return read.copy(
- cache, byDigest ? key : entry.integrity, dest, opts
- ).then(() => byDigest ? key : {
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity
- })
- })
- } else {
- return getData(byDigest, cache, key, opts).then(res => {
- return fs.writeFileAsync(dest, byDigest ? res : res.data)
- .then(() => byDigest ? key : {
- metadata: res.metadata,
- size: res.size,
- integrity: res.integrity
- })
- })
- }
-}
diff --git a/node_modules/libcipm/node_modules/cacache/index.js b/node_modules/libcipm/node_modules/cacache/index.js
deleted file mode 100644
index a3db581c9..000000000
--- a/node_modules/libcipm/node_modules/cacache/index.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/en.js')
diff --git a/node_modules/libcipm/node_modules/cacache/lib/content/path.js b/node_modules/libcipm/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index c67c28061..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-function contentPath (cache, integrity) {
- const sri = ssri.parse(integrity, { single: true })
- // contentPath is the *strongest* algo given
- return path.join.apply(path, [
- contentDir(cache),
- sri.algorithm
- ].concat(hashToSegments(sri.hexDigest())))
-}
-
-module.exports._contentDir = contentDir
-function contentDir (cache) {
- return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/content/read.js b/node_modules/libcipm/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index 7929524f8..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,195 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const figgyPudding = require('figgy-pudding')
-const fs = require('graceful-fs')
-const PassThrough = require('stream').PassThrough
-const pipe = BB.promisify(require('mississippi').pipe)
-const ssri = require('ssri')
-const Y = require('../util/y.js')
-
-const lstatAsync = BB.promisify(fs.lstat)
-const readFileAsync = BB.promisify(fs.readFile)
-
-const ReadOpts = figgyPudding({
- size: {}
-})
-
-module.exports = read
-function read (cache, integrity, opts) {
- opts = ReadOpts(opts)
- return withContentSri(cache, integrity, (cpath, sri) => {
- return readFileAsync(cpath, null).then(data => {
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
- }
- })
- })
-}
-
-module.exports.sync = readSync
-function readSync (cache, integrity, opts) {
- opts = ReadOpts(opts)
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- const data = fs.readFileSync(cpath)
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
- }
- })
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-function readStream (cache, integrity, opts) {
- opts = ReadOpts(opts)
- const stream = new PassThrough()
- withContentSri(cache, integrity, (cpath, sri) => {
- return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
- }).then(({ cpath, sri, stat }) => {
- return pipe(
- fs.createReadStream(cpath),
- ssri.integrityStream({
- integrity: sri,
- size: opts.size
- }),
- stream
- )
- }).catch(err => {
- stream.emit('error', err)
- })
- return stream
-}
-
-let copyFileAsync
-if (fs.copyFile) {
- module.exports.copy = copy
- module.exports.copy.sync = copySync
- copyFileAsync = BB.promisify(fs.copyFile)
-}
-
-function copy (cache, integrity, dest, opts) {
- opts = ReadOpts(opts)
- return withContentSri(cache, integrity, (cpath, sri) => {
- return copyFileAsync(cpath, dest)
- })
-}
-
-function copySync (cache, integrity, dest, opts) {
- opts = ReadOpts(opts)
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- return fs.copyFileSync(cpath, dest)
- })
-}
-
-module.exports.hasContent = hasContent
-function hasContent (cache, integrity) {
- if (!integrity) { return BB.resolve(false) }
- return withContentSri(cache, integrity, (cpath, sri) => {
- return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
- }).catch(err => {
- if (err.code === 'ENOENT') { return false }
- if (err.code === 'EPERM') {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }
- })
-}
-
-module.exports.hasContent.sync = hasContentSync
-function hasContentSync (cache, integrity) {
- if (!integrity) { return false }
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- try {
- const stat = fs.lstatSync(cpath)
- return { size: stat.size, sri, stat }
- } catch (err) {
- if (err.code === 'ENOENT') { return false }
- if (err.code === 'EPERM') {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }
- }
- })
-}
-
-function withContentSri (cache, integrity, fn) {
- return BB.try(() => {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- return BB.any(sri[sri.pickAlgorithm()].map(meta => {
- return withContentSri(cache, meta, fn)
- }, { concurrency: 1 }))
- .catch(err => {
- if ([].some.call(err, e => e.code === 'ENOENT')) {
- throw Object.assign(
- new Error('No matching content found for ' + sri.toString()),
- { code: 'ENOENT' }
- )
- } else {
- throw err[0]
- }
- })
- }
- })
-}
-
-function withContentSriSync (cache, integrity, fn) {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- let lastErr = null
- for (const meta of sri[sri.pickAlgorithm()]) {
- try {
- return withContentSriSync(cache, meta, fn)
- } catch (err) {
- lastErr = err
- }
- }
- if (lastErr) { throw lastErr }
- }
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function integrityError (sri, path) {
- var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
- err.code = 'EINTEGRITY'
- err.sri = sri
- err.path = path
- return err
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/content/rm.js b/node_modules/libcipm/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index 12cf15823..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,21 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const hasContent = require('./read').hasContent
-const rimraf = BB.promisify(require('rimraf'))
-
-module.exports = rm
-function rm (cache, integrity) {
- return hasContent(cache, integrity).then(content => {
- if (content) {
- const sri = content.sri
- if (sri) {
- return rimraf(contentPath(cache, sri)).then(() => true)
- }
- } else {
- return false
- }
- })
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/content/write.js b/node_modules/libcipm/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 4d96a3cff..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,164 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const fixOwner = require('../util/fix-owner')
-const fs = require('graceful-fs')
-const moveFile = require('../util/move-file')
-const PassThrough = require('stream').PassThrough
-const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
-const rimraf = BB.promisify(require('rimraf'))
-const ssri = require('ssri')
-const to = require('mississippi').to
-const uniqueFilename = require('unique-filename')
-const Y = require('../util/y.js')
-
-const writeFileAsync = BB.promisify(fs.writeFile)
-
-module.exports = write
-function write (cache, data, opts) {
- opts = opts || {}
- if (opts.algorithms && opts.algorithms.length > 1) {
- throw new Error(
- Y`opts.algorithms only supports a single algorithm for now`
- )
- }
- if (typeof opts.size === 'number' && data.length !== opts.size) {
- return BB.reject(sizeError(opts.size, data.length))
- }
- const sri = ssri.fromData(data, {
- algorithms: opts.algorithms
- })
- if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
- return BB.reject(checksumError(opts.integrity, sri))
- }
- return BB.using(makeTmp(cache, opts), tmp => (
- writeFileAsync(
- tmp.target, data, { flag: 'wx' }
- ).then(() => (
- moveToDestination(tmp, cache, sri, opts)
- ))
- )).then(() => ({ integrity: sri, size: data.length }))
-}
-
-module.exports.stream = writeStream
-function writeStream (cache, opts) {
- opts = opts || {}
- const inputStream = new PassThrough()
- let inputErr = false
- function errCheck () {
- if (inputErr) { throw inputErr }
- }
-
- let allDone
- const ret = to((c, n, cb) => {
- if (!allDone) {
- allDone = handleContent(inputStream, cache, opts, errCheck)
- }
- inputStream.write(c, n, cb)
- }, cb => {
- inputStream.end(() => {
- if (!allDone) {
- const e = new Error(Y`Cache input stream was empty`)
- e.code = 'ENODATA'
- return ret.emit('error', e)
- }
- allDone.then(res => {
- res.integrity && ret.emit('integrity', res.integrity)
- res.size !== null && ret.emit('size', res.size)
- cb()
- }, e => {
- ret.emit('error', e)
- })
- })
- })
- ret.once('error', e => {
- inputErr = e
- })
- return ret
-}
-
-function handleContent (inputStream, cache, opts, errCheck) {
- return BB.using(makeTmp(cache, opts), tmp => {
- errCheck()
- return pipeToTmp(
- inputStream, cache, tmp.target, opts, errCheck
- ).then(res => {
- return moveToDestination(
- tmp, cache, res.integrity, opts, errCheck
- ).then(() => res)
- })
- })
-}
-
-function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
- return BB.resolve().then(() => {
- let integrity
- let size
- const hashStream = ssri.integrityStream({
- integrity: opts.integrity,
- algorithms: opts.algorithms,
- size: opts.size
- }).on('integrity', s => {
- integrity = s
- }).on('size', s => {
- size = s
- })
- const outStream = fs.createWriteStream(tmpTarget, {
- flags: 'wx'
- })
- errCheck()
- return pipe(inputStream, hashStream, outStream).then(() => {
- return { integrity, size }
- }).catch(err => {
- return rimraf(tmpTarget).then(() => { throw err })
- })
- })
-}
-
-function makeTmp (cache, opts) {
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(
- cache, path.dirname(tmpTarget)
- ).then(() => ({
- target: tmpTarget,
- moved: false
- })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
-}
-
-function moveToDestination (tmp, cache, sri, opts, errCheck) {
- errCheck && errCheck()
- const destination = contentPath(cache, sri)
- const destDir = path.dirname(destination)
-
- return fixOwner.mkdirfix(
- cache, destDir
- ).then(() => {
- errCheck && errCheck()
- return moveFile(tmp.target, destination)
- }).then(() => {
- errCheck && errCheck()
- tmp.moved = true
- return fixOwner.chownr(cache, destination)
- })
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function checksumError (expected, found) {
- var err = new Error(Y`Integrity check failed:
- Wanted: ${expected}
- Found: ${found}`)
- err.code = 'EINTEGRITY'
- err.expected = expected
- err.found = found
- return err
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/entry-index.js b/node_modules/libcipm/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index dee1824b1..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,288 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./content/path')
-const crypto = require('crypto')
-const figgyPudding = require('figgy-pudding')
-const fixOwner = require('./util/fix-owner')
-const fs = require('graceful-fs')
-const hashToSegments = require('./util/hash-to-segments')
-const ms = require('mississippi')
-const path = require('path')
-const ssri = require('ssri')
-const Y = require('./util/y.js')
-
-const indexV = require('../package.json')['cache-version'].index
-
-const appendFileAsync = BB.promisify(fs.appendFile)
-const readFileAsync = BB.promisify(fs.readFile)
-const readdirAsync = BB.promisify(fs.readdir)
-const concat = ms.concat
-const from = ms.from
-
-module.exports.NotFoundError = class NotFoundError extends Error {
- constructor (cache, key) {
- super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
- this.code = 'ENOENT'
- this.cache = cache
- this.key = key
- }
-}
-
-const IndexOpts = figgyPudding({
- metadata: {},
- size: {}
-})
-
-module.exports.insert = insert
-function insert (cache, key, integrity, opts) {
- opts = IndexOpts(opts)
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: Date.now(),
- size: opts.size,
- metadata: opts.metadata
- }
- return fixOwner.mkdirfix(
- cache, path.dirname(bucket)
- ).then(() => {
- const stringified = JSON.stringify(entry)
- // NOTE - Cleverness ahoy!
- //
- // This works because it's tremendously unlikely for an entry to corrupt
- // another while still preserving the string length of the JSON in
- // question. So, we just slap the length in there and verify it on read.
- //
- // Thanks to @isaacs for the whiteboarding session that ended up with this.
- return appendFileAsync(
- bucket, `\n${hashEntry(stringified)}\t${stringified}`
- )
- }).then(
- () => fixOwner.chownr(cache, bucket)
- ).catch({ code: 'ENOENT' }, () => {
- // There's a class of race conditions that happen when things get deleted
- // during fixOwner, or between the two mkdirfix/chownr calls.
- //
- // It's perfectly fine to just not bother in those cases and lie
- // that the index entry was written. Because it's a cache.
- }).then(() => {
- return formatEntry(cache, entry)
- })
-}
-
-module.exports.insert.sync = insertSync
-function insertSync (cache, key, integrity, opts) {
- opts = IndexOpts(opts)
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: Date.now(),
- size: opts.size,
- metadata: opts.metadata
- }
- fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
- const stringified = JSON.stringify(entry)
- fs.appendFileSync(
- bucket, `\n${hashEntry(stringified)}\t${stringified}`
- )
- try {
- fixOwner.chownr.sync(cache, bucket)
- } catch (err) {
- if (err.code !== 'ENOENT') {
- throw err
- }
- }
- return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-function find (cache, key) {
- const bucket = bucketPath(cache, key)
- return bucketEntries(bucket).then(entries => {
- return entries.reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- }).catch(err => {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- })
-}
-
-module.exports.find.sync = findSync
-function findSync (cache, key) {
- const bucket = bucketPath(cache, key)
- try {
- return bucketEntriesSync(bucket).reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- }
-}
-
-module.exports.delete = del
-function del (cache, key, opts) {
- return insert(cache, key, null, opts)
-}
-
-module.exports.delete.sync = delSync
-function delSync (cache, key, opts) {
- return insertSync(cache, key, null, opts)
-}
-
-module.exports.lsStream = lsStream
-function lsStream (cache) {
- const indexDir = bucketDir(cache)
- const stream = from.obj()
-
- // "/cachename/*"
- readdirOrEmpty(indexDir).map(bucket => {
- const bucketPath = path.join(indexDir, bucket)
-
- // "/cachename/<bucket 0xFF>/*"
- return readdirOrEmpty(bucketPath).map(subbucket => {
- const subbucketPath = path.join(bucketPath, subbucket)
-
- // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
- return readdirOrEmpty(subbucketPath).map(entry => {
- const getKeyToEntry = bucketEntries(
- path.join(subbucketPath, entry)
- ).reduce((acc, entry) => {
- acc.set(entry.key, entry)
- return acc
- }, new Map())
-
- return getKeyToEntry.then(reduced => {
- for (let entry of reduced.values()) {
- const formatted = formatEntry(cache, entry)
- formatted && stream.push(formatted)
- }
- }).catch({ code: 'ENOENT' }, nop)
- })
- })
- }).then(() => {
- stream.push(null)
- }, err => {
- stream.emit('error', err)
- })
-
- return stream
-}
-
-module.exports.ls = ls
-function ls (cache) {
- return BB.fromNode(cb => {
- lsStream(cache).on('error', cb).pipe(concat(entries => {
- cb(null, entries.reduce((acc, xs) => {
- acc[xs.key] = xs
- return acc
- }, {}))
- }))
- })
-}
-
-function bucketEntries (bucket, filter) {
- return readFileAsync(
- bucket, 'utf8'
- ).then(data => _bucketEntries(data, filter))
-}
-
-function bucketEntriesSync (bucket, filter) {
- const data = fs.readFileSync(bucket, 'utf8')
- return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
- let entries = []
- data.split('\n').forEach(entry => {
- if (!entry) { return }
- const pieces = entry.split('\t')
- if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
- // Hash is no good! Corruption or malice? Doesn't matter!
- // EJECT EJECT
- return
- }
- let obj
- try {
- obj = JSON.parse(pieces[1])
- } catch (e) {
- // Entry is corrupted!
- return
- }
- if (obj) {
- entries.push(obj)
- }
- })
- return entries
-}
-
-module.exports._bucketDir = bucketDir
-function bucketDir (cache) {
- return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports._bucketPath = bucketPath
-function bucketPath (cache, key) {
- const hashed = hashKey(key)
- return path.join.apply(path, [bucketDir(cache)].concat(
- hashToSegments(hashed)
- ))
-}
-
-module.exports._hashKey = hashKey
-function hashKey (key) {
- return hash(key, 'sha256')
-}
-
-module.exports._hashEntry = hashEntry
-function hashEntry (str) {
- return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
- return crypto
- .createHash(digest)
- .update(str)
- .digest('hex')
-}
-
-function formatEntry (cache, entry) {
- // Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity) { return null }
- return {
- key: entry.key,
- integrity: entry.integrity,
- path: contentPath(cache, entry.integrity),
- size: entry.size,
- time: entry.time,
- metadata: entry.metadata
- }
-}
-
-function readdirOrEmpty (dir) {
- return readdirAsync(dir)
- .catch({ code: 'ENOENT' }, () => [])
- .catch({ code: 'ENOTDIR' }, () => [])
-}
-
-function nop () {
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/memoization.js b/node_modules/libcipm/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 92179c7ac..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,69 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MAX_SIZE = 50 * 1024 * 1024 // 50MB
-const MAX_AGE = 3 * 60 * 1000
-
-let MEMOIZED = new LRU({
- max: MAX_SIZE,
- maxAge: MAX_AGE,
- length: (entry, key) => {
- if (key.startsWith('key:')) {
- return entry.data.length
- } else if (key.startsWith('digest:')) {
- return entry.length
- }
- }
-})
-
-module.exports.clearMemoized = clearMemoized
-function clearMemoized () {
- const old = {}
- MEMOIZED.forEach((v, k) => {
- old[k] = v
- })
- MEMOIZED.reset()
- return old
-}
-
-module.exports.put = put
-function put (cache, entry, data, opts) {
- pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
- putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-function putDigest (cache, integrity, data, opts) {
- pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-function get (cache, key, opts) {
- return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-function getDigest (cache, integrity, opts) {
- return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
- constructor (obj) {
- this.obj = obj
- }
- get (key) { return this.obj[key] }
- set (key, val) { this.obj[key] = val }
-}
-
-function pickMem (opts) {
- if (!opts || !opts.memoize) {
- return MEMOIZED
- } else if (opts.memoize.get && opts.memoize.set) {
- return opts.memoize
- } else if (typeof opts.memoize === 'object') {
- return new ObjProxy(opts.memoize)
- } else {
- return MEMOIZED
- }
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/util/fix-owner.js b/node_modules/libcipm/node_modules/cacache/lib/util/fix-owner.js
deleted file mode 100644
index f5c33db5f..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/util/fix-owner.js
+++ /dev/null
@@ -1,128 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const chownr = BB.promisify(require('chownr'))
-const mkdirp = BB.promisify(require('mkdirp'))
-const inflight = require('promise-inflight')
-const inferOwner = require('infer-owner')
-
-// Memoize getuid()/getgid() calls.
-// patch process.setuid/setgid to invalidate cached value on change
-const self = { uid: null, gid: null }
-const getSelf = () => {
- if (typeof self.uid !== 'number') {
- self.uid = process.getuid()
- const setuid = process.setuid
- process.setuid = (uid) => {
- self.uid = null
- process.setuid = setuid
- return process.setuid(uid)
- }
- }
- if (typeof self.gid !== 'number') {
- self.gid = process.getgid()
- const setgid = process.setgid
- process.setgid = (gid) => {
- self.gid = null
- process.setgid = setgid
- return process.setgid(gid)
- }
- }
-}
-
-module.exports.chownr = fixOwner
-function fixOwner (cache, filepath) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return BB.resolve()
- }
-
- getSelf()
- if (self.uid !== 0) {
- // almost certainly can't chown anyway
- return BB.resolve()
- }
-
- return BB.resolve(inferOwner(cache)).then(owner => {
- const { uid, gid } = owner
-
- // No need to override if it's already what we used.
- if (self.uid === uid && self.gid === gid) {
- return
- }
-
- return inflight(
- 'fixOwner: fixing ownership on ' + filepath,
- () => chownr(
- filepath,
- typeof uid === 'number' ? uid : self.uid,
- typeof gid === 'number' ? gid : self.gid
- ).catch({ code: 'ENOENT' }, () => null)
- )
- })
-}
-
-module.exports.chownr.sync = fixOwnerSync
-function fixOwnerSync (cache, filepath) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return
- }
- const { uid, gid } = inferOwner.sync(cache)
- getSelf()
- if (self.uid === uid && self.gid === gid) {
- // No need to override if it's already what we used.
- return
- }
- try {
- chownr.sync(
- filepath,
- typeof uid === 'number' ? uid : self.uid,
- typeof gid === 'number' ? gid : self.gid
- )
- } catch (err) {
- // only catch ENOENT, any other error is a problem.
- if (err.code === 'ENOENT') {
- return null
- }
- throw err
- }
-}
-
-module.exports.mkdirfix = mkdirfix
-function mkdirfix (cache, p, cb) {
- // we have to infer the owner _before_ making the directory, even though
- // we aren't going to use the results, since the cache itself might not
- // exist yet. If we mkdirp it, then our current uid/gid will be assumed
- // to be correct if it creates the cache folder in the process.
- return BB.resolve(inferOwner(cache)).then(() => {
- return mkdirp(p).then(made => {
- if (made) {
- return fixOwner(cache, made).then(() => made)
- }
- }).catch({ code: 'EEXIST' }, () => {
- // There's a race in mkdirp!
- return fixOwner(cache, p).then(() => null)
- })
- })
-}
-
-module.exports.mkdirfix.sync = mkdirfixSync
-function mkdirfixSync (cache, p) {
- try {
- inferOwner.sync(cache)
- const made = mkdirp.sync(p)
- if (made) {
- fixOwnerSync(cache, made)
- return made
- }
- } catch (err) {
- if (err.code === 'EEXIST') {
- fixOwnerSync(cache, p)
- return null
- } else {
- throw err
- }
- }
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/libcipm/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 192be2a6d..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
- return [
- hash.slice(0, 2),
- hash.slice(2, 4),
- hash.slice(4)
- ]
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/util/move-file.js b/node_modules/libcipm/node_modules/cacache/lib/util/move-file.js
deleted file mode 100644
index b43744b3d..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/util/move-file.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs')
-const BB = require('bluebird')
-const chmod = BB.promisify(fs.chmod)
-const unlink = BB.promisify(fs.unlink)
-let move
-let pinflight
-
-module.exports = moveFile
-function moveFile (src, dest) {
- // This isn't quite an fs.rename -- the assumption is that
- // if `dest` already exists, and we get certain errors while
- // trying to move it, we should just not bother.
- //
- // In the case of cache corruption, users will receive an
- // EINTEGRITY error elsewhere, and can remove the offending
- // content their own way.
- //
- // Note that, as the name suggests, this strictly only supports file moves.
- return BB.fromNode(cb => {
- fs.link(src, dest, err => {
- if (err) {
- if (err.code === 'EEXIST' || err.code === 'EBUSY') {
- // file already exists, so whatever
- } else if (err.code === 'EPERM' && process.platform === 'win32') {
- // file handle stayed open even past graceful-fs limits
- } else {
- return cb(err)
- }
- }
- return cb()
- })
- }).then(() => {
- // content should never change for any reason, so make it read-only
- return BB.join(unlink(src), process.platform !== 'win32' && chmod(dest, '0444'))
- }).catch(() => {
- if (!pinflight) { pinflight = require('promise-inflight') }
- return pinflight('cacache-move-file:' + dest, () => {
- return BB.promisify(fs.stat)(dest).catch(err => {
- if (err.code !== 'ENOENT') {
- // Something else is wrong here. Bail bail bail
- throw err
- }
- // file doesn't already exist! let's try a rename -> copy fallback
- if (!move) { move = require('move-concurrently') }
- return move(src, dest, { BB, fs })
- })
- })
- })
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/util/tmp.js b/node_modules/libcipm/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 78494b8ea..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,37 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const figgyPudding = require('figgy-pudding')
-const fixOwner = require('./fix-owner')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const uniqueFilename = require('unique-filename')
-
-const TmpOpts = figgyPudding({
- tmpPrefix: {}
-})
-
-module.exports.mkdir = mktmpdir
-function mktmpdir (cache, opts) {
- opts = TmpOpts(opts)
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(cache, tmpTarget).then(() => {
- return tmpTarget
- })
-}
-
-module.exports.withTmp = withTmp
-function withTmp (cache, opts, cb) {
- if (!cb) {
- cb = opts
- opts = null
- }
- opts = TmpOpts(opts)
- return BB.using(mktmpdir(cache, opts).disposer(rimraf), cb)
-}
-
-module.exports.fix = fixtmpdir
-function fixtmpdir (cache) {
- return fixOwner(cache, path.join(cache, 'tmp'))
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/util/y.js b/node_modules/libcipm/node_modules/cacache/lib/util/y.js
deleted file mode 100644
index d62bedacb..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/util/y.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict'
-
-const path = require('path')
-const y18n = require('y18n')({
- directory: path.join(__dirname, '../../locales'),
- locale: 'en',
- updateFiles: process.env.CACACHE_UPDATE_LOCALE_FILES === 'true'
-})
-
-module.exports = yTag
-function yTag (parts) {
- let str = ''
- parts.forEach((part, i) => {
- const arg = arguments[i + 1]
- str += part
- if (arg) {
- str += '%s'
- }
- })
- return y18n.__.apply(null, [str].concat([].slice.call(arguments, 1)))
-}
-
-module.exports.setLocale = locale => {
- y18n.setLocale(locale)
-}
diff --git a/node_modules/libcipm/node_modules/cacache/lib/verify.js b/node_modules/libcipm/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 617d38db1..000000000
--- a/node_modules/libcipm/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./content/path')
-const figgyPudding = require('figgy-pudding')
-const finished = BB.promisify(require('mississippi').finished)
-const fixOwner = require('./util/fix-owner')
-const fs = require('graceful-fs')
-const glob = BB.promisify(require('glob'))
-const index = require('./entry-index')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const ssri = require('ssri')
-
-BB.promisifyAll(fs)
-
-const VerifyOpts = figgyPudding({
- concurrency: {
- default: 20
- },
- filter: {},
- log: {
- default: { silly () {} }
- }
-})
-
-module.exports = verify
-function verify (cache, opts) {
- opts = VerifyOpts(opts)
- opts.log.silly('verify', 'verifying cache at', cache)
- return BB.reduce([
- markStartTime,
- fixPerms,
- garbageCollect,
- rebuildIndex,
- cleanTmp,
- writeVerifile,
- markEndTime
- ], (stats, step, i) => {
- const label = step.name || `step #${i}`
- const start = new Date()
- return BB.resolve(step(cache, opts)).then(s => {
- s && Object.keys(s).forEach(k => {
- stats[k] = s[k]
- })
- const end = new Date()
- if (!stats.runTime) { stats.runTime = {} }
- stats.runTime[label] = end - start
- return stats
- })
- }, {}).tap(stats => {
- stats.runTime.total = stats.endTime - stats.startTime
- opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
- })
-}
-
-function markStartTime (cache, opts) {
- return { startTime: new Date() }
-}
-
-function markEndTime (cache, opts) {
- return { endTime: new Date() }
-}
-
-function fixPerms (cache, opts) {
- opts.log.silly('verify', 'fixing cache permissions')
- return fixOwner.mkdirfix(cache, cache).then(() => {
- // TODO - fix file permissions too
- return fixOwner.chownr(cache, cache)
- }).then(() => null)
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rimraf it.
-//
-function garbageCollect (cache, opts) {
- opts.log.silly('verify', 'garbage collecting content')
- const indexStream = index.lsStream(cache)
- const liveContent = new Set()
- indexStream.on('data', entry => {
- if (opts.filter && !opts.filter(entry)) { return }
- liveContent.add(entry.integrity.toString())
- })
- return finished(indexStream).then(() => {
- const contentDir = contentPath._contentDir(cache)
- return glob(path.join(contentDir, '**'), {
- follow: false,
- nodir: true,
- nosort: true
- }).then(files => {
- return BB.resolve({
- verifiedContent: 0,
- reclaimedCount: 0,
- reclaimedSize: 0,
- badContentCount: 0,
- keptSize: 0
- }).tap((stats) => BB.map(files, (f) => {
- const split = f.split(/[/\\]/)
- const digest = split.slice(split.length - 3).join('')
- const algo = split[split.length - 4]
- const integrity = ssri.fromHex(digest, algo)
- if (liveContent.has(integrity.toString())) {
- return verifyContent(f, integrity).then(info => {
- if (!info.valid) {
- stats.reclaimedCount++
- stats.badContentCount++
- stats.reclaimedSize += info.size
- } else {
- stats.verifiedContent++
- stats.keptSize += info.size
- }
- return stats
- })
- } else {
- // No entries refer to this content. We can delete.
- stats.reclaimedCount++
- return fs.statAsync(f).then(s => {
- return rimraf(f).then(() => {
- stats.reclaimedSize += s.size
- return stats
- })
- })
- }
- }, { concurrency: opts.concurrency }))
- })
- })
-}
-
-function verifyContent (filepath, sri) {
- return fs.statAsync(filepath).then(stat => {
- const contentInfo = {
- size: stat.size,
- valid: true
- }
- return ssri.checkStream(
- fs.createReadStream(filepath),
- sri
- ).catch(err => {
- if (err.code !== 'EINTEGRITY') { throw err }
- return rimraf(filepath).then(() => {
- contentInfo.valid = false
- })
- }).then(() => contentInfo)
- }).catch({ code: 'ENOENT' }, () => ({ size: 0, valid: false }))
-}
-
-function rebuildIndex (cache, opts) {
- opts.log.silly('verify', 'rebuilding index')
- return index.ls(cache).then(entries => {
- const stats = {
- missingContent: 0,
- rejectedEntries: 0,
- totalEntries: 0
- }
- const buckets = {}
- for (let k in entries) {
- if (entries.hasOwnProperty(k)) {
- const hashed = index._hashKey(k)
- const entry = entries[k]
- const excluded = opts.filter && !opts.filter(entry)
- excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
- buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
- // skip
- } else if (excluded) {
- buckets[hashed] = []
- buckets[hashed]._path = index._bucketPath(cache, k)
- } else {
- buckets[hashed] = [entry]
- buckets[hashed]._path = index._bucketPath(cache, k)
- }
- }
- }
- return BB.map(Object.keys(buckets), key => {
- return rebuildBucket(cache, buckets[key], stats, opts)
- }, { concurrency: opts.concurrency }).then(() => stats)
- })
-}
-
-function rebuildBucket (cache, bucket, stats, opts) {
- return fs.truncateAsync(bucket._path).then(() => {
- // This needs to be serialized because cacache explicitly
- // lets very racy bucket conflicts clobber each other.
- return BB.mapSeries(bucket, entry => {
- const content = contentPath(cache, entry.integrity)
- return fs.statAsync(content).then(() => {
- return index.insert(cache, entry.key, entry.integrity, {
- metadata: entry.metadata,
- size: entry.size
- }).then(() => { stats.totalEntries++ })
- }).catch({ code: 'ENOENT' }, () => {
- stats.rejectedEntries++
- stats.missingContent++
- })
- })
- })
-}
-
-function cleanTmp (cache, opts) {
- opts.log.silly('verify', 'cleaning tmp directory')
- return rimraf(path.join(cache, 'tmp'))
-}
-
-function writeVerifile (cache, opts) {
- const verifile = path.join(cache, '_lastverified')
- opts.log.silly('verify', 'writing verifile to ' + verifile)
- try {
- return fs.writeFileAsync(verifile, '' + (+(new Date())))
- } finally {
- fixOwner.chownr.sync(cache, verifile)
- }
-}
-
-module.exports.lastRun = lastRun
-function lastRun (cache) {
- return fs.readFileAsync(
- path.join(cache, '_lastverified'), 'utf8'
- ).then(data => new Date(+data))
-}
diff --git a/node_modules/libcipm/node_modules/cacache/locales/en.js b/node_modules/libcipm/node_modules/cacache/locales/en.js
deleted file mode 100644
index 1715fdb53..000000000
--- a/node_modules/libcipm/node_modules/cacache/locales/en.js
+++ /dev/null
@@ -1,47 +0,0 @@
-'use strict'
-
-const ls = require('../ls.js')
-const get = require('../get.js')
-const put = require('../put.js')
-const rm = require('../rm.js')
-const verify = require('../verify.js')
-const setLocale = require('../lib/util/y.js').setLocale
-const clearMemoized = require('../lib/memoization.js').clearMemoized
-const tmp = require('../lib/util/tmp.js')
-
-setLocale('en')
-
-const x = module.exports
-
-x.ls = cache => ls(cache)
-x.ls.stream = cache => ls.stream(cache)
-
-x.get = (cache, key, opts) => get(cache, key, opts)
-x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
-x.get.sync = (cache, key, opts) => get.sync(cache, key, opts)
-x.get.sync.byDigest = (cache, key, opts) => get.sync.byDigest(cache, key, opts)
-x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
-x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
-x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
-x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
-x.get.info = (cache, key) => get.info(cache, key)
-x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)
-x.get.hasContent.sync = (cache, hash) => get.hasContent.sync(cache, hash)
-
-x.put = (cache, key, data, opts) => put(cache, key, data, opts)
-x.put.stream = (cache, key, opts) => put.stream(cache, key, opts)
-
-x.rm = (cache, key) => rm.entry(cache, key)
-x.rm.all = cache => rm.all(cache)
-x.rm.entry = x.rm
-x.rm.content = (cache, hash) => rm.content(cache, hash)
-
-x.setLocale = lang => setLocale(lang)
-x.clearMemoized = () => clearMemoized()
-
-x.tmp = {}
-x.tmp.mkdir = (cache, opts) => tmp.mkdir(cache, opts)
-x.tmp.withTmp = (cache, opts, cb) => tmp.withTmp(cache, opts, cb)
-
-x.verify = (cache, opts) => verify(cache, opts)
-x.verify.lastRun = cache => verify.lastRun(cache)
diff --git a/node_modules/libcipm/node_modules/cacache/locales/en.json b/node_modules/libcipm/node_modules/cacache/locales/en.json
deleted file mode 100644
index 4f1452884..000000000
--- a/node_modules/libcipm/node_modules/cacache/locales/en.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "No cache entry for `%s` found in `%s`": "No cache entry for %s found in %s",
- "Integrity verification failed for %s (%s)": "Integrity verification failed for %s (%s)",
- "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Bad data size: expected inserted data to be %s bytes, but got %s instead",
- "Cache input stream was empty": "Cache input stream was empty",
- "Integrity check failed:\n Wanted: %s\n Found: %s": "Integrity check failed:\n Wanted: %s\n Found: %s"
-}
\ No newline at end of file
diff --git a/node_modules/libcipm/node_modules/cacache/locales/es.js b/node_modules/libcipm/node_modules/cacache/locales/es.js
deleted file mode 100644
index ac4e4cfe7..000000000
--- a/node_modules/libcipm/node_modules/cacache/locales/es.js
+++ /dev/null
@@ -1,49 +0,0 @@
-'use strict'
-
-const ls = require('../ls.js')
-const get = require('../get.js')
-const put = require('../put.js')
-const rm = require('../rm.js')
-const verify = require('../verify.js')
-const setLocale = require('../lib/util/y.js').setLocale
-const clearMemoized = require('../lib/memoization.js').clearMemoized
-const tmp = require('../lib/util/tmp.js')
-
-setLocale('es')
-
-const x = module.exports
-
-x.ls = cache => ls(cache)
-x.ls.flujo = cache => ls.stream(cache)
-
-x.saca = (cache, clave, ops) => get(cache, clave, ops)
-x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops)
-x.saca.sinc = (cache, clave, ops) => get.sync(cache, clave, ops)
-x.saca.sinc.porHacheo = (cache, hacheo, ops) => get.sync.byDigest(cache, hacheo, ops)
-x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops)
-x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops)
-x.saca.copia = (cache, clave, destino, opts) => get.copy(cache, clave, destino, opts)
-x.saca.copia.porHacheo = (cache, hacheo, destino, opts) => get.copy.byDigest(cache, hacheo, destino, opts)
-x.saca.info = (cache, clave) => get.info(cache, clave)
-x.saca.tieneDatos = (cache, hacheo) => get.hasContent(cache, hacheo)
-x.saca.tieneDatos.sinc = (cache, hacheo) => get.hasContent.sync(cache, hacheo)
-
-x.mete = (cache, clave, datos, ops) => put(cache, clave, datos, ops)
-x.mete.flujo = (cache, clave, ops) => put.stream(cache, clave, ops)
-
-x.rm = (cache, clave) => rm.entry(cache, clave)
-x.rm.todo = cache => rm.all(cache)
-x.rm.entrada = x.rm
-x.rm.datos = (cache, hacheo) => rm.content(cache, hacheo)
-
-x.ponLenguaje = lang => setLocale(lang)
-x.limpiaMemoizado = () => clearMemoized()
-
-x.tmp = {}
-x.tmp.mkdir = (cache, ops) => tmp.mkdir(cache, ops)
-x.tmp.hazdir = x.tmp.mkdir
-x.tmp.conTmp = (cache, ops, cb) => tmp.withTmp(cache, ops, cb)
-
-x.verifica = (cache, ops) => verify(cache, ops)
-x.verifica.ultimaVez = cache => verify.lastRun(cache)
-x.verifica.últimaVez = x.verifica.ultimaVez
diff --git a/node_modules/libcipm/node_modules/cacache/locales/es.json b/node_modules/libcipm/node_modules/cacache/locales/es.json
deleted file mode 100644
index a91d76225..000000000
--- a/node_modules/libcipm/node_modules/cacache/locales/es.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "No cache entry for `%s` found in `%s`": "No existe ninguna entrada para «%s» en «%s»",
- "Integrity verification failed for %s (%s)": "Verificación de integridad falló para «%s» (%s)",
- "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Tamaño incorrecto de datos: los datos insertados debieron haber sido %s octetos, pero fueron %s",
- "Cache input stream was empty": "El stream de entrada al caché estaba vacío"
-}
diff --git a/node_modules/libcipm/node_modules/cacache/ls.js b/node_modules/libcipm/node_modules/cacache/ls.js
deleted file mode 100644
index 9f49b388a..000000000
--- a/node_modules/libcipm/node_modules/cacache/ls.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-
-var index = require('./lib/entry-index')
-
-module.exports = index.ls
-module.exports.stream = index.lsStream
diff --git a/node_modules/libcipm/node_modules/cacache/package.json b/node_modules/libcipm/node_modules/cacache/package.json
deleted file mode 100644
index 5d8cc8953..000000000
--- a/node_modules/libcipm/node_modules/cacache/package.json
+++ /dev/null
@@ -1,126 +0,0 @@
-{
- "_from": "cacache@^12.0.2",
- "_id": "cacache@12.0.3",
- "_inBundle": false,
- "_integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==",
- "_location": "/libcipm/cacache",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "cacache@^12.0.2",
- "name": "cacache",
- "escapedName": "cacache",
- "rawSpec": "^12.0.2",
- "saveSpec": null,
- "fetchSpec": "^12.0.2"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz",
- "_shasum": "be99abba4e1bf5df461cd5a2c1071fc432573390",
- "_spec": "cacache@^12.0.2",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/cacache/issues"
- },
- "bundleDependencies": false,
- "cache-version": {
- "content": "2",
- "index": "5"
- },
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "contributors": [
- {
- "name": "Charlotte Spencer",
- "email": "charlottelaspencer@gmail.com"
- },
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org"
- }
- ],
- "dependencies": {
- "bluebird": "^3.5.5",
- "chownr": "^1.1.1",
- "figgy-pudding": "^3.5.1",
- "glob": "^7.1.4",
- "graceful-fs": "^4.1.15",
- "infer-owner": "^1.0.3",
- "lru-cache": "^5.1.1",
- "mississippi": "^3.0.0",
- "mkdirp": "^0.5.1",
- "move-concurrently": "^1.0.1",
- "promise-inflight": "^1.0.1",
- "rimraf": "^2.6.3",
- "ssri": "^6.0.1",
- "unique-filename": "^1.1.1",
- "y18n": "^4.0.0"
- },
- "deprecated": false,
- "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
- "devDependencies": {
- "benchmark": "^2.1.4",
- "chalk": "^2.4.2",
- "cross-env": "^5.1.4",
- "require-inject": "^1.4.4",
- "standard": "^12.0.1",
- "standard-version": "^6.0.1",
- "tacks": "^1.3.0",
- "tap": "^12.7.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.9"
- },
- "files": [
- "*.js",
- "lib",
- "locales"
- ],
- "homepage": "https://github.com/npm/cacache#readme",
- "keywords": [
- "cache",
- "caching",
- "content-addressable",
- "sri",
- "sri hash",
- "subresource integrity",
- "cache",
- "storage",
- "store",
- "file store",
- "filesystem",
- "disk cache",
- "disk storage"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "cacache",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/cacache.git"
- },
- "scripts": {
- "benchmarks": "node test/benchmarks",
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "cross-env CACACHE_UPDATE_LOCALE_FILES=true tap --coverage --nyc-arg=--all -J test/*.js",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "12.0.3"
-}
diff --git a/node_modules/libcipm/node_modules/cacache/put.js b/node_modules/libcipm/node_modules/cacache/put.js
deleted file mode 100644
index a40063930..000000000
--- a/node_modules/libcipm/node_modules/cacache/put.js
+++ /dev/null
@@ -1,86 +0,0 @@
-'use strict'
-
-const figgyPudding = require('figgy-pudding')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const write = require('./lib/content/write')
-const to = require('mississippi').to
-
-const PutOpts = figgyPudding({
- algorithms: {
- default: ['sha512']
- },
- integrity: {},
- memoize: {},
- metadata: {},
- pickAlgorithm: {},
- size: {},
- tmpPrefix: {},
- single: {},
- sep: {},
- error: {},
- strict: {}
-})
-
-module.exports = putData
-function putData (cache, key, data, opts) {
- opts = PutOpts(opts)
- return write(cache, data, opts).then(res => {
- return index.insert(
- cache, key, res.integrity, opts.concat({ size: res.size })
- ).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, data, opts)
- }
- return res.integrity
- })
- })
-}
-
-module.exports.stream = putStream
-function putStream (cache, key, opts) {
- opts = PutOpts(opts)
- let integrity
- let size
- const contentStream = write.stream(
- cache, opts
- ).on('integrity', int => {
- integrity = int
- }).on('size', s => {
- size = s
- })
- let memoData
- let memoTotal = 0
- const stream = to((chunk, enc, cb) => {
- contentStream.write(chunk, enc, () => {
- if (opts.memoize) {
- if (!memoData) { memoData = [] }
- memoData.push(chunk)
- memoTotal += chunk.length
- }
- cb()
- })
- }, cb => {
- contentStream.end(() => {
- index.insert(cache, key, integrity, opts.concat({ size })).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
- }
- stream.emit('integrity', integrity)
- cb()
- })
- })
- })
- let erred = false
- stream.once('error', err => {
- if (erred) { return }
- erred = true
- contentStream.emit('error', err)
- })
- contentStream.once('error', err => {
- if (erred) { return }
- erred = true
- stream.emit('error', err)
- })
- return stream
-}
diff --git a/node_modules/libcipm/node_modules/cacache/rm.js b/node_modules/libcipm/node_modules/cacache/rm.js
deleted file mode 100644
index e71a1d27b..000000000
--- a/node_modules/libcipm/node_modules/cacache/rm.js
+++ /dev/null
@@ -1,28 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const rmContent = require('./lib/content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-function entry (cache, key) {
- memo.clearMemoized()
- return index.delete(cache, key)
-}
-
-module.exports.content = content
-function content (cache, integrity) {
- memo.clearMemoized()
- return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-function all (cache) {
- memo.clearMemoized()
- return rimraf(path.join(cache, '*(content-*|index-*)'))
-}
diff --git a/node_modules/libcipm/node_modules/cacache/verify.js b/node_modules/libcipm/node_modules/cacache/verify.js
deleted file mode 100644
index db7763d7a..000000000
--- a/node_modules/libcipm/node_modules/cacache/verify.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./lib/verify')
diff --git a/node_modules/libcipm/node_modules/fs-minipass/LICENSE b/node_modules/libcipm/node_modules/fs-minipass/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/fs-minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/fs-minipass/README.md b/node_modules/libcipm/node_modules/fs-minipass/README.md
deleted file mode 100644
index 1e61241cf..000000000
--- a/node_modules/libcipm/node_modules/fs-minipass/README.md
+++ /dev/null
@@ -1,70 +0,0 @@
-# fs-minipass
-
-Filesystem streams based on [minipass](http://npm.im/minipass).
-
-4 classes are exported:
-
-- ReadStream
-- ReadStreamSync
-- WriteStream
-- WriteStreamSync
-
-When using `ReadStreamSync`, all of the data is made available
-immediately upon consuming the stream. Nothing is buffered in memory
-when the stream is constructed. If the stream is piped to a writer,
-then it will synchronously `read()` and emit data into the writer as
-fast as the writer can consume it. (That is, it will respect
-backpressure.) If you call `stream.read()` then it will read the
-entire file and return the contents.
-
-When using `WriteStreamSync`, every write is flushed to the file
-synchronously. If your writes all come in a single tick, then it'll
-write it all out in a single tick. It's as synchronous as you are.
-
-The async versions work much like their node builtin counterparts,
-with the exception of introducing significantly less Stream machinery
-overhead.
-
-## USAGE
-
-It's just streams, you pipe them or read() them or write() to them.
-
-```js
-const fsm = require('fs-minipass')
-const readStream = new fsm.ReadStream('file.txt')
-const writeStream = new fsm.WriteStream('output.txt')
-writeStream.write('some file header or whatever\n')
-readStream.pipe(writeStream)
-```
-
-## ReadStream(path, options)
-
-Path string is required, but somewhat irrelevant if an open file
-descriptor is passed in as an option.
-
-Options:
-
-- `fd` Pass in a numeric file descriptor, if the file is already open.
-- `readSize` The size of reads to do, defaults to 16MB
-- `size` The size of the file, if known. Prevents zero-byte read()
- call at the end.
-- `autoClose` Set to `false` to prevent the file descriptor from being
- closed when the file is done being read.
-
-## WriteStream(path, options)
-
-Path string is required, but somewhat irrelevant if an open file
-descriptor is passed in as an option.
-
-Options:
-
-- `fd` Pass in a numeric file descriptor, if the file is already open.
-- `mode` The mode to create the file with. Defaults to `0o666`.
- `start` The position in the file to start writing. If not
- specified, then the file will start writing at position zero, and be
- truncated by default. (A short sketch of these options follows the list.)
-- `autoClose` Set to `false` to prevent the file descriptor from being
- closed when the stream is ended.
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
- passed in, since the file won't be opened in that case. Defaults to
- `'r+'` if a `start` position is specified, or `'w'` otherwise.
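-
-A short sketch of the options described above (file names, sizes, and the
-`start` offset are just placeholders):
-
-```js
-const fs = require('fs')
-const fsm = require('fs-minipass')
-
-// read a file whose size is already known, reusing an already-open descriptor
-const fd = fs.openSync('input.bin', 'r')
-const reader = new fsm.ReadStream('input.bin', { fd, size: 1024 })
-
-// overwrite bytes starting at offset 100 without truncating the rest of the file
-const writer = new fsm.WriteStream('output.bin', { start: 100 })
-reader.pipe(writer)
-```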
diff --git a/node_modules/libcipm/node_modules/fs-minipass/index.js b/node_modules/libcipm/node_modules/fs-minipass/index.js
deleted file mode 100644
index cd585a83c..000000000
--- a/node_modules/libcipm/node_modules/fs-minipass/index.js
+++ /dev/null
@@ -1,387 +0,0 @@
-'use strict'
-const MiniPass = require('minipass')
-const EE = require('events').EventEmitter
-const fs = require('fs')
-
-// for writev
-const binding = process.binding('fs')
-const writeBuffers = binding.writeBuffers
-/* istanbul ignore next */
-const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
-
-const _autoClose = Symbol('_autoClose')
-const _close = Symbol('_close')
-const _ended = Symbol('_ended')
-const _fd = Symbol('_fd')
-const _finished = Symbol('_finished')
-const _flags = Symbol('_flags')
-const _flush = Symbol('_flush')
-const _handleChunk = Symbol('_handleChunk')
-const _makeBuf = Symbol('_makeBuf')
-const _mode = Symbol('_mode')
-const _needDrain = Symbol('_needDrain')
-const _onerror = Symbol('_onerror')
-const _onopen = Symbol('_onopen')
-const _onread = Symbol('_onread')
-const _onwrite = Symbol('_onwrite')
-const _open = Symbol('_open')
-const _path = Symbol('_path')
-const _pos = Symbol('_pos')
-const _queue = Symbol('_queue')
-const _read = Symbol('_read')
-const _readSize = Symbol('_readSize')
-const _reading = Symbol('_reading')
-const _remain = Symbol('_remain')
-const _size = Symbol('_size')
-const _write = Symbol('_write')
-const _writing = Symbol('_writing')
-const _defaultFlag = Symbol('_defaultFlag')
-
-class ReadStream extends MiniPass {
- constructor (path, opt) {
- opt = opt || {}
- super(opt)
-
- this.writable = false
-
- if (typeof path !== 'string')
- throw new TypeError('path must be a string')
-
- this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
- this[_path] = path
- this[_readSize] = opt.readSize || 16*1024*1024
- this[_reading] = false
- this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
- this[_remain] = this[_size]
- this[_autoClose] = typeof opt.autoClose === 'boolean' ?
- opt.autoClose : true
-
- if (typeof this[_fd] === 'number')
- this[_read]()
- else
- this[_open]()
- }
-
- get fd () { return this[_fd] }
- get path () { return this[_path] }
-
- write () {
- throw new TypeError('this is a readable stream')
- }
-
- end () {
- throw new TypeError('this is a readable stream')
- }
-
- [_open] () {
- fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
- }
-
- [_onopen] (er, fd) {
- if (er)
- this[_onerror](er)
- else {
- this[_fd] = fd
- this.emit('open', fd)
- this[_read]()
- }
- }
-
- [_makeBuf] () {
- return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
- }
-
- [_read] () {
- if (!this[_reading]) {
- this[_reading] = true
- const buf = this[_makeBuf]()
- /* istanbul ignore if */
- if (buf.length === 0) return process.nextTick(() => this[_onread](null, 0, buf))
- fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
- this[_onread](er, br, buf))
- }
- }
-
- [_onread] (er, br, buf) {
- this[_reading] = false
- if (er)
- this[_onerror](er)
- else if (this[_handleChunk](br, buf))
- this[_read]()
- }
-
- [_close] () {
- if (this[_autoClose] && typeof this[_fd] === 'number') {
- fs.close(this[_fd], _ => this.emit('close'))
- this[_fd] = null
- }
- }
-
- [_onerror] (er) {
- this[_reading] = true
- this[_close]()
- this.emit('error', er)
- }
-
- [_handleChunk] (br, buf) {
- let ret = false
- // no effect if infinite
- this[_remain] -= br
- if (br > 0)
- ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
-
- if (br === 0 || this[_remain] <= 0) {
- ret = false
- this[_close]()
- super.end()
- }
-
- return ret
- }
-
- emit (ev, data) {
- switch (ev) {
- case 'prefinish':
- case 'finish':
- break
-
- case 'drain':
- if (typeof this[_fd] === 'number')
- this[_read]()
- break
-
- default:
- return super.emit(ev, data)
- }
- }
-}
-
-class ReadStreamSync extends ReadStream {
- [_open] () {
- let threw = true
- try {
- this[_onopen](null, fs.openSync(this[_path], 'r'))
- threw = false
- } finally {
- if (threw)
- this[_close]()
- }
- }
-
- [_read] () {
- let threw = true
- try {
- if (!this[_reading]) {
- this[_reading] = true
- do {
- const buf = this[_makeBuf]()
- /* istanbul ignore next */
- const br = buf.length === 0 ? 0 : fs.readSync(this[_fd], buf, 0, buf.length, null)
- if (!this[_handleChunk](br, buf))
- break
- } while (true)
- this[_reading] = false
- }
- threw = false
- } finally {
- if (threw)
- this[_close]()
- }
- }
-
- [_close] () {
- if (this[_autoClose] && typeof this[_fd] === 'number') {
- try {
- fs.closeSync(this[_fd])
- } catch (er) {}
- this[_fd] = null
- this.emit('close')
- }
- }
-}
-
-class WriteStream extends EE {
- constructor (path, opt) {
- opt = opt || {}
- super(opt)
- this.readable = false
- this[_writing] = false
- this[_ended] = false
- this[_needDrain] = false
- this[_queue] = []
- this[_path] = path
- this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
- this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
- this[_pos] = typeof opt.start === 'number' ? opt.start : null
- this[_autoClose] = typeof opt.autoClose === 'boolean' ?
- opt.autoClose : true
-
- // truncating makes no sense when writing into the middle
- const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
- this[_defaultFlag] = opt.flags === undefined
- this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
-
- if (this[_fd] === null)
- this[_open]()
- }
-
- get fd () { return this[_fd] }
- get path () { return this[_path] }
-
- [_onerror] (er) {
- this[_close]()
- this[_writing] = true
- this.emit('error', er)
- }
-
- [_open] () {
- fs.open(this[_path], this[_flags], this[_mode],
- (er, fd) => this[_onopen](er, fd))
- }
-
- [_onopen] (er, fd) {
- if (this[_defaultFlag] &&
- this[_flags] === 'r+' &&
- er && er.code === 'ENOENT') {
- this[_flags] = 'w'
- this[_open]()
- } else if (er)
- this[_onerror](er)
- else {
- this[_fd] = fd
- this.emit('open', fd)
- this[_flush]()
- }
- }
-
- end (buf, enc) {
- if (buf)
- this.write(buf, enc)
-
- this[_ended] = true
-
- // synthetic after-write logic, where drain/finish live
- if (!this[_writing] && !this[_queue].length &&
- typeof this[_fd] === 'number')
- this[_onwrite](null, 0)
- }
-
- write (buf, enc) {
- if (typeof buf === 'string')
- buf = Buffer.from(buf, enc)
-
- if (this[_ended]) {
- this.emit('error', new Error('write() after end()'))
- return false
- }
-
- if (this[_fd] === null || this[_writing] || this[_queue].length) {
- this[_queue].push(buf)
- this[_needDrain] = true
- return false
- }
-
- this[_writing] = true
- this[_write](buf)
- return true
- }
-
- [_write] (buf) {
- fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
- this[_onwrite](er, bw))
- }
-
- [_onwrite] (er, bw) {
- if (er)
- this[_onerror](er)
- else {
- if (this[_pos] !== null)
- this[_pos] += bw
- if (this[_queue].length)
- this[_flush]()
- else {
- this[_writing] = false
-
- if (this[_ended] && !this[_finished]) {
- this[_finished] = true
- this[_close]()
- this.emit('finish')
- } else if (this[_needDrain]) {
- this[_needDrain] = false
- this.emit('drain')
- }
- }
- }
- }
-
- [_flush] () {
- if (this[_queue].length === 0) {
- if (this[_ended])
- this[_onwrite](null, 0)
- } else if (this[_queue].length === 1)
- this[_write](this[_queue].pop())
- else {
- const iovec = this[_queue]
- this[_queue] = []
- writev(this[_fd], iovec, this[_pos],
- (er, bw) => this[_onwrite](er, bw))
- }
- }
-
- [_close] () {
- if (this[_autoClose] && typeof this[_fd] === 'number') {
- fs.close(this[_fd], _ => this.emit('close'))
- this[_fd] = null
- }
- }
-}
-
-class WriteStreamSync extends WriteStream {
- [_open] () {
- let fd
- try {
- fd = fs.openSync(this[_path], this[_flags], this[_mode])
- } catch (er) {
- if (this[_defaultFlag] &&
- this[_flags] === 'r+' &&
- er && er.code === 'ENOENT') {
- this[_flags] = 'w'
- return this[_open]()
- } else
- throw er
- }
- this[_onopen](null, fd)
- }
-
- [_close] () {
- if (this[_autoClose] && typeof this[_fd] === 'number') {
- try {
- fs.closeSync(this[_fd])
- } catch (er) {}
- this[_fd] = null
- this.emit('close')
- }
- }
-
- [_write] (buf) {
- try {
- this[_onwrite](null,
- fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
- } catch (er) {
- this[_onwrite](er, 0)
- }
- }
-}
-
-const writev = (fd, iovec, pos, cb) => {
- const done = (er, bw) => cb(er, bw, iovec)
- const req = new FSReqWrap()
- req.oncomplete = done
- binding.writeBuffers(fd, iovec, pos, req)
-}
-
-exports.ReadStream = ReadStream
-exports.ReadStreamSync = ReadStreamSync
-
-exports.WriteStream = WriteStream
-exports.WriteStreamSync = WriteStreamSync
diff --git a/node_modules/libcipm/node_modules/fs-minipass/package.json b/node_modules/libcipm/node_modules/fs-minipass/package.json
deleted file mode 100644
index 6cc2bd684..000000000
--- a/node_modules/libcipm/node_modules/fs-minipass/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "fs-minipass@^1.2.5",
- "_id": "fs-minipass@1.2.7",
- "_inBundle": false,
- "_integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
- "_location": "/libcipm/fs-minipass",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "fs-minipass@^1.2.5",
- "name": "fs-minipass",
- "escapedName": "fs-minipass",
- "rawSpec": "^1.2.5",
- "saveSpec": null,
- "fetchSpec": "^1.2.5"
- },
- "_requiredBy": [
- "/libcipm/tar"
- ],
- "_resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
- "_shasum": "ccff8570841e7fe4265693da88936c55aed7f7c7",
- "_spec": "fs-minipass@^1.2.5",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/tar",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/fs-minipass/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "minipass": "^2.6.0"
- },
- "deprecated": false,
- "description": "fs read and write streams based on minipass",
- "devDependencies": {
- "mutate-fs": "^2.0.1",
- "tap": "^14.6.4"
- },
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/npm/fs-minipass#readme",
- "keywords": [],
- "license": "ISC",
- "main": "index.js",
- "name": "fs-minipass",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/fs-minipass.git"
- },
- "scripts": {
- "postpublish": "git push origin --follow-tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true
- },
- "version": "1.2.7"
-}
diff --git a/node_modules/libcipm/node_modules/minipass/LICENSE b/node_modules/libcipm/node_modules/minipass/LICENSE
deleted file mode 100644
index 20a476254..000000000
--- a/node_modules/libcipm/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc. and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/minipass/README.md b/node_modules/libcipm/node_modules/minipass/README.md
deleted file mode 100644
index c989beea0..000000000
--- a/node_modules/libcipm/node_modules/minipass/README.md
+++ /dev/null
@@ -1,606 +0,0 @@
-# minipass
-
-A _very_ minimal implementation of a [PassThrough
-stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
-
-[It's very
-fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
-for objects, strings, and buffers.
-
-Supports pipe()ing (including multi-pipe() and backpressure
-transmission), buffering data until either a `data` event handler or
-`pipe()` is added (so you don't lose the first chunk), and most other
-cases where PassThrough is a good idea.
-
-There is a `read()` method, but it's much more efficient to consume
-data from this stream via `'data'` events or by calling `pipe()` into
-some other stream. Calling `read()` requires the buffer to be
-flattened in some cases, which requires copying memory.
-
-There is also no `unpipe()` method. Once you start piping, there is
-no stopping it!
-
-If you set `objectMode: true` in the options, then whatever is written
-will be emitted. Otherwise, it'll do a minimal amount of Buffer
-copying to ensure proper Streams semantics when `read(n)` is called.
-
-`objectMode` can also be set by doing `stream.objectMode = true`, or by
-writing any non-string/non-buffer data. `objectMode` cannot be set to
-false once it is set.
-
-This is not a `through` or `through2` stream. It doesn't transform
-the data, it just passes it right through. If you want to transform
-the data, extend the class, and override the `write()` method. Once
-you're done transforming the data however you want, call
-`super.write()` with the transform output.
-
-For some examples of streams that extend Minipass in various ways, check
-out:
-
-- [minizlib](http://npm.im/minizlib)
-- [fs-minipass](http://npm.im/fs-minipass)
-- [tar](http://npm.im/tar)
-- [minipass-collect](http://npm.im/minipass-collect)
-- [minipass-flush](http://npm.im/minipass-flush)
-- [minipass-pipeline](http://npm.im/minipass-pipeline)
-- [tap](http://npm.im/tap)
- [tap-parser](http://npm.im/tap-parser)
- [treport](http://npm.im/treport)
-
-## Differences from Node.js Streams
-
-There are several things that make Minipass streams different from (and in
-some ways superior to) Node.js core streams.
-
-Please read these caveats if you are familiar with node-core streams and
-intend to use Minipass streams in your programs.
-
-### Timing
-
-Minipass streams are designed to support synchronous use-cases. Thus, data
-is emitted as soon as it is available, always. It is buffered until read,
-but no longer. Another way to look at it is that Minipass streams are
-exactly as synchronous as the logic that writes into them.
-
-This can be surprising if your code relies on `PassThrough.write()` always
-providing data on the next tick rather than the current one, or being able
-to call `resume()` and not have the entire buffer disappear immediately.
-
-However, without this synchronicity guarantee, there would be no way for
-Minipass to achieve the speeds it does, or support the synchronous use
-cases that it does. Simply put, waiting takes time.
-
-This non-deferring approach makes Minipass streams much easier to reason
-about, especially in the context of Promises and other flow-control
-mechanisms.
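-
-A tiny sketch of that timing guarantee:
-
-```js
-const Minipass = require('minipass')
-const mp = new Minipass({ encoding: 'utf8' })
-let seen = null
-mp.on('data', chunk => { seen = chunk })
-mp.write('hello')
-// the 'data' event fired synchronously, inside the write() call above
-console.log(seen) // 'hello'
-```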
-
-### No High/Low Water Marks
-
-Node.js core streams will optimistically fill up a buffer, returning `true`
-on all writes until the limit is hit, even if the data has nowhere to go.
-Then, they will not attempt to draw more data in until the buffer size dips
-below a minimum value.
-
-Minipass streams are much simpler. The `write()` method will return `true`
-if the data has somewhere to go (which is to say, given the timing
-guarantees, that the data is already there by the time `write()` returns).
-
-If the data has nowhere to go, then `write()` returns false, and the data
-sits in a buffer, to be drained out immediately as soon as anyone consumes
-it.
-
-### Hazards of Buffering (or: Why Minipass Is So Fast)
-
-Since data written to a Minipass stream is immediately written all the way
-through the pipeline, and `write()` always returns true/false based on
-whether the data was fully flushed, backpressure is communicated
-immediately to the upstream caller. This minimizes buffering.
-
-Consider this case:
-
-```js
-const {PassThrough} = require('stream')
-const p1 = new PassThrough({ highWaterMark: 1024 })
-const p2 = new PassThrough({ highWaterMark: 1024 })
-const p3 = new PassThrough({ highWaterMark: 1024 })
-const p4 = new PassThrough({ highWaterMark: 1024 })
-
-p1.pipe(p2).pipe(p3).pipe(p4)
-p4.on('data', () => console.log('made it through'))
-
-// this returns false and buffers, then writes to p2 on next tick (1)
-// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2)
-// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3)
-// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain'
-// on next tick (4)
-// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and
-// 'drain' on next tick (5)
-// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6)
-// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next
-// tick (7)
-
-p1.write(Buffer.alloc(2048)) // returns false
-```
-
-Along the way, the data was buffered and deferred at each stage, and
-multiple event deferrals happened, for an unblocked pipeline where it was
-perfectly safe to write all the way through!
-
-Furthermore, setting a `highWaterMark` of `1024` might lead someone reading
-the code to think an advisory maximum of 1KiB is being set for the
-pipeline. However, the actual advisory buffering level is the _sum_ of
-`highWaterMark` values, since each one has its own bucket.
-
-Consider the Minipass case:
-
-```js
-const m1 = new Minipass()
-const m2 = new Minipass()
-const m3 = new Minipass()
-const m4 = new Minipass()
-
-m1.pipe(m2).pipe(m3).pipe(m4)
-m4.on('data', () => console.log('made it through'))
-
-// m1 is flowing, so it writes the data to m2 immediately
-// m2 is flowing, so it writes the data to m3 immediately
-// m3 is flowing, so it writes the data to m4 immediately
-// m4 is flowing, so it fires the 'data' event immediately, returns true
-// m4's write returned true, so m3 is still flowing, returns true
-// m3's write returned true, so m2 is still flowing, returns true
-// m2's write returned true, so m1 is still flowing, returns true
-// No event deferrals or buffering along the way!
-
-m1.write(Buffer.alloc(2048)) // returns true
-```
-
-It is extremely unlikely that you _don't_ want to buffer any data written,
-or _ever_ buffer data that can be flushed all the way through. Neither
-node-core streams nor Minipass ever fail to buffer written data, but
-node-core streams do a lot of unnecessary buffering and pausing.
-
-As always, the faster implementation is the one that does less stuff and
-waits less time to do it.
-
-### Immediately emit `end` for empty streams (when not paused)
-
-If a stream is not paused, and `end()` is called before writing any data
-into it, then it will emit `end` immediately.
-
-If you have logic that occurs on the `end` event which you don't want to
-potentially happen immediately (for example, closing file descriptors,
-moving on to the next entry in an archive parse stream, etc.) then be sure
-to call `stream.pause()` on creation, and then `stream.resume()` once you
-are ready to respond to the `end` event.
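-
-A sketch of that pause-on-creation pattern:
-
-```js
-const Minipass = require('minipass')
-const mp = new Minipass()
-mp.pause() // hold the 'end' event until we are ready for it
-mp.end() // would otherwise emit 'end' right here
-mp.on('end', () => console.log('done'))
-mp.resume() // now 'end' fires and 'done' is logged
-```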
-
-### Emit `end` When Asked
-
-One hazard of immediately emitting `'end'` is that you may not yet have had
-a chance to add a listener. In order to avoid this hazard, Minipass
-streams safely re-emit the `'end'` event if a new listener is added after
-`'end'` has been emitted.
-
-Ie, if you do `stream.on('end', someFunction)`, and the stream has already
-emitted `end`, then it will call the handler right away. (You can think of
-this somewhat like attaching a new `.then(fn)` to a previously-resolved
-Promise.)
-
-To avoid calling handlers that do not expect multiple ends to occur, all
-listeners are removed from the `'end'` event whenever it is emitted.
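-
-For example, a late listener is still called (a small sketch):
-
-```js
-const Minipass = require('minipass')
-const mp = new Minipass()
-mp.end() // empty stream, so 'end' is emitted immediately
-mp.on('end', () => console.log('still notified')) // fires right away anyway
-```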
-
-### Impact of "immediate flow" on Tee-streams
-
-A "tee stream" is a stream piping to multiple destinations:
-
-```js
-const tee = new Minipass()
-tee.pipe(dest1)
-tee.pipe(dest2)
-tee.write('foo') // goes to both destinations
-```
-
-Since Minipass streams _immediately_ process any pending data through the
-pipeline when a new pipe destination is added, this can have surprising
-effects, especially when a stream comes in from some other function and may
-or may not have data in its buffer.
-
-```js
-// WARNING! WILL LOSE DATA!
-const src = new Minipass()
-src.write('foo')
-src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone
-src.pipe(dest2) // gets nothing!
-```
-
-The solution is to create a dedicated tee-stream junction that pipes to
-both locations, and then pipe to _that_ instead.
-
-```js
-// Safe example: tee to both places
-const src = new Minipass()
-src.write('foo')
-const tee = new Minipass()
-tee.pipe(dest1)
-tee.pipe(dest2)
-src.pipe(tee) // tee gets 'foo', pipes to both locations
-```
-
-The same caveat applies to `on('data')` event listeners. The first one
-added will _immediately_ receive all of the data, leaving nothing for the
-second:
-
-```js
-// WARNING! WILL LOSE DATA!
-const src = new Minipass()
-src.write('foo')
-src.on('data', handler1) // receives 'foo' right away
-src.on('data', handler2) // nothing to see here!
-```
-
-A dedicated tee-stream can be used in this case as well:
-
-```js
-// Safe example: tee to both data handlers
-const src = new Minipass()
-src.write('foo')
-const tee = new Minipass()
-tee.on('data', handler1)
-tee.on('data', handler2)
-src.pipe(tee)
-```
-
-## USAGE
-
-It's a stream! Use it like a stream and it'll most likely do what you want.
-
-```js
-const Minipass = require('minipass')
-const mp = new Minipass(options) // optional: { encoding, objectMode }
-mp.write('foo')
-mp.pipe(someOtherStream)
-mp.end('bar')
-```
-
-### OPTIONS
-
-* `encoding` How would you like the data coming _out_ of the stream to be
- encoded? Accepts any values that can be passed to `Buffer.toString()`.
-* `objectMode` Emit data exactly as it comes in. This will be flipped on
- by default if you write() something other than a string or Buffer at any
- point. Setting `objectMode: true` will prevent setting any encoding
- value.
-
-### API
-
-Implements the user-facing portions of Node.js's `Readable` and `Writable`
-streams.
-
-### Methods
-
-* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the
- base Minipass class, the same data will come out.) Returns `false` if
- the stream will buffer the next write, or true if it's still in
- "flowing" mode.
-* `end([chunk, [encoding]], [callback])` - Signal that you have no more
- data to write. This will queue an `end` event to be fired when all the
- data has been consumed.
-* `setEncoding(encoding)` - Set the encoding for data coming out of the
- stream. This can only be done once.
-* `pause()` - No more data for a while, please. This also prevents `end`
- from being emitted for empty streams until the stream is resumed.
-* `resume()` - Resume the stream. If there's data in the buffer, it is
- all flushed out and emitted immediately. Any buffered events are
- immediately emitted.
-* `pipe(dest)` - Send all output to the stream provided. There is no way
- to unpipe. When data is emitted, it is immediately written to any and
- all pipe destinations.
-* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters.
- Some events are given special treatment, however. (See below under
- "events".)
-* `promise()` - Returns a Promise that resolves when the stream emits
- `end`, or rejects if the stream emits `error`.
-* `collect()` - Return a Promise that resolves on `end` with an array
- containing each chunk of data that was emitted, or rejects if the
- stream emits `error`. Note that this consumes the stream data.
-* `concat()` - Same as `collect()`, but concatenates the data into a
- single Buffer object. Will reject the returned promise if the stream is
- in objectMode, or if it goes into objectMode by the end of the data.
-* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not
- provided, then consume all of it. If `n` bytes are not available, then
- it returns null. **Note** consuming streams in this way is less
- efficient, and can lead to unnecessary Buffer copying.
-* `destroy([er])` - Destroy the stream. If an error is provided, then an
- `'error'` event is emitted. If the stream has a `close()` method, and
- has not emitted a `'close'` event yet, then `stream.close()` will be
- called. Any Promises returned by `.promise()`, `.collect()` or
- `.concat()` will be rejected. After being destroyed, writing to the
- stream will emit an error. No more data will be emitted if the stream is
- destroyed, even if it was previously buffered.
-
-### Properties
-
-* `bufferLength` Read-only. Total number of bytes buffered, or in the case
- of objectMode, the total number of objects.
-* `encoding` The encoding that has been set. (Setting this is equivalent
- to calling `setEncoding(enc)` and has the same prohibition against
- setting multiple times.)
-* `flowing` Read-only. Boolean indicating whether a chunk written to the
- stream will be immediately emitted.
-* `emittedEnd` Read-only. Boolean indicating whether the end-ish events
- (ie, `end`, `prefinish`, `finish`) have been emitted. Note that
- listening on any end-ish event will immediately re-emit it if it has
- already been emitted.
-* `writable` Whether the stream is writable. Default `true`. Set to
- `false` when `end()` is called.
-* `readable` Whether the stream is readable. Default `true`.
-* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written
- to the stream that have not yet been emitted. (It's probably a bad idea
- to mess with this.)
-* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that
- this stream is piping into. (It's probably a bad idea to mess with
- this.)
-* `destroyed` A getter that indicates whether the stream was destroyed.
-* `paused` True if the stream has been explicitly paused, otherwise false.
-* `objectMode` Indicates whether the stream is in `objectMode`. Once set
- to `true`, it cannot be set to `false`.
-
-### Events
-
-* `data` Emitted when there's data to read. Argument is the data to read.
- This is never emitted while not flowing. If a listener is attached, that
- will resume the stream.
-* `end` Emitted when there's no more data to read. This will be emitted
- immediately for empty streams when `end()` is called. If a listener is
- attached, and `end` was already emitted, then it will be emitted again.
- All listeners are removed when `end` is emitted.
-* `prefinish` An end-ish event that follows the same logic as `end` and is
- emitted in the same conditions where `end` is emitted. Emitted after
- `'end'`.
-* `finish` An end-ish event that follows the same logic as `end` and is
- emitted in the same conditions where `end` is emitted. Emitted after
- `'prefinish'`.
-* `close` An indication that an underlying resource has been released.
- Minipass does not emit this event, but will defer it until after `end`
- has been emitted, since it throws off some stream libraries otherwise.
-* `drain` Emitted when the internal buffer empties, and it is again
- suitable to `write()` into the stream.
-* `readable` Emitted when data is buffered and ready to be read by a
- consumer.
-* `resume` Emitted when stream changes state from buffering to flowing
- mode. (Ie, when `resume` is called, `pipe` is called, or a `data` event
- listener is added.)
-
-### Static Methods
-
-* `Minipass.isStream(stream)` Returns `true` if the argument is a stream,
- and false otherwise. To be considered a stream, the object must be
- either an instance of Minipass, or an EventEmitter that has either a
- `pipe()` method, or both `write()` and `end()` methods. (Pretty much any
- stream in node-land will return `true` for this.)
-
-## EXAMPLES
-
-Here are some examples of things you can do with Minipass streams.
-
-### simple "are you done yet" promise
-
-```js
-mp.promise().then(() => {
- // stream is finished
-}, er => {
- // stream emitted an error
-})
-```
-
-### collecting
-
-```js
-mp.collect().then(all => {
- // all is an array of all the data emitted
- // encoding is supported in this case, so
- // the result will be a collection of strings if
- // an encoding is specified, or buffers/objects if not.
- //
- // In an async function, you may do
- // const data = await stream.collect()
-})
-```
-
-### collecting into a single blob
-
-This is a bit slower because it concatenates the data into one chunk for
-you, but if you're going to do it yourself anyway, it's convenient this
-way:
-
-```js
-mp.concat().then(onebigchunk => {
- // onebigchunk is a string if the stream
- // had an encoding set, or a buffer otherwise.
-})
-```
-
-### iteration
-
-You can iterate over streams synchronously or asynchronously on
-platforms that support it.
-
-Synchronous iteration will end when the currently available data is
-consumed, even if the `end` event has not been reached. In string and
-buffer mode, the data is concatenated, so unless multiple writes are
-occurring in the same tick as the `read()`, sync iteration loops will
-generally only have a single iteration.
-
-To consume chunks in this way exactly as they have been written, with
-no flattening, create the stream with the `{ objectMode: true }`
-option.
-
-```js
-const mp = new Minipass({ objectMode: true })
-mp.write('a')
-mp.write('b')
-for (let letter of mp) {
- console.log(letter) // a, b
-}
-mp.write('c')
-mp.write('d')
-for (let letter of mp) {
- console.log(letter) // c, d
-}
-mp.write('e')
-mp.end()
-for (let letter of mp) {
- console.log(letter) // e
-}
-for (let letter of mp) {
- console.log(letter) // nothing
-}
-```
-
-Asynchronous iteration will continue until the end event is reached,
-consuming all of the data.
-
-```js
-const mp = new Minipass({ encoding: 'utf8' })
-
-// some source of some data
-let i = 5
-const inter = setInterval(() => {
- if (i --> 0)
- mp.write(Buffer.from('foo\n', 'utf8'))
- else {
- mp.end()
- clearInterval(inter)
- }
-}, 100)
-
-// consume the data with asynchronous iteration
-async function consume () {
- for await (let chunk of mp) {
- console.log(chunk)
- }
- return 'ok'
-}
-
-consume().then(res => console.log(res))
-// logs `foo\n` 5 times, and then `ok`
-```
-
-### subclass that `console.log()`s everything written into it
-
-```js
-class Logger extends Minipass {
- write (chunk, encoding, callback) {
- console.log('WRITE', chunk, encoding)
- return super.write(chunk, encoding, callback)
- }
- end (chunk, encoding, callback) {
- console.log('END', chunk, encoding)
- return super.end(chunk, encoding, callback)
- }
-}
-
-someSource.pipe(new Logger()).pipe(someDest)
-```
-
-### same thing, but using an inline anonymous class
-
-```js
-// js classes are fun
-someSource
- .pipe(new (class extends Minipass {
- emit (ev, ...data) {
- // let's also log events, because debugging some weird thing
- console.log('EMIT', ev)
- return super.emit(ev, ...data)
- }
- write (chunk, encoding, callback) {
- console.log('WRITE', chunk, encoding)
- return super.write(chunk, encoding, callback)
- }
- end (chunk, encoding, callback) {
- console.log('END', chunk, encoding)
- return super.end(chunk, encoding, callback)
- }
- }))
- .pipe(someDest)
-```
-
-### subclass that defers 'end' for some reason
-
-```js
-class SlowEnd extends Minipass {
- emit (ev, ...args) {
- if (ev === 'end') {
- console.log('going to end, hold on a sec')
- setTimeout(() => {
- console.log('ok, ready to end now')
- super.emit('end', ...args)
- }, 100)
- } else {
- return super.emit(ev, ...args)
- }
- }
-}
-```
-
-### transform that creates newline-delimited JSON
-
-```js
-class NDJSONEncode extends Minipass {
- write (obj, cb) {
- try {
- // JSON.stringify can throw, emit an error on that
- return super.write(JSON.stringify(obj) + '\n', 'utf8', cb)
- } catch (er) {
- this.emit('error', er)
- }
- }
- end (obj, cb) {
- if (typeof obj === 'function') {
- cb = obj
- obj = undefined
- }
- if (obj !== undefined) {
- this.write(obj)
- }
- return super.end(cb)
- }
-}
-```
-
-### transform that parses newline-delimited JSON
-
-```js
-class NDJSONDecode extends Minipass {
- constructor (options) {
- // always be in object mode, as far as Minipass is concerned
- super({ objectMode: true })
- this._jsonBuffer = ''
- }
- write (chunk, encoding, cb) {
- if (typeof chunk === 'string' &&
- typeof encoding === 'string' &&
- encoding !== 'utf8') {
- chunk = Buffer.from(chunk, encoding).toString()
- } else if (Buffer.isBuffer(chunk)) {
- chunk = chunk.toString()
- }
- if (typeof encoding === 'function') {
- cb = encoding
- }
- const jsonData = (this._jsonBuffer + chunk).split('\n')
- this._jsonBuffer = jsonData.pop()
- for (let i = 0; i < jsonData.length; i++) {
- let parsed
- try {
- // JSON.parse can throw, emit an error on that
- parsed = JSON.parse(jsonData[i])
- } catch (er) {
- this.emit('error', er)
- continue
- }
- super.write(parsed)
- }
- if (cb)
- cb()
- }
-}
-```
diff --git a/node_modules/libcipm/node_modules/minipass/index.js b/node_modules/libcipm/node_modules/minipass/index.js
deleted file mode 100644
index c072352d4..000000000
--- a/node_modules/libcipm/node_modules/minipass/index.js
+++ /dev/null
@@ -1,537 +0,0 @@
-'use strict'
-const EE = require('events')
-const Yallist = require('yallist')
-const SD = require('string_decoder').StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-const DESTROYED = Symbol('destroyed')
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR = doIter && Symbol.asyncIterator
- || Symbol('asyncIterator not implemented')
-const ITERATOR = doIter && Symbol.iterator
- || Symbol('iterator not implemented')
-
-// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from
-// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.
-// .M, this is fine .\^/M..
-const B = Buffer.alloc ? Buffer
- : /* istanbul ignore next */ require('safe-buffer').Buffer
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev =>
- ev === 'end' ||
- ev === 'finish' ||
- ev === 'prefinish'
-
-const isArrayBuffer = b => b instanceof ArrayBuffer ||
- typeof b === 'object' &&
- b.constructor &&
- b.constructor.name === 'ArrayBuffer' &&
- b.byteLength >= 0
-
-const isArrayBufferView = b => !B.isBuffer(b) && ArrayBuffer.isView(b)
-
-module.exports = class Minipass extends EE {
- constructor (options) {
- super()
- this[FLOWING] = false
- // whether we're explicitly paused
- this[PAUSED] = false
- this.pipes = new Yallist()
- this.buffer = new Yallist()
- this[OBJECTMODE] = options && options.objectMode || false
- if (this[OBJECTMODE])
- this[ENCODING] = null
- else
- this[ENCODING] = options && options.encoding || null
- if (this[ENCODING] === 'buffer')
- this[ENCODING] = null
- this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
- this[EOF] = false
- this[EMITTED_END] = false
- this[EMITTING_END] = false
- this[CLOSED] = false
- this.writable = true
- this.readable = true
- this[BUFFERLENGTH] = 0
- this[DESTROYED] = false
- }
-
- get bufferLength () { return this[BUFFERLENGTH] }
-
- get encoding () { return this[ENCODING] }
- set encoding (enc) {
- if (this[OBJECTMODE])
- throw new Error('cannot set encoding in objectMode')
-
- if (this[ENCODING] && enc !== this[ENCODING] &&
- (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
- throw new Error('cannot change encoding')
-
- if (this[ENCODING] !== enc) {
- this[DECODER] = enc ? new SD(enc) : null
- if (this.buffer.length)
- this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
- }
-
- this[ENCODING] = enc
- }
-
- setEncoding (enc) {
- this.encoding = enc
- }
-
- get objectMode () { return this[OBJECTMODE] }
- set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ }
-
- write (chunk, encoding, cb) {
- if (this[EOF])
- throw new Error('write after end')
-
- if (this[DESTROYED]) {
- this.emit('error', Object.assign(
- new Error('Cannot call write after a stream was destroyed'),
- { code: 'ERR_STREAM_DESTROYED' }
- ))
- return true
- }
-
- if (typeof encoding === 'function')
- cb = encoding, encoding = 'utf8'
-
- if (!encoding)
- encoding = 'utf8'
-
- // convert array buffers and typed array views into buffers
- // at some point in the future, we may want to do the opposite!
- // leave strings and buffers as-is
- // anything else switches us into object mode
- if (!this[OBJECTMODE] && !B.isBuffer(chunk)) {
- if (isArrayBufferView(chunk))
- chunk = B.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
- else if (isArrayBuffer(chunk))
- chunk = B.from(chunk)
- else if (typeof chunk !== 'string')
- // use the setter so we throw if we have encoding set
- this.objectMode = true
- }
-
- // this ensures at this point that the chunk is a buffer or string
- // don't buffer it up or send it to the decoder
- if (!this.objectMode && !chunk.length) {
- const ret = this.flowing
- if (this[BUFFERLENGTH] !== 0)
- this.emit('readable')
- if (cb)
- cb()
- return ret
- }
-
- // fast-path writing strings of same encoding to a stream with
- // an empty buffer, skipping the buffer/decoder dance
- if (typeof chunk === 'string' && !this[OBJECTMODE] &&
- // unless it is a string already ready for us to use
- !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
- chunk = B.from(chunk, encoding)
- }
-
- if (B.isBuffer(chunk) && this[ENCODING])
- chunk = this[DECODER].write(chunk)
-
- try {
- return this.flowing
- ? (this.emit('data', chunk), this.flowing)
- : (this[BUFFERPUSH](chunk), false)
- } finally {
- if (this[BUFFERLENGTH] !== 0)
- this.emit('readable')
- if (cb)
- cb()
- }
- }
-
- read (n) {
- if (this[DESTROYED])
- return null
-
- try {
- if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
- return null
-
- if (this[OBJECTMODE])
- n = null
-
- if (this.buffer.length > 1 && !this[OBJECTMODE]) {
- if (this.encoding)
- this.buffer = new Yallist([
- Array.from(this.buffer).join('')
- ])
- else
- this.buffer = new Yallist([
- B.concat(Array.from(this.buffer), this[BUFFERLENGTH])
- ])
- }
-
- return this[READ](n || null, this.buffer.head.value)
- } finally {
- this[MAYBE_EMIT_END]()
- }
- }
-
- [READ] (n, chunk) {
- if (n === chunk.length || n === null)
- this[BUFFERSHIFT]()
- else {
- this.buffer.head.value = chunk.slice(n)
- chunk = chunk.slice(0, n)
- this[BUFFERLENGTH] -= n
- }
-
- this.emit('data', chunk)
-
- if (!this.buffer.length && !this[EOF])
- this.emit('drain')
-
- return chunk
- }
-
- end (chunk, encoding, cb) {
- if (typeof chunk === 'function')
- cb = chunk, chunk = null
- if (typeof encoding === 'function')
- cb = encoding, encoding = 'utf8'
- if (chunk)
- this.write(chunk, encoding)
- if (cb)
- this.once('end', cb)
- this[EOF] = true
- this.writable = false
-
- // if we haven't written anything, then go ahead and emit,
- // even if we're not reading.
- // we'll re-emit if a new 'end' listener is added anyway.
- // This makes MP more suitable to write-only use cases.
- if (this.flowing || !this[PAUSED])
- this[MAYBE_EMIT_END]()
- return this
- }
-
- // don't let the internal resume be overwritten
- [RESUME] () {
- if (this[DESTROYED])
- return
-
- this[PAUSED] = false
- this[FLOWING] = true
- this.emit('resume')
- if (this.buffer.length)
- this[FLUSH]()
- else if (this[EOF])
- this[MAYBE_EMIT_END]()
- else
- this.emit('drain')
- }
-
- resume () {
- return this[RESUME]()
- }
-
- pause () {
- this[FLOWING] = false
- this[PAUSED] = true
- }
-
- get destroyed () {
- return this[DESTROYED]
- }
-
- get flowing () {
- return this[FLOWING]
- }
-
- get paused () {
- return this[PAUSED]
- }
-
- [BUFFERPUSH] (chunk) {
- if (this[OBJECTMODE])
- this[BUFFERLENGTH] += 1
- else
- this[BUFFERLENGTH] += chunk.length
- return this.buffer.push(chunk)
- }
-
- [BUFFERSHIFT] () {
- if (this.buffer.length) {
- if (this[OBJECTMODE])
- this[BUFFERLENGTH] -= 1
- else
- this[BUFFERLENGTH] -= this.buffer.head.value.length
- }
- return this.buffer.shift()
- }
-
- [FLUSH] () {
- do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
-
- if (!this.buffer.length && !this[EOF])
- this.emit('drain')
- }
-
- [FLUSHCHUNK] (chunk) {
- return chunk ? (this.emit('data', chunk), this.flowing) : false
- }
-
- pipe (dest, opts) {
- if (this[DESTROYED])
- return
-
- const ended = this[EMITTED_END]
- opts = opts || {}
- if (dest === process.stdout || dest === process.stderr)
- opts.end = false
- else
- opts.end = opts.end !== false
-
- const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
- this.pipes.push(p)
-
- dest.on('drain', p.ondrain)
- this[RESUME]()
- // piping an ended stream ends immediately
- if (ended && p.opts.end)
- p.dest.end()
- return dest
- }
-
- addListener (ev, fn) {
- return this.on(ev, fn)
- }
-
- on (ev, fn) {
- try {
- return super.on(ev, fn)
- } finally {
- if (ev === 'data' && !this.pipes.length && !this.flowing)
- this[RESUME]()
- else if (isEndish(ev) && this[EMITTED_END]) {
- super.emit(ev)
- this.removeAllListeners(ev)
- }
- }
- }
-
- get emittedEnd () {
- return this[EMITTED_END]
- }
-
- [MAYBE_EMIT_END] () {
- if (!this[EMITTING_END] &&
- !this[EMITTED_END] &&
- !this[DESTROYED] &&
- this.buffer.length === 0 &&
- this[EOF]) {
- this[EMITTING_END] = true
- this.emit('end')
- this.emit('prefinish')
- this.emit('finish')
- if (this[CLOSED])
- this.emit('close')
- this[EMITTING_END] = false
- }
- }
-
- emit (ev, data) {
- // error and close are only events allowed after calling destroy()
- if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
- return
- else if (ev === 'data') {
- if (!data)
- return
-
- if (this.pipes.length)
- this.pipes.forEach(p =>
- p.dest.write(data) === false && this.pause())
- } else if (ev === 'end') {
- // only actual end gets this treatment
- if (this[EMITTED_END] === true)
- return
-
- this[EMITTED_END] = true
- this.readable = false
-
- if (this[DECODER]) {
- data = this[DECODER].end()
- if (data) {
- this.pipes.forEach(p => p.dest.write(data))
- super.emit('data', data)
- }
- }
-
- this.pipes.forEach(p => {
- p.dest.removeListener('drain', p.ondrain)
- if (p.opts.end)
- p.dest.end()
- })
- } else if (ev === 'close') {
- this[CLOSED] = true
- // don't emit close before 'end' and 'finish'
- if (!this[EMITTED_END] && !this[DESTROYED])
- return
- }
-
- // TODO: replace with a spread operator when Node v4 support drops
- const args = new Array(arguments.length)
- args[0] = ev
- args[1] = data
- if (arguments.length > 2) {
- for (let i = 2; i < arguments.length; i++) {
- args[i] = arguments[i]
- }
- }
-
- try {
- return super.emit.apply(this, args)
- } finally {
- if (!isEndish(ev))
- this[MAYBE_EMIT_END]()
- else
- this.removeAllListeners(ev)
- }
- }
-
- // const all = await stream.collect()
- collect () {
- const buf = []
- buf.dataLength = 0
- this.on('data', c => {
- buf.push(c)
- buf.dataLength += c.length
- })
- return this.promise().then(() => buf)
- }
-
- // const data = await stream.concat()
- concat () {
- return this[OBJECTMODE]
- ? Promise.reject(new Error('cannot concat in objectMode'))
- : this.collect().then(buf =>
- this[OBJECTMODE]
- ? Promise.reject(new Error('cannot concat in objectMode'))
- : this[ENCODING] ? buf.join('') : B.concat(buf, buf.dataLength))
- }
-
- // stream.promise().then(() => done, er => emitted error)
- promise () {
- return new Promise((resolve, reject) => {
- this.on(DESTROYED, () => reject(new Error('stream destroyed')))
- this.on('end', () => resolve())
- this.on('error', er => reject(er))
- })
- }
-
- // for await (let chunk of stream)
- [ASYNCITERATOR] () {
- const next = () => {
- const res = this.read()
- if (res !== null)
- return Promise.resolve({ done: false, value: res })
-
- if (this[EOF])
- return Promise.resolve({ done: true })
-
- let resolve = null
- let reject = null
- const onerr = er => {
- this.removeListener('data', ondata)
- this.removeListener('end', onend)
- reject(er)
- }
- const ondata = value => {
- this.removeListener('error', onerr)
- this.removeListener('end', onend)
- this.pause()
- resolve({ value: value, done: !!this[EOF] })
- }
- const onend = () => {
- this.removeListener('error', onerr)
- this.removeListener('data', ondata)
- resolve({ done: true })
- }
- const ondestroy = () => onerr(new Error('stream destroyed'))
- return new Promise((res, rej) => {
- reject = rej
- resolve = res
- this.once(DESTROYED, ondestroy)
- this.once('error', onerr)
- this.once('end', onend)
- this.once('data', ondata)
- })
- }
-
- return { next }
- }
-
- // for (let chunk of stream)
- [ITERATOR] () {
- const next = () => {
- const value = this.read()
- const done = value === null
- return { value, done }
- }
- return { next }
- }
-
- destroy (er) {
- if (this[DESTROYED]) {
- if (er)
- this.emit('error', er)
- else
- this.emit(DESTROYED)
- return this
- }
-
- this[DESTROYED] = true
-
- // throw away all buffered data, it's never coming out
- this.buffer = new Yallist()
- this[BUFFERLENGTH] = 0
-
- if (typeof this.close === 'function' && !this[CLOSED])
- this.close()
-
- if (er)
- this.emit('error', er)
- else // if no error to emit, still reject pending promises
- this.emit(DESTROYED)
-
- return this
- }
-
- static isStream (s) {
- return !!s && (s instanceof Minipass || s instanceof EE && (
- typeof s.pipe === 'function' || // readable
- (typeof s.write === 'function' && typeof s.end === 'function') // writable
- ))
- }
-}
diff --git a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/LICENSE b/node_modules/libcipm/node_modules/minipass/node_modules/yallist/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/README.md b/node_modules/libcipm/node_modules/minipass/node_modules/yallist/README.md
deleted file mode 100644
index f58610186..000000000
--- a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/README.md
+++ /dev/null
@@ -1,204 +0,0 @@
-# yallist
-
-Yet Another Linked List
-
-There are many doubly-linked list implementations like it, but this
-one is mine.
-
-For when an array would be too big, and a Map can't be iterated in
-reverse order.
-
-
-[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
-
-## basic usage
-
-```javascript
-var yallist = require('yallist')
-var myList = yallist.create([1, 2, 3])
-myList.push('foo')
-myList.unshift('bar')
-// of course pop() and shift() are there, too
-console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
-myList.forEach(function (k) {
- // walk the list head to tail
-})
-myList.forEachReverse(function (k, index, list) {
- // walk the list tail to head
-})
-var myDoubledList = myList.map(function (k) {
- return k + k
-})
-// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
-// mapReverse is also a thing
-var myDoubledListReverse = myList.mapReverse(function (k) {
- return k + k
-}) // ['foofoo', 6, 4, 2, 'barbar']
-
-var reduced = myList.reduce(function (set, entry) {
- set += entry
- return set
-}, 'start')
-console.log(reduced) // 'startfoo123bar'
-```
-
-## api
-
-The whole API is considered "public".
-
-Functions with the same name as an Array method work more or less the
-same way.
-
-There are reverse versions of most things because that's the point.
-
-### Yallist
-
-Default export, the class that holds and manages a list.
-
-Call it with either a forEach-able (like an array) or a set of
-arguments, to initialize the list.
-
-The Array-ish methods all act like you'd expect. No magic length,
-though, so if you change that it won't automatically prune or add
-empty spots.
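-
-A sketch of the two initialization forms mentioned above:
-
-```javascript
-var Yallist = require('yallist')
-var fromArray = new Yallist([1, 2, 3]) // from any forEach-able
-var fromArgs = new Yallist(1, 2, 3) // or from a plain argument list
-```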
-
-### Yallist.create(..)
-
-Alias for Yallist function. Some people like factories.
-
-#### yallist.head
-
-The first node in the list
-
-#### yallist.tail
-
-The last node in the list
-
-#### yallist.length
-
-The number of nodes in the list. (Change this at your peril. It is
-not magic like Array length.)
-
-#### yallist.toArray()
-
-Convert the list to an array.
-
-#### yallist.forEach(fn, [thisp])
-
-Call a function on each item in the list.
-
-#### yallist.forEachReverse(fn, [thisp])
-
-Call a function on each item in the list, in reverse order.
-
-#### yallist.get(n)
-
-Get the data at position `n` in the list. If you use this a lot,
-probably better off just using an Array.
-
-#### yallist.getReverse(n)
-
-Get the data at position `n`, counting from the tail.
-
-#### yallist.map(fn, thisp)
-
-Create a new Yallist with the result of calling the function on each
-item.
-
-#### yallist.mapReverse(fn, thisp)
-
-Same as `map`, but in reverse.
-
-#### yallist.pop()
-
-Get the data from the list tail, and remove the tail from the list.
-
-#### yallist.push(item, ...)
-
-Insert one or more items to the tail of the list.
-
-#### yallist.reduce(fn, initialValue)
-
-Like Array.reduce.
-
-#### yallist.reduceReverse
-
-Like Array.reduce, but in reverse.
-
-#### yallist.reverse
-
-Reverse the list in place.
-
-#### yallist.shift()
-
-Get the data from the list head, and remove the head from the list.
-
-#### yallist.slice([from], [to])
-
-Just like Array.slice, but returns a new Yallist.
-
-#### yallist.sliceReverse([from], [to])
-
-Just like yallist.slice, but the result is returned in reverse.
-
-#### yallist.toArray()
-
-Create an array representation of the list.
-
-#### yallist.toArrayReverse()
-
-Create a reversed array representation of the list.
-
-#### yallist.unshift(item, ...)
-
-Insert one or more items to the head of the list.
-
-#### yallist.unshiftNode(node)
-
-Move a Node object to the front of the list. (That is, pull it out of
-wherever it lives, and make it the new head.)
-
-If the node belongs to a different list, then that list will remove it
-first.
-
-#### yallist.pushNode(node)
-
-Move a Node object to the end of the list. (That is, pull it out of
-wherever it lives, and make it the new tail.)
-
-If the node belongs to a list already, then that list will remove it
-first.
-
-#### yallist.removeNode(node)
-
-Remove a node from the list, preserving referential integrity of head
-and tail and other nodes.
-
-Will throw an error if you try to have a list remove a node that
-doesn't belong to it.
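-
-A small sketch of moving a node between lists with the methods above (the
-values are just placeholders):
-
-```javascript
-var Yallist = require('yallist')
-var a = Yallist.create([1, 2, 3])
-var b = Yallist.create(['x'])
-var node = a.head.next // the node holding 2
-b.pushNode(node) // pulled out of `a`, now the tail of `b`
-console.log(a.toArray()) // [1, 3]
-console.log(b.toArray()) // ['x', 2]
-```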
-
-### Yallist.Node
-
-The class that holds the data and is actually the list.
-
-Call with `var n = new Node(value, previousNode, nextNode)`
-
-Note that if you do direct operations on Nodes themselves, it's very
-easy to get into weird states where the list is broken. Be careful :)
-
-#### node.next
-
-The next node in the list.
-
-#### node.prev
-
-The previous node in the list.
-
-#### node.value
-
-The data the node contains.
-
-#### node.list
-
-The list to which this node belongs. (Null if it does not belong to
-any list.)
diff --git a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/iterator.js b/node_modules/libcipm/node_modules/minipass/node_modules/yallist/iterator.js
deleted file mode 100644
index d41c97a19..000000000
--- a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/iterator.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-module.exports = function (Yallist) {
- Yallist.prototype[Symbol.iterator] = function* () {
- for (let walker = this.head; walker; walker = walker.next) {
- yield walker.value
- }
- }
-}
diff --git a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/package.json b/node_modules/libcipm/node_modules/minipass/node_modules/yallist/package.json
deleted file mode 100644
index 392e8060a..000000000
--- a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "yallist@^3.0.0",
- "_id": "yallist@3.1.1",
- "_inBundle": false,
- "_integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
- "_location": "/libcipm/minipass/yallist",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "yallist@^3.0.0",
- "name": "yallist",
- "escapedName": "yallist",
- "rawSpec": "^3.0.0",
- "saveSpec": null,
- "fetchSpec": "^3.0.0"
- },
- "_requiredBy": [
- "/libcipm/minipass"
- ],
- "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
- "_shasum": "dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd",
- "_spec": "yallist@^3.0.0",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/libcipm/node_modules/minipass",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/yallist/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Yet Another Linked List",
- "devDependencies": {
- "tap": "^12.1.0"
- },
- "directories": {
- "test": "test"
- },
- "files": [
- "yallist.js",
- "iterator.js"
- ],
- "homepage": "https://github.com/isaacs/yallist#readme",
- "license": "ISC",
- "main": "yallist.js",
- "name": "yallist",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/yallist.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap test/*.js --100"
- },
- "version": "3.1.1"
-}
diff --git a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/yallist.js b/node_modules/libcipm/node_modules/minipass/node_modules/yallist/yallist.js
deleted file mode 100644
index ed4e7303a..000000000
--- a/node_modules/libcipm/node_modules/minipass/node_modules/yallist/yallist.js
+++ /dev/null
@@ -1,426 +0,0 @@
-'use strict'
-module.exports = Yallist
-
-Yallist.Node = Node
-Yallist.create = Yallist
-
-function Yallist (list) {
- var self = this
- if (!(self instanceof Yallist)) {
- self = new Yallist()
- }
-
- self.tail = null
- self.head = null
- self.length = 0
-
- if (list && typeof list.forEach === 'function') {
- list.forEach(function (item) {
- self.push(item)
- })
- } else if (arguments.length > 0) {
- for (var i = 0, l = arguments.length; i < l; i++) {
- self.push(arguments[i])
- }
- }
-
- return self
-}
-
-Yallist.prototype.removeNode = function (node) {
- if (node.list !== this) {
- throw new Error('removing node which does not belong to this list')
- }
-
- var next = node.next
- var prev = node.prev
-
- if (next) {
- next.prev = prev
- }
-
- if (prev) {
- prev.next = next
- }
-
- if (node === this.head) {
- this.head = next
- }
- if (node === this.tail) {
- this.tail = prev
- }
-
- node.list.length--
- node.next = null
- node.prev = null
- node.list = null
-
- return next
-}
-
-Yallist.prototype.unshiftNode = function (node) {
- if (node === this.head) {
- return
- }
-
- if (node.list) {
- node.list.removeNode(node)
- }
-
- var head = this.head
- node.list = this
- node.next = head
- if (head) {
- head.prev = node
- }
-
- this.head = node
- if (!this.tail) {
- this.tail = node
- }
- this.length++
-}
-
-Yallist.prototype.pushNode = function (node) {
- if (node === this.tail) {
- return
- }
-
- if (node.list) {
- node.list.removeNode(node)
- }
-
- var tail = this.tail
- node.list = this
- node.prev = tail
- if (tail) {
- tail.next = node
- }
-
- this.tail = node
- if (!this.head) {
- this.head = node
- }
- this.length++
-}
-
-Yallist.prototype.push = function () {
- for (var i = 0, l = arguments.length; i < l; i++) {
- push(this, arguments[i])
- }
- return this.length
-}
-
-Yallist.prototype.unshift = function () {
- for (var i = 0, l = arguments.length; i < l; i++) {
- unshift(this, arguments[i])
- }
- return this.length
-}
-
-Yallist.prototype.pop = function () {
- if (!this.tail) {
- return undefined
- }
-
- var res = this.tail.value
- this.tail = this.tail.prev
- if (this.tail) {
- this.tail.next = null
- } else {
- this.head = null
- }
- this.length--
- return res
-}
-
-Yallist.prototype.shift = function () {
- if (!this.head) {
- return undefined
- }
-
- var res = this.head.value
- this.head = this.head.next
- if (this.head) {
- this.head.prev = null
- } else {
- this.tail = null
- }
- this.length--
- return res
-}
-
-Yallist.prototype.forEach = function (fn, thisp) {
- thisp = thisp || this
- for (var walker = this.head, i = 0; walker !== null; i++) {
- fn.call(thisp, walker.value, i, this)
- walker = walker.next
- }
-}
-
-Yallist.prototype.forEachReverse = function (fn, thisp) {
- thisp = thisp || this
- for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
- fn.call(thisp, walker.value, i, this)
- walker = walker.prev
- }
-}
-
-Yallist.prototype.get = function (n) {
- for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.next
- }
- if (i === n && walker !== null) {
- return walker.value
- }
-}
-
-Yallist.prototype.getReverse = function (n) {
- for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.prev
- }
- if (i === n && walker !== null) {
- return walker.value
- }
-}
-
-Yallist.prototype.map = function (fn, thisp) {
- thisp = thisp || this
- var res = new Yallist()
- for (var walker = this.head; walker !== null;) {
- res.push(fn.call(thisp, walker.value, this))
- walker = walker.next
- }
- return res
-}
-
-Yallist.prototype.mapReverse = function (fn, thisp) {
- thisp = thisp || this
- var res = new Yallist()
- for (var walker = this.tail; walker !== null;) {
- res.push(fn.call(thisp, walker.value, this))
- walker = walker.prev
- }
- return res
-}
-
-Yallist.prototype.reduce = function (fn, initial) {
- var acc
- var walker = this.head
- if (arguments.length > 1) {
- acc = initial
- } else if (this.head) {
- walker = this.head.next
- acc = this.head.value
- } else {
- throw new TypeError('Reduce of empty list with no initial value')
- }
-
- for (var i = 0; walker !== null; i++) {
- acc = fn(acc, walker.value, i)
- walker = walker.next
- }
-
- return acc
-}
-
-Yallist.prototype.reduceReverse = function (fn, initial) {
- var acc
- var walker = this.tail
- if (arguments.length > 1) {
- acc = initial
- } else if (this.tail) {
- walker = this.tail.prev
- acc = this.tail.value
- } else {
- throw new TypeError('Reduce of empty list with no initial value')
- }
-
- for (var i = this.length - 1; walker !== null; i--) {
- acc = fn(acc, walker.value, i)
- walker = walker.prev
- }
-
- return acc
-}
-
-Yallist.prototype.toArray = function () {
- var arr = new Array(this.length)
- for (var i = 0, walker = this.head; walker !== null; i++) {
- arr[i] = walker.value
- walker = walker.next
- }
- return arr
-}
-
-Yallist.prototype.toArrayReverse = function () {
- var arr = new Array(this.length)
- for (var i = 0, walker = this.tail; walker !== null; i++) {
- arr[i] = walker.value
- walker = walker.prev
- }
- return arr
-}
-
-Yallist.prototype.slice = function (from, to) {
- to = to || this.length
- if (to < 0) {
- to += this.length
- }
- from = from || 0
- if (from < 0) {
- from += this.length
- }
- var ret = new Yallist()
- if (to < from || to < 0) {
- return ret
- }
- if (from < 0) {
- from = 0
- }
- if (to > this.length) {
- to = this.length
- }
- for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
- walker = walker.next
- }
- for (; walker !== null && i < to; i++, walker = walker.next) {
- ret.push(walker.value)
- }
- return ret
-}
-
-Yallist.prototype.sliceReverse = function (from, to) {
- to = to || this.length
- if (to < 0) {
- to += this.length
- }
- from = from || 0
- if (from < 0) {
- from += this.length
- }
- var ret = new Yallist()
- if (to < from || to < 0) {
- return ret
- }
- if (from < 0) {
- from = 0
- }
- if (to > this.length) {
- to = this.length
- }
- for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
- walker = walker.prev
- }
- for (; walker !== null && i > from; i--, walker = walker.prev) {
- ret.push(walker.value)
- }
- return ret
-}
-
-Yallist.prototype.splice = function (start, deleteCount /*, ...nodes */) {
- if (start > this.length) {
- start = this.length - 1
- }
- if (start < 0) {
- start = this.length + start;
- }
-
- for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
- walker = walker.next
- }
-
- var ret = []
- for (var i = 0; walker && i < deleteCount; i++) {
- ret.push(walker.value)
- walker = this.removeNode(walker)
- }
- if (walker === null) {
- walker = this.tail
- }
-
- if (walker !== this.head && walker !== this.tail) {
- walker = walker.prev
- }
-
- for (var i = 2; i < arguments.length; i++) {
- walker = insert(this, walker, arguments[i])
- }
- return ret;
-}
-
-Yallist.prototype.reverse = function () {
- var head = this.head
- var tail = this.tail
- for (var walker = head; walker !== null; walker = walker.prev) {
- var p = walker.prev
- walker.prev = walker.next
- walker.next = p
- }
- this.head = tail
- this.tail = head
- return this
-}
-
-function insert (self, node, value) {
- var inserted = node === self.head ?
- new Node(value, null, node, self) :
- new Node(value, node, node.next, self)
-
- if (inserted.next === null) {
- self.tail = inserted
- }
- if (inserted.prev === null) {
- self.head = inserted
- }
-
- self.length++
-
- return inserted
-}
-
-function push (self, item) {
- self.tail = new Node(item, self.tail, null, self)
- if (!self.head) {
- self.head = self.tail
- }
- self.length++
-}
-
-function unshift (self, item) {
- self.head = new Node(item, null, self.head, self)
- if (!self.tail) {
- self.tail = self.head
- }
- self.length++
-}
-
-function Node (value, prev, next, list) {
- if (!(this instanceof Node)) {
- return new Node(value, prev, next, list)
- }
-
- this.list = list
- this.value = value
-
- if (prev) {
- prev.next = this
- this.prev = prev
- } else {
- this.prev = null
- }
-
- if (next) {
- next.prev = this
- this.next = next
- } else {
- this.next = null
- }
-}
-
-try {
- // add if support for Symbol.iterator is present
- require('./iterator.js')(Yallist)
-} catch (er) {}
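(Editor's note, not part of the patch: illustrative usage of the Yallist API removed above, including the Symbol.iterator shim from iterator.js.)

```js
const Yallist = require('yallist')

const list = new Yallist()
list.push(1, 2, 3)                            // returns the new length, 3
list.unshift(0)                               // list is now 0, 1, 2, 3

console.log(list.toArray())                   // [ 0, 1, 2, 3 ]
console.log(list.get(2))                      // 2
console.log(list.map(x => x * 2).toArray())   // [ 0, 2, 4, 6 ]
console.log([...list])                        // [ 0, 1, 2, 3 ], via the Symbol.iterator shim
console.log(list.pop())                       // 3
console.log(list.shift())                     // 0
```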
diff --git a/node_modules/libcipm/node_modules/minipass/package.json b/node_modules/libcipm/node_modules/minipass/package.json
deleted file mode 100644
index c03c3363a..000000000
--- a/node_modules/libcipm/node_modules/minipass/package.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
- "_from": "minipass@^2.3.5",
- "_id": "minipass@2.9.0",
- "_inBundle": false,
- "_integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
- "_location": "/libcipm/minipass",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "minipass@^2.3.5",
- "name": "minipass",
- "escapedName": "minipass",
- "rawSpec": "^2.3.5",
- "saveSpec": null,
- "fetchSpec": "^2.3.5"
- },
- "_requiredBy": [
- "/libcipm/fs-minipass",
- "/libcipm/minizlib",
- "/libcipm/pacote",
- "/libcipm/tar"
- ],
- "_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
- "_shasum": "e713762e7d3e32fed803115cf93e04bca9fcc9a6",
- "_spec": "minipass@^2.3.5",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/minipass/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "safe-buffer": "^5.1.2",
- "yallist": "^3.0.0"
- },
- "deprecated": false,
- "description": "minimal implementation of a PassThrough stream",
- "devDependencies": {
- "end-of-stream": "^1.4.0",
- "tap": "^14.6.5",
- "through2": "^2.0.3"
- },
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/isaacs/minipass#readme",
- "keywords": [
- "passthrough",
- "stream"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "minipass",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minipass.git"
- },
- "scripts": {
- "postpublish": "git push origin --follow-tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true
- },
- "version": "2.9.0"
-}
diff --git a/node_modules/libcipm/node_modules/minizlib/LICENSE b/node_modules/libcipm/node_modules/minizlib/LICENSE
deleted file mode 100644
index ffce7383f..000000000
--- a/node_modules/libcipm/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright Isaac Z. Schlueter and Contributors
-Copyright Node.js contributors. All rights reserved.
-Copyright Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/libcipm/node_modules/minizlib/README.md b/node_modules/libcipm/node_modules/minizlib/README.md
deleted file mode 100644
index 4097b8522..000000000
--- a/node_modules/libcipm/node_modules/minizlib/README.md
+++ /dev/null
@@ -1,53 +0,0 @@
-# minizlib
-
-A fast zlib stream built on [minipass](http://npm.im/minipass) and
-Node.js's zlib binding.
-
-This module was created to serve the needs of
-[node-tar](http://npm.im/tar) and
-[minipass-fetch](http://npm.im/minipass-fetch).
-
-Brotli is supported in versions of node with a Brotli binding.
-
-## How does this differ from the streams in `require('zlib')`?
-
-First, there are no convenience methods to compress or decompress a
-buffer. If you want those, use the built-in `zlib` module. This is
-only streams. That being said, Minipass streams make it fairly easy to
-use as one-liners: `new zlib.Deflate().end(data).read()` will return the
-deflate compressed result.
-
-This module compresses and decompresses the data as fast as you feed
-it in. It is synchronous, and runs on the main process thread. Zlib
-and Brotli operations can be CPU-intensive, but they're very fast, and doing it
-this way means much less bookkeeping and artificial deferral.
-
-Node's built in zlib streams are built on top of `stream.Transform`.
-They do the maximally safe thing with respect to consistent
-asynchrony, buffering, and backpressure.
-
-See [Minipass](http://npm.im/minipass) for more on the differences between
-Node.js core streams and Minipass streams, and the convenience methods
-provided by that class.
-
-## Classes
-
-- Deflate
-- Inflate
-- Gzip
-- Gunzip
-- DeflateRaw
-- InflateRaw
-- Unzip
-- BrotliCompress (Node v10 and higher)
-- BrotliDecompress (Node v10 and higher)
-
-## USAGE
-
-```js
-const zlib = require('minizlib')
-const input = sourceOfCompressedData()
-const decode = new zlib.BrotliDecompress()
-const output = whereToWriteTheDecodedData()
-input.pipe(decode).pipe(output)
-```
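(Editor's note, not part of the patch: a sketch of the synchronous one-liner style mentioned in the README above, using the classes this hunk removes; a gzip round trip is shown since it needs no external data source.)

```js
const zlib = require('minizlib')

const input = Buffer.from('hello, world')
const gz = new zlib.Gzip().end(input).read()      // compressed Buffer
const out = new zlib.Gunzip().end(gz).read()      // decompressed Buffer

console.log(out.toString())                       // 'hello, world'
```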
diff --git a/node_modules/libcipm/node_modules/minizlib/constants.js b/node_modules/libcipm/node_modules/minizlib/constants.js
deleted file mode 100644
index 641ebc731..000000000
--- a/node_modules/libcipm/node_modules/minizlib/constants.js
+++ /dev/null
@@ -1,115 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-const realZlibConstants = require('zlib').constants ||
- /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
-
-module.exports = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants))
diff --git a/node_modules/libcipm/node_modules/minizlib/index.js b/node_modules/libcipm/node_modules/minizlib/index.js
deleted file mode 100644
index 295047b9c..000000000
--- a/node_modules/libcipm/node_modules/minizlib/index.js
+++ /dev/null
@@ -1,320 +0,0 @@
-'use strict'
-
-const assert = require('assert')
-const Buffer = require('buffer').Buffer
-const realZlib = require('zlib')
-
-const constants = exports.constants = require('./constants.js')
-const Minipass = require('minipass')
-
-const OriginalBufferConcat = Buffer.concat
-
-class ZlibError extends Error {
- constructor (err) {
- super('zlib: ' + err.message)
- this.code = err.code
- this.errno = err.errno
- /* istanbul ignore if */
- if (!this.code)
- this.code = 'ZLIB_ERROR'
-
- this.message = 'zlib: ' + err.message
- Error.captureStackTrace(this, this.constructor)
- }
-
- get name () {
- return 'ZlibError'
- }
-}
-
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _opts = Symbol('opts')
-const _flushFlag = Symbol('flushFlag')
-const _finishFlushFlag = Symbol('finishFlushFlag')
-const _fullFlushFlag = Symbol('fullFlushFlag')
-const _handle = Symbol('handle')
-const _onError = Symbol('onError')
-const _sawError = Symbol('sawError')
-const _level = Symbol('level')
-const _strategy = Symbol('strategy')
-const _ended = Symbol('ended')
-const _defaultFullFlush = Symbol('_defaultFullFlush')
-
-class ZlibBase extends Minipass {
- constructor (opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor')
-
- super(opts)
- this[_ended] = false
- this[_opts] = opts
-
- this[_flushFlag] = opts.flush
- this[_finishFlushFlag] = opts.finishFlush
- // this will throw if any options are invalid for the class selected
- try {
- this[_handle] = new realZlib[mode](opts)
- } catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er)
- }
-
- this[_onError] = (err) => {
- this[_sawError] = true
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close()
- this.emit('error', err)
- }
-
- this[_handle].on('error', er => this[_onError](new ZlibError(er)))
- this.once('end', () => this.close)
- }
-
- close () {
- if (this[_handle]) {
- this[_handle].close()
- this[_handle] = null
- this.emit('close')
- }
- }
-
- reset () {
- if (!this[_sawError]) {
- assert(this[_handle], 'zlib binding closed')
- return this[_handle].reset()
- }
- }
-
- flush (flushFlag) {
- if (this.ended)
- return
-
- if (typeof flushFlag !== 'number')
- flushFlag = this[_fullFlushFlag]
- this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
- }
-
- end (chunk, encoding, cb) {
- if (chunk)
- this.write(chunk, encoding)
- this.flush(this[_finishFlushFlag])
- this[_ended] = true
- return super.end(null, null, cb)
- }
-
- get ended () {
- return this[_ended]
- }
-
- write (chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- cb = encoding, encoding = 'utf8'
-
- if (typeof chunk === 'string')
- chunk = Buffer.from(chunk, encoding)
-
- if (this[_sawError])
- return
- assert(this[_handle], 'zlib binding closed')
-
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- const nativeHandle = this[_handle]._handle
- const originalNativeClose = nativeHandle.close
- nativeHandle.close = () => {}
- const originalClose = this[_handle].close
- this[_handle].close = () => {}
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- Buffer.concat = (args) => args
- let result
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag] : this[_flushFlag]
- result = this[_handle]._processChunk(chunk, flushFlag)
- // if we don't throw, reset it back how it was
- Buffer.concat = OriginalBufferConcat
- } catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- Buffer.concat = OriginalBufferConcat
- this[_onError](new ZlibError(err))
- } finally {
- if (this[_handle]) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- this[_handle]._handle = nativeHandle
- nativeHandle.close = originalNativeClose
- this[_handle].close = originalClose
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this[_handle].removeAllListeners('error')
- }
- }
-
- let writeReturn
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = super.write(Buffer.from(result[0]))
- for (let i = 1; i < result.length; i++) {
- writeReturn = super.write(result[i])
- }
- } else {
- writeReturn = super.write(Buffer.from(result))
- }
- }
-
- if (cb)
- cb()
- return writeReturn
- }
-}
-
-class Zlib extends ZlibBase {
- constructor (opts, mode) {
- opts = opts || {}
-
- opts.flush = opts.flush || constants.Z_NO_FLUSH
- opts.finishFlush = opts.finishFlush || constants.Z_FINISH
- super(opts, mode)
-
- this[_fullFlushFlag] = constants.Z_FULL_FLUSH
- this[_level] = opts.level
- this[_strategy] = opts.strategy
- }
-
- params (level, strategy) {
- if (this[_sawError])
- return
-
- if (!this[_handle])
- throw new Error('cannot switch params when binding is closed')
-
- // no way to test this without also not supporting params at all
- /* istanbul ignore if */
- if (!this[_handle].params)
- throw new Error('not supported in this implementation')
-
- if (this[_level] !== level || this[_strategy] !== strategy) {
- this.flush(constants.Z_SYNC_FLUSH)
- assert(this[_handle], 'zlib binding closed')
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this[_handle].flush
- this[_handle].flush = (flushFlag, cb) => {
- this.flush(flushFlag)
- cb()
- }
- try {
- this[_handle].params(level, strategy)
- } finally {
- this[_handle].flush = origFlush
- }
- /* istanbul ignore else */
- if (this[_handle]) {
- this[_level] = level
- this[_strategy] = strategy
- }
- }
- }
-}
-
-// minimal 2-byte header
-class Deflate extends Zlib {
- constructor (opts) {
- super(opts, 'Deflate')
- }
-}
-
-class Inflate extends Zlib {
- constructor (opts) {
- super(opts, 'Inflate')
- }
-}
-
-// gzip - bigger header, same deflate compression
-class Gzip extends Zlib {
- constructor (opts) {
- super(opts, 'Gzip')
- }
-}
-
-class Gunzip extends Zlib {
- constructor (opts) {
- super(opts, 'Gunzip')
- }
-}
-
-// raw - no header
-class DeflateRaw extends Zlib {
- constructor (opts) {
- super(opts, 'DeflateRaw')
- }
-}
-
-class InflateRaw extends Zlib {
- constructor (opts) {
- super(opts, 'InflateRaw')
- }
-}
-
-// auto-detect header.
-class Unzip extends Zlib {
- constructor (opts) {
- super(opts, 'Unzip')
- }
-}
-
-class Brotli extends ZlibBase {
- constructor (opts, mode) {
- opts = opts || {}
-
- opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
- opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
-
- super(opts, mode)
-
- this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
- }
-}
-
-class BrotliCompress extends Brotli {
- constructor (opts) {
- super(opts, 'BrotliCompress')
- }
-}
-
-class BrotliDecompress extends Brotli {
- constructor (opts) {
- super(opts, 'BrotliDecompress')
- }
-}
-
-exports.Deflate = Deflate
-exports.Inflate = Inflate
-exports.Gzip = Gzip
-exports.Gunzip = Gunzip
-exports.DeflateRaw = DeflateRaw
-exports.InflateRaw = InflateRaw
-exports.Unzip = Unzip
-/* istanbul ignore else */
-if (typeof realZlib.BrotliCompress === 'function') {
- exports.BrotliCompress = BrotliCompress
- exports.BrotliDecompress = BrotliDecompress
-} else {
- exports.BrotliCompress = exports.BrotliDecompress = class {
- constructor () {
- throw new Error('Brotli is not supported in this version of Node.js')
- }
- }
-}
diff --git a/node_modules/libcipm/node_modules/minizlib/package.json b/node_modules/libcipm/node_modules/minizlib/package.json
deleted file mode 100644
index 1650216f6..000000000
--- a/node_modules/libcipm/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,71 +0,0 @@
-{
- "_from": "minizlib@^1.2.1",
- "_id": "minizlib@1.3.3",
- "_inBundle": false,
- "_integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
- "_location": "/libcipm/minizlib",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "minizlib@^1.2.1",
- "name": "minizlib",
- "escapedName": "minizlib",
- "rawSpec": "^1.2.1",
- "saveSpec": null,
- "fetchSpec": "^1.2.1"
- },
- "_requiredBy": [
- "/libcipm/tar"
- ],
- "_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
- "_shasum": "2290de96818a34c29551c8a8d301216bd65a861d",
- "_spec": "minizlib@^1.2.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/tar",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/minizlib/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "minipass": "^2.9.0"
- },
- "deprecated": false,
- "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
- "devDependencies": {
- "tap": "^12.0.1"
- },
- "files": [
- "index.js",
- "constants.js"
- ],
- "homepage": "https://github.com/isaacs/minizlib#readme",
- "keywords": [
- "zlib",
- "gzip",
- "gunzip",
- "deflate",
- "inflate",
- "compression",
- "zip",
- "unzip"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "minizlib",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/minizlib.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap test/*.js --100 -J"
- },
- "version": "1.3.3"
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/CHANGELOG.md b/node_modules/libcipm/node_modules/npm-package-arg/CHANGELOG.md
deleted file mode 100644
index 1b3431acc..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/CHANGELOG.md
+++ /dev/null
@@ -1,26 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="6.1.1"></a>
-## [6.1.1](https://github.com/npm/npm-package-arg/compare/v6.1.0...v6.1.1) (2019-08-21)
-
-
-### Bug Fixes
-
-* preserve drive letter on windows git file:// urls ([3909203](https://github.com/npm/npm-package-arg/commit/3909203))
-
-
-
-<a name="6.1.0"></a>
-# [6.1.0](https://github.com/npm/npm-package-arg/compare/v6.0.0...v6.1.0) (2018-04-10)
-
-
-### Bug Fixes
-
-* **git:** Fix gitRange for git+ssh for private git ([#33](https://github.com/npm/npm-package-arg/issues/33)) ([647a0b3](https://github.com/npm/npm-package-arg/commit/647a0b3))
-
-
-### Features
-
-* **alias:** add `npm:` registry alias spec ([#34](https://github.com/npm/npm-package-arg/issues/34)) ([ab99f8e](https://github.com/npm/npm-package-arg/commit/ab99f8e))
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/LICENSE b/node_modules/libcipm/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 05eeeb88c..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/README.md b/node_modules/libcipm/node_modules/npm-package-arg/README.md
deleted file mode 100644
index 847341b21..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/README.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# npm-package-arg
-
-[![Build Status](https://travis-ci.org/npm/npm-package-arg.svg?branch=master)](https://travis-ci.org/npm/npm-package-arg)
-
-Parses package name and specifier passed to commands like `npm install` or
-`npm cache add`, or as found in `package.json` dependency sections.
-
-## EXAMPLES
-
-```javascript
-var assert = require("assert")
-var npa = require("npm-package-arg")
-
-// Pass in the descriptor, and it'll return an object
-try {
- var parsed = npa("@bar/foo@1.2")
-} catch (ex) {
- …
-}
-```
-
-## USING
-
-`var npa = require('npm-package-arg')`
-
-### var result = npa(*arg*[, *where*])
-
-* *arg* - a string that you might pass to `npm install`, like:
-`foo@1.2`, `@bar/foo@1.2`, `foo@user/foo`, `http://x.com/foo.tgz`,
-`git+https://github.com/user/foo`, `bitbucket:user/foo`, `foo.tar.gz`,
-`../foo/bar/` or `bar`. If the *arg* you provide doesn't have a specifier
-part, eg `foo`, then the specifier will default to `latest`.
-* *where* - Optionally the path to resolve file paths relative to. Defaults to `process.cwd()`
-
-**Throws** if the package name is invalid, a dist-tag is invalid or a URL's protocol is not supported.
-
-### var result = npa.resolve(*name*, *spec*[, *where*])
-
-* *name* - The name of the module you want to install. For example: `foo` or `@bar/foo`.
-* *spec* - The specifier indicating where and how you can get this module. Something like:
-`1.2`, `^1.7.17`, `http://x.com/foo.tgz`, `git+https://github.com/user/foo`,
-`bitbucket:user/foo`, `file:foo.tar.gz` or `file:../foo/bar/`. If not
-included then the default is `latest`.
-* *where* - Optionally the path to resolve file paths relative to. Defaults to `process.cwd()`
-
-**Throws** if the package name is invalid, a dist-tag is invalid or a URL's protocol is not supported.
-
-## RESULT OBJECT
-
-The objects that are returned by npm-package-arg contain the following
-keys:
-
-* `type` - One of the following strings:
- * `git` - A git repo
- * `tag` - A tagged version, like `"foo@latest"`
- * `version` - A specific version number, like `"foo@1.2.3"`
- * `range` - A version range, like `"foo@2.x"`
- * `file` - A local `.tar.gz`, `.tar` or `.tgz` file.
- * `directory` - A local directory.
- * `remote` - An http url (presumably to a tgz)
-* `registry` - If true this specifier refers to a resource hosted on a
- registry. This is true for `tag`, `version` and `range` types.
-* `name` - If known, the `name` field expected in the resulting pkg.
-* `scope` - If a name is something like `@org/module` then the `scope`
- field will be set to `@org`. If it doesn't have a scoped name, then
- scope is `null`.
-* `escapedName` - A version of `name` escaped to match the npm scoped packages
- specification. Mostly used when making requests against a registry. When
- `name` is `null`, `escapedName` will also be `null`.
-* `rawSpec` - The specifier part that was parsed out in calls to `npa(arg)`,
- or the value of `spec` in calls to `npa.resolve(name, spec)`.
-* `saveSpec` - The normalized specifier, for saving to package.json files.
- `null` for registry dependencies.
-* `fetchSpec` - The version of the specifier to be used to fetch this
- resource. `null` for shortcuts to hosted git dependencies as there isn't
- just one URL to try with them.
-* `gitRange` - If set, this is a semver specifier to match against git tags with
-* `gitCommittish` - If set, this is the specific committish to use with a git dependency.
-* `hosted` - If `from === 'hosted'` then this will be a `hosted-git-info`
- object. This property is not included when serializing the object as
- JSON.
-* `raw` - The original un-modified string that was provided. If called as
- `npa.resolve(name, spec)` then this will be `name + '@' + spec`.
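(Editor's note, not part of the patch: a sketch of the result object described above; the exact values shown are illustrative.)

```js
const npa = require('npm-package-arg')

const parsed = npa('@bar/foo@^1.2.0')
// parsed.type      -> 'range'
// parsed.registry  -> true
// parsed.name      -> '@bar/foo'
// parsed.scope     -> '@bar'
// parsed.rawSpec   -> '^1.2.0'
// parsed.fetchSpec -> '^1.2.0'
// parsed.saveSpec  -> null (registry dependency)

const resolved = npa.resolve('foo', 'git+https://github.com/user/foo')
// resolved.type   -> 'git'
// resolved.hosted -> a hosted-git-info object describing github:user/foo
```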
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/.bin/semver b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/.bin/semver
deleted file mode 120000
index 317eb293d..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/.bin/semver
+++ /dev/null
@@ -1 +0,0 @@
-../semver/bin/semver \ No newline at end of file
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md
deleted file mode 100644
index 479f24b64..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/CHANGELOG.md
+++ /dev/null
@@ -1,115 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="2.8.5"></a>
-## [2.8.5](https://github.com/npm/hosted-git-info/compare/v2.8.4...v2.8.5) (2019-10-07)
-
-
-### Bug Fixes
-
-* updated pathmatch for gitlab ([e8325b5](https://github.com/npm/hosted-git-info/commit/e8325b5)), closes [#51](https://github.com/npm/hosted-git-info/issues/51)
-* updated pathmatch for gitlab ([ffe056f](https://github.com/npm/hosted-git-info/commit/ffe056f))
-
-
-
-<a name="2.8.4"></a>
-## [2.8.4](https://github.com/npm/hosted-git-info/compare/v2.8.3...v2.8.4) (2019-08-12)
-
-
-
-<a name="2.8.3"></a>
-## [2.8.3](https://github.com/npm/hosted-git-info/compare/v2.8.2...v2.8.3) (2019-08-12)
-
-
-
-<a name="2.8.2"></a>
-## [2.8.2](https://github.com/npm/hosted-git-info/compare/v2.8.1...v2.8.2) (2019-08-05)
-
-
-### Bug Fixes
-
-* http protocol use sshurl by default ([3b1d629](https://github.com/npm/hosted-git-info/commit/3b1d629)), closes [#48](https://github.com/npm/hosted-git-info/issues/48)
-
-
-
-<a name="2.8.1"></a>
-## [2.8.1](https://github.com/npm/hosted-git-info/compare/v2.8.0...v2.8.1) (2019-08-05)
-
-
-### Bug Fixes
-
-* ignore noCommittish on tarball url generation ([5d4a8d7](https://github.com/npm/hosted-git-info/commit/5d4a8d7))
-* use gist tarball url that works for anonymous gists ([1692435](https://github.com/npm/hosted-git-info/commit/1692435))
-
-
-
-<a name="2.8.0"></a>
-# [2.8.0](https://github.com/npm/hosted-git-info/compare/v2.7.1...v2.8.0) (2019-08-05)
-
-
-### Bug Fixes
-
-* Allow slashes in gitlab project section ([bbcf7b2](https://github.com/npm/hosted-git-info/commit/bbcf7b2)), closes [#46](https://github.com/npm/hosted-git-info/issues/46) [#43](https://github.com/npm/hosted-git-info/issues/43)
-* **git-host:** disallow URI-encoded slash (%2F) in `path` ([3776fa5](https://github.com/npm/hosted-git-info/commit/3776fa5)), closes [#44](https://github.com/npm/hosted-git-info/issues/44)
-* **gitlab:** Do not URL encode slashes in project name for GitLab https URL ([cbf04f9](https://github.com/npm/hosted-git-info/commit/cbf04f9)), closes [#47](https://github.com/npm/hosted-git-info/issues/47)
-* do not allow invalid gist urls ([d5cf830](https://github.com/npm/hosted-git-info/commit/d5cf830))
-* **cache:** Switch to lru-cache to save ourselves from unlimited memory consumption ([e518222](https://github.com/npm/hosted-git-info/commit/e518222)), closes [#38](https://github.com/npm/hosted-git-info/issues/38)
-
-
-### Features
-
-* give these objects a name ([60abaea](https://github.com/npm/hosted-git-info/commit/60abaea))
-
-
-
-<a name="2.7.1"></a>
-## [2.7.1](https://github.com/npm/hosted-git-info/compare/v2.7.0...v2.7.1) (2018-07-07)
-
-
-### Bug Fixes
-
-* **index:** Guard against non-string types ([5bc580d](https://github.com/npm/hosted-git-info/commit/5bc580d))
-* **parse:** Crash on strings that parse to having no host ([c931482](https://github.com/npm/hosted-git-info/commit/c931482)), closes [#35](https://github.com/npm/hosted-git-info/issues/35)
-
-
-
-<a name="2.7.0"></a>
-# [2.7.0](https://github.com/npm/hosted-git-info/compare/v2.6.1...v2.7.0) (2018-07-06)
-
-
-### Bug Fixes
-
-* **github tarball:** update github tarballtemplate ([6efd582](https://github.com/npm/hosted-git-info/commit/6efd582)), closes [#34](https://github.com/npm/hosted-git-info/issues/34)
-* **gitlab docs:** switched to lowercase anchors for readmes ([701bcd1](https://github.com/npm/hosted-git-info/commit/701bcd1))
-
-
-### Features
-
-* **all:** Support www. prefixes on hostnames ([3349575](https://github.com/npm/hosted-git-info/commit/3349575)), closes [#32](https://github.com/npm/hosted-git-info/issues/32)
-
-
-
-<a name="2.6.1"></a>
-## [2.6.1](https://github.com/npm/hosted-git-info/compare/v2.6.0...v2.6.1) (2018-06-25)
-
-### Bug Fixes
-
-* **Revert:** "compat: remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25))" ([cce5a62](https://github.com/npm/hosted-git-info/commit/cce5a62))
-* **Revert:** "git-host: fix forgotten extend()" ([a815ec9](https://github.com/npm/hosted-git-info/commit/a815ec9))
-
-
-
-<a name="2.6.0"></a>
-# [2.6.0](https://github.com/npm/hosted-git-info/compare/v2.5.0...v2.6.0) (2018-03-07)
-
-
-### Bug Fixes
-
-* **compat:** remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25)) ([627ab55](https://github.com/npm/hosted-git-info/commit/627ab55))
-* **git-host:** fix forgotten extend() ([eba1f7b](https://github.com/npm/hosted-git-info/commit/eba1f7b))
-
-
-### Features
-
-* **browse:** fragment support for browse() ([#28](https://github.com/npm/hosted-git-info/issues/28)) ([cd5e5bb](https://github.com/npm/hosted-git-info/commit/cd5e5bb))
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763d..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md
deleted file mode 100644
index 7b723f6b9..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/README.md
+++ /dev/null
@@ -1,133 +0,0 @@
-# hosted-git-info
-
-This will let you identify and transform various git host URLs between
-protocols. It also can tell you what the URL is for the raw path for a
-particular file for direct access without git.
-
-## Example
-
-```javascript
-var hostedGitInfo = require("hosted-git-info")
-var info = hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git", opts)
-/* info looks like:
-{
- type: "github",
- domain: "github.com",
- user: "npm",
- project: "hosted-git-info"
-}
-*/
-```
-
-If the URL can't be matched with a git host, `null` will be returned. We
-can match git, ssh and https urls. Additionally, we can match ssh connect
-strings (`git@github.com:npm/hosted-git-info`) and shortcuts (eg,
-`github:npm/hosted-git-info`). Github, specifically, is detected in the case
-of a third, unprefixed, form: `npm/hosted-git-info`.
-
-If it does match, the returned object has properties of:
-
-* info.type -- The short name of the service
-* info.domain -- The domain for git protocol use
-* info.user -- The name of the user/org on the git host
-* info.project -- The name of the project on the git host
-
-## Version Contract
-
-The major version will be bumped any time…
-
-* The constructor stops accepting URLs that it previously accepted.
-* A method is removed.
-* A method can no longer accept the number and type of arguments it previously accepted.
-* A method can return a different type than it currently returns.
-
-Implications:
-
-* I do not consider the specific format of the urls returned from, say
- `.https()` to be a part of the contract. The contract is that it will
- return a string that can be used to fetch the repo via HTTPS. But what
- that string looks like, specifically, can change.
-* Dropping support for a hosted git provider would constitute a breaking
- change.
-
-## Usage
-
-### var info = hostedGitInfo.fromUrl(gitSpecifier[, options])
-
-* *gitSpecifier* is a URL of a git repository or a SCP-style specifier of one.
-* *options* is an optional object. It can have the following properties:
- * *noCommittish* — If true then committishes won't be included in generated URLs.
- * *noGitPlus* — If true then `git+` won't be prefixed on URLs.
-
-## Methods
-
-All of the methods take the same options as the `fromUrl` factory. Options
-provided to a method override those provided to the constructor.
-
-* info.file(path, opts)
-
-Given the path of a file relative to the repository, returns a URL for
-directly fetching it from the githost. If no committish was set then
-`master` will be used as the default.
-
-For example `hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git#v1.0.0").file("package.json")`
-would return `https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json`
-
-* info.shortcut(opts)
-
-eg, `github:npm/hosted-git-info`
-
-* info.browse(path, fragment, opts)
-
-eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0`,
-`https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json`,
-`https://github.com/npm/hosted-git-info/tree/v1.2.0/README.md#supported-hosts`
-
-* info.bugs(opts)
-
-eg, `https://github.com/npm/hosted-git-info/issues`
-
-* info.docs(opts)
-
-eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0#readme`
-
-* info.https(opts)
-
-eg, `git+https://github.com/npm/hosted-git-info.git`
-
-* info.sshurl(opts)
-
-eg, `git+ssh://git@github.com/npm/hosted-git-info.git`
-
-* info.ssh(opts)
-
-eg, `git@github.com:npm/hosted-git-info.git`
-
-* info.path(opts)
-
-eg, `npm/hosted-git-info`
-
-* info.tarball(opts)
-
-eg, `https://github.com/npm/hosted-git-info/archive/v1.2.0.tar.gz`
-
-* info.getDefaultRepresentation()
-
-Returns the default output type. The default output type is based on the
-string you passed in to be parsed.
-
-* info.toString(opts)
-
-Uses `getDefaultRepresentation()` to call one of the other methods to get a URL for
-this resource. As such `hostedGitInfo.fromUrl(url).toString()` will give
-you a normalized version of the URL that still uses the same protocol.
-
-Shortcuts will still be returned as shortcuts, but the special case github
-form of `org/project` will be normalized to `github:org/project`.
-
-SSH connect strings will be normalized into `git+ssh` URLs.
-
-## Supported hosts
-
-Currently this supports Github, Bitbucket and Gitlab. Pull requests for
-additional hosts welcome.
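(Editor's note, not part of the patch: a sketch of the methods documented above and the URLs each would produce for a github specifier; the outputs follow the templates in git-host-info.js below.)

```js
var hostedGitInfo = require('hosted-git-info')

var info = hostedGitInfo.fromUrl('git@github.com:npm/hosted-git-info.git#v1.2.0')
info.type        // 'github'
info.shortcut()  // 'github:npm/hosted-git-info#v1.2.0'
info.https()     // 'git+https://github.com/npm/hosted-git-info.git#v1.2.0'
info.tarball()   // 'https://codeload.github.com/npm/hosted-git-info/tar.gz/v1.2.0'
info.file('package.json')
// 'https://raw.githubusercontent.com/npm/hosted-git-info/v1.2.0/package.json'
```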
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js
deleted file mode 100644
index 8147e3348..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host-info.js
+++ /dev/null
@@ -1,79 +0,0 @@
-'use strict'
-
-var gitHosts = module.exports = {
- github: {
- // First two are insecure and generally shouldn't be used any more, but
- // they are still supported.
- 'protocols': [ 'git', 'http', 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'github.com',
- 'treepath': 'tree',
- 'filetemplate': 'https://{auth@}raw.githubusercontent.com/{user}/{project}/{committish}/{path}',
- 'bugstemplate': 'https://{domain}/{user}/{project}/issues',
- 'gittemplate': 'git://{auth@}{domain}/{user}/{project}.git{#committish}',
- 'tarballtemplate': 'https://codeload.{domain}/{user}/{project}/tar.gz/{committish}'
- },
- bitbucket: {
- 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'bitbucket.org',
- 'treepath': 'src',
- 'tarballtemplate': 'https://{domain}/{user}/{project}/get/{committish}.tar.gz'
- },
- gitlab: {
- 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'gitlab.com',
- 'treepath': 'tree',
- 'bugstemplate': 'https://{domain}/{user}/{project}/issues',
- 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{projectPath}.git{#committish}',
- 'tarballtemplate': 'https://{domain}/{user}/{project}/repository/archive.tar.gz?ref={committish}',
- 'pathmatch': /^[/]([^/]+)[/]((?!.*(\/-\/|\/repository\/archive\.tar\.gz\?=.*|\/repository\/[^/]+\/archive.tar.gz$)).*?)(?:[.]git|[/])?$/
- },
- gist: {
- 'protocols': [ 'git', 'git+ssh', 'git+https', 'ssh', 'https' ],
- 'domain': 'gist.github.com',
- 'pathmatch': /^[/](?:([^/]+)[/])?([a-z0-9]{32,})(?:[.]git)?$/,
- 'filetemplate': 'https://gist.githubusercontent.com/{user}/{project}/raw{/committish}/{path}',
- 'bugstemplate': 'https://{domain}/{project}',
- 'gittemplate': 'git://{domain}/{project}.git{#committish}',
- 'sshtemplate': 'git@{domain}:/{project}.git{#committish}',
- 'sshurltemplate': 'git+ssh://git@{domain}/{project}.git{#committish}',
- 'browsetemplate': 'https://{domain}/{project}{/committish}',
- 'browsefiletemplate': 'https://{domain}/{project}{/committish}{#path}',
- 'docstemplate': 'https://{domain}/{project}{/committish}',
- 'httpstemplate': 'git+https://{domain}/{project}.git{#committish}',
- 'shortcuttemplate': '{type}:{project}{#committish}',
- 'pathtemplate': '{project}{#committish}',
- 'tarballtemplate': 'https://codeload.github.com/gist/{project}/tar.gz/{committish}',
- 'hashformat': function (fragment) {
- return 'file-' + formatHashFragment(fragment)
- }
- }
-}
-
-var gitHostDefaults = {
- 'sshtemplate': 'git@{domain}:{user}/{project}.git{#committish}',
- 'sshurltemplate': 'git+ssh://git@{domain}/{user}/{project}.git{#committish}',
- 'browsetemplate': 'https://{domain}/{user}/{project}{/tree/committish}',
- 'browsefiletemplate': 'https://{domain}/{user}/{project}/{treepath}/{committish}/{path}{#fragment}',
- 'docstemplate': 'https://{domain}/{user}/{project}{/tree/committish}#readme',
- 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{project}.git{#committish}',
- 'filetemplate': 'https://{domain}/{user}/{project}/raw/{committish}/{path}',
- 'shortcuttemplate': '{type}:{user}/{project}{#committish}',
- 'pathtemplate': '{user}/{project}{#committish}',
- 'pathmatch': /^[/]([^/]+)[/]([^/]+?)(?:[.]git|[/])?$/,
- 'hashformat': formatHashFragment
-}
-
-Object.keys(gitHosts).forEach(function (name) {
- Object.keys(gitHostDefaults).forEach(function (key) {
- if (gitHosts[name][key]) return
- gitHosts[name][key] = gitHostDefaults[key]
- })
- gitHosts[name].protocols_re = RegExp('^(' +
- gitHosts[name].protocols.map(function (protocol) {
- return protocol.replace(/([\\+*{}()[\]$^|])/g, '\\$1')
- }).join('|') + '):$')
-})
-
-function formatHashFragment (fragment) {
- return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-')
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js
deleted file mode 100644
index 9616fbaa6..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/git-host.js
+++ /dev/null
@@ -1,156 +0,0 @@
-'use strict'
-var gitHosts = require('./git-host-info.js')
-/* eslint-disable node/no-deprecated-api */
-
-// copy-pasta util._extend from node's source, to avoid pulling
-// the whole util module into peoples' webpack bundles.
-/* istanbul ignore next */
-var extend = Object.assign || function _extend (target, source) {
- // Don't do anything if source isn't an object
- if (source === null || typeof source !== 'object') return target
-
- var keys = Object.keys(source)
- var i = keys.length
- while (i--) {
- target[keys[i]] = source[keys[i]]
- }
- return target
-}
-
-module.exports = GitHost
-function GitHost (type, user, auth, project, committish, defaultRepresentation, opts) {
- var gitHostInfo = this
- gitHostInfo.type = type
- Object.keys(gitHosts[type]).forEach(function (key) {
- gitHostInfo[key] = gitHosts[type][key]
- })
- gitHostInfo.user = user
- gitHostInfo.auth = auth
- gitHostInfo.project = project
- gitHostInfo.committish = committish
- gitHostInfo.default = defaultRepresentation
- gitHostInfo.opts = opts || {}
-}
-
-GitHost.prototype.hash = function () {
- return this.committish ? '#' + this.committish : ''
-}
-
-GitHost.prototype._fill = function (template, opts) {
- if (!template) return
- var vars = extend({}, opts)
- vars.path = vars.path ? vars.path.replace(/^[/]+/g, '') : ''
- opts = extend(extend({}, this.opts), opts)
- var self = this
- Object.keys(this).forEach(function (key) {
- if (self[key] != null && vars[key] == null) vars[key] = self[key]
- })
- var rawAuth = vars.auth
- var rawcommittish = vars.committish
- var rawFragment = vars.fragment
- var rawPath = vars.path
- var rawProject = vars.project
- Object.keys(vars).forEach(function (key) {
- var value = vars[key]
- if ((key === 'path' || key === 'project') && typeof value === 'string') {
- vars[key] = value.split('/').map(function (pathComponent) {
- return encodeURIComponent(pathComponent)
- }).join('/')
- } else {
- vars[key] = encodeURIComponent(value)
- }
- })
- vars['auth@'] = rawAuth ? rawAuth + '@' : ''
- vars['#fragment'] = rawFragment ? '#' + this.hashformat(rawFragment) : ''
- vars.fragment = vars.fragment ? vars.fragment : ''
- vars['#path'] = rawPath ? '#' + this.hashformat(rawPath) : ''
- vars['/path'] = vars.path ? '/' + vars.path : ''
- vars.projectPath = rawProject.split('/').map(encodeURIComponent).join('/')
- if (opts.noCommittish) {
- vars['#committish'] = ''
- vars['/tree/committish'] = ''
- vars['/committish'] = ''
- vars.committish = ''
- } else {
- vars['#committish'] = rawcommittish ? '#' + rawcommittish : ''
- vars['/tree/committish'] = vars.committish
- ? '/' + vars.treepath + '/' + vars.committish
- : ''
- vars['/committish'] = vars.committish ? '/' + vars.committish : ''
- vars.committish = vars.committish || 'master'
- }
- var res = template
- Object.keys(vars).forEach(function (key) {
- res = res.replace(new RegExp('[{]' + key + '[}]', 'g'), vars[key])
- })
- if (opts.noGitPlus) {
- return res.replace(/^git[+]/, '')
- } else {
- return res
- }
-}
-
-GitHost.prototype.ssh = function (opts) {
- return this._fill(this.sshtemplate, opts)
-}
-
-GitHost.prototype.sshurl = function (opts) {
- return this._fill(this.sshurltemplate, opts)
-}
-
-GitHost.prototype.browse = function (P, F, opts) {
- if (typeof P === 'string') {
- if (typeof F !== 'string') {
- opts = F
- F = null
- }
- return this._fill(this.browsefiletemplate, extend({
- fragment: F,
- path: P
- }, opts))
- } else {
- return this._fill(this.browsetemplate, P)
- }
-}
-
-GitHost.prototype.docs = function (opts) {
- return this._fill(this.docstemplate, opts)
-}
-
-GitHost.prototype.bugs = function (opts) {
- return this._fill(this.bugstemplate, opts)
-}
-
-GitHost.prototype.https = function (opts) {
- return this._fill(this.httpstemplate, opts)
-}
-
-GitHost.prototype.git = function (opts) {
- return this._fill(this.gittemplate, opts)
-}
-
-GitHost.prototype.shortcut = function (opts) {
- return this._fill(this.shortcuttemplate, opts)
-}
-
-GitHost.prototype.path = function (opts) {
- return this._fill(this.pathtemplate, opts)
-}
-
-GitHost.prototype.tarball = function (opts_) {
- var opts = extend({}, opts_, { noCommittish: false })
- return this._fill(this.tarballtemplate, opts)
-}
-
-GitHost.prototype.file = function (P, opts) {
- return this._fill(this.filetemplate, extend({ path: P }, opts))
-}
-
-GitHost.prototype.getDefaultRepresentation = function () {
- return this.default
-}
-
-GitHost.prototype.toString = function (opts) {
- if (this.default && typeof this[this.default] === 'function') return this[this.default](opts)
- return this.sshurl(opts)
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js
deleted file mode 100644
index fc959cb04..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/index.js
+++ /dev/null
@@ -1,125 +0,0 @@
-'use strict'
-var url = require('url')
-var gitHosts = require('./git-host-info.js')
-var GitHost = module.exports = require('./git-host.js')
-
-var protocolToRepresentationMap = {
- 'git+ssh:': 'sshurl',
- 'git+https:': 'https',
- 'ssh:': 'sshurl',
- 'git:': 'git'
-}
-
-function protocolToRepresentation (protocol) {
- return protocolToRepresentationMap[protocol] || protocol.slice(0, -1)
-}
-
-var authProtocols = {
- 'git:': true,
- 'https:': true,
- 'git+https:': true,
- 'http:': true,
- 'git+http:': true
-}
-
-var cache = {}
-
-module.exports.fromUrl = function (giturl, opts) {
- if (typeof giturl !== 'string') return
- var key = giturl + JSON.stringify(opts || {})
-
- if (!(key in cache)) {
- cache[key] = fromUrl(giturl, opts)
- }
-
- return cache[key]
-}
-
-function fromUrl (giturl, opts) {
- if (giturl == null || giturl === '') return
- var url = fixupUnqualifiedGist(
- isGitHubShorthand(giturl) ? 'github:' + giturl : giturl
- )
- var parsed = parseGitUrl(url)
- var shortcutMatch = url.match(new RegExp('^([^:]+):(?:(?:[^@:]+(?:[^@]+)?@)?([^/]*))[/](.+?)(?:[.]git)?($|#)'))
- var matches = Object.keys(gitHosts).map(function (gitHostName) {
- try {
- var gitHostInfo = gitHosts[gitHostName]
- var auth = null
- if (parsed.auth && authProtocols[parsed.protocol]) {
- auth = decodeURIComponent(parsed.auth)
- }
- var committish = parsed.hash ? decodeURIComponent(parsed.hash.substr(1)) : null
- var user = null
- var project = null
- var defaultRepresentation = null
- if (shortcutMatch && shortcutMatch[1] === gitHostName) {
- user = shortcutMatch[2] && decodeURIComponent(shortcutMatch[2])
- project = decodeURIComponent(shortcutMatch[3])
- defaultRepresentation = 'shortcut'
- } else {
- if (parsed.host && parsed.host !== gitHostInfo.domain && parsed.host.replace(/^www[.]/, '') !== gitHostInfo.domain) return
- if (!gitHostInfo.protocols_re.test(parsed.protocol)) return
- if (!parsed.path) return
- var pathmatch = gitHostInfo.pathmatch
- var matched = parsed.path.match(pathmatch)
- if (!matched) return
- /* istanbul ignore else */
- if (matched[1] !== null && matched[1] !== undefined) {
- user = decodeURIComponent(matched[1].replace(/^:/, ''))
- }
- project = decodeURIComponent(matched[2])
- defaultRepresentation = protocolToRepresentation(parsed.protocol)
- }
- return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts)
- } catch (ex) {
- /* istanbul ignore else */
- if (ex instanceof URIError) {
- } else throw ex
- }
- }).filter(function (gitHostInfo) { return gitHostInfo })
- if (matches.length !== 1) return
- return matches[0]
-}
-
-function isGitHubShorthand (arg) {
- // Note: This does not fully test the git ref format.
- // See https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
- //
- // The only way to do this properly would be to shell out to
- // git-check-ref-format, and as this is a fast sync function,
- // we don't want to do that. Just let git fail if it turns
- // out that the commit-ish is invalid.
- // GH usernames cannot start with . or -
- return /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(arg)
-}
-
-function fixupUnqualifiedGist (giturl) {
- // necessary for round-tripping gists
- var parsed = url.parse(giturl)
- if (parsed.protocol === 'gist:' && parsed.host && !parsed.path) {
- return parsed.protocol + '/' + parsed.host
- } else {
- return giturl
- }
-}
-
-function parseGitUrl (giturl) {
- var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/)
- if (!matched) return url.parse(giturl)
- return {
- protocol: 'git+ssh:',
- slashes: true,
- auth: matched[1],
- host: matched[2],
- port: null,
- hostname: matched[2],
- hash: matched[4],
- search: null,
- query: null,
- pathname: '/' + matched[3],
- path: '/' + matched[3],
- href: 'git+ssh://' + matched[1] + '@' + matched[2] +
- '/' + matched[3] + (matched[4] || '')
- }
-}
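For reviewers skimming this removal, here is a brief usage sketch of the vendored hosted-git-info API shown above; it is illustrative only and not part of the deleted files. It assumes the published `hosted-git-info` package is available somewhere on the require path (the copy removed here lived under libcipm's nested node_modules).

```js
// Sketch only: assumes the published hosted-git-info package is installed;
// none of this code is part of the files removed above.
const hostedGitInfo = require('hosted-git-info')

// fromUrl() memoizes its results and accepts shortcuts, git+ssh,
// git+https, etc.; unrecognized strings yield undefined.
const info = hostedGitInfo.fromUrl('github:npm/hosted-git-info#v2.8.5')

if (info) {
  console.log(info.getDefaultRepresentation()) // 'shortcut' for a shorthand spec
  console.log(info.https())                    // filled from the host's httpstemplate
  console.log(info.browse('README.md'))        // browsefiletemplate with path 'README.md'
  console.log(info.tarball())                  // tarballtemplate; the committish is kept
  console.log(info.toString())                 // default representation, else sshurl()
}

console.log(hostedGitInfo.fromUrl('not a git url')) // undefined
```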
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 417ed7d9a..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "_from": "hosted-git-info@^2.7.1",
- "_id": "hosted-git-info@2.8.5",
- "_inBundle": false,
- "_integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg==",
- "_location": "/libcipm/npm-package-arg/hosted-git-info",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "hosted-git-info@^2.7.1",
- "name": "hosted-git-info",
- "escapedName": "hosted-git-info",
- "rawSpec": "^2.7.1",
- "saveSpec": null,
- "fetchSpec": "^2.7.1"
- },
- "_requiredBy": [
- "/libcipm/npm-package-arg"
- ],
- "_resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz",
- "_shasum": "759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c",
- "_spec": "hosted-git-info@^2.7.1",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/libcipm/node_modules/npm-package-arg",
- "author": {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org",
- "url": "http://re-becca.org"
- },
- "bugs": {
- "url": "https://github.com/npm/hosted-git-info/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Provides metadata and conversions from repository urls for Github, Bitbucket and Gitlab",
- "devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.7.0"
- },
- "files": [
- "index.js",
- "git-host.js",
- "git-host-info.js"
- ],
- "homepage": "https://github.com/npm/hosted-git-info",
- "keywords": [
- "git",
- "github",
- "bitbucket",
- "gitlab"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "hosted-git-info",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/hosted-git-info.git"
- },
- "scripts": {
- "postrelease": "npm publish --tag=ancient-legacy-fixes && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --100 --no-esm test/*.js",
- "test:coverage": "tap --coverage-report=html -J --100 --no-esm test/*.js"
- },
- "version": "2.8.5"
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/CHANGELOG.md b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/CHANGELOG.md
deleted file mode 100644
index 66304fdd2..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/CHANGELOG.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# changes log
-
-## 5.7
-
-* Add `minVersion` method
-
-## 5.6
-
-* Move boolean `loose` param to an options object, with
- backwards-compatibility protection.
-* Add ability to opt out of special prerelease version handling with
- the `includePrerelease` option flag.
-
-## 5.5
-
-* Add version coercion capabilities
-
-## 5.4
-
-* Add intersection checking
-
-## 5.3
-
-* Add `minSatisfying` method
-
-## 5.2
-
-* Add `prerelease(v)` that returns prerelease components
-
-## 5.1
-
-* Add Backus-Naur for ranges
-* Remove excessively cute inspection methods
-
-## 5.0
-
-* Remove AMD/Browserified build artifacts
-* Fix ltr and gtr when using the `*` range
-* Fix for range `*` with a prerelease identifier
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/LICENSE b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/README.md b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/README.md
deleted file mode 100644
index f8dfa5a0d..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/README.md
+++ /dev/null
@@ -1,412 +0,0 @@
-semver(1) -- The semantic versioner for npm
-===========================================
-
-## Install
-
-```bash
-npm install --save semver
-```
-
-## Usage
-
-As a node module:
-
-```js
-const semver = require('semver')
-
-semver.valid('1.2.3') // '1.2.3'
-semver.valid('a.b.c') // null
-semver.clean(' =v1.2.3 ') // '1.2.3'
-semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
-semver.gt('1.2.3', '9.8.7') // false
-semver.lt('1.2.3', '9.8.7') // true
-semver.minVersion('>=1.0.0') // '1.0.0'
-semver.valid(semver.coerce('v2')) // '2.0.0'
-semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
-```
-
-As a command-line utility:
-
-```
-$ semver -h
-
-A JavaScript implementation of the https://semver.org/ specification
-Copyright Isaac Z. Schlueter
-
-Usage: semver [options] <version> [<version> [...]]
-Prints valid versions sorted by SemVer precedence
-
-Options:
--r --range <range>
- Print versions that match the specified range.
-
--i --increment [<level>]
- Increment a version by the specified level. Level can
- be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
- Only one version may be specified.
-
---preid <identifier>
- Identifier to be used to prefix premajor, preminor,
- prepatch or prerelease version increments.
-
--l --loose
- Interpret versions and ranges loosely
-
--p --include-prerelease
- Always include prerelease versions in range matching
-
--c --coerce
- Coerce a string into SemVer if possible
- (does not imply --loose)
-
-Program exits successfully if any valid version satisfies
-all supplied ranges, and prints all satisfying versions.
-
-If no satisfying versions are found, then exits failure.
-
-Versions are printed in ascending order, so supplying
-multiple versions to the utility will just sort them.
-```
-
-## Versions
-
-A "version" is described by the `v2.0.0` specification found at
-<https://semver.org/>.
-
-A leading `"="` or `"v"` character is stripped off and ignored.
-
-## Ranges
-
-A `version range` is a set of `comparators` which specify versions
-that satisfy the range.
-
-A `comparator` is composed of an `operator` and a `version`. The set
-of primitive `operators` is:
-
-* `<` Less than
-* `<=` Less than or equal to
-* `>` Greater than
-* `>=` Greater than or equal to
-* `=` Equal. If no operator is specified, then equality is assumed,
- so this operator is optional, but MAY be included.
-
-For example, the comparator `>=1.2.7` would match the versions
-`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
-or `1.1.0`.
-
-Comparators can be joined by whitespace to form a `comparator set`,
-which is satisfied by the **intersection** of all of the comparators
-it includes.
-
-A range is composed of one or more comparator sets, joined by `||`. A
-version matches a range if and only if every comparator in at least
-one of the `||`-separated comparator sets is satisfied by the version.
-
-For example, the range `>=1.2.7 <1.3.0` would match the versions
-`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
-or `1.1.0`.
-
-The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
-`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
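To make the intersection/union behaviour concrete, here is a small sketch (not part of the removed README) that replays the examples above with `semver.satisfies`:

```js
const semver = require('semver')

// A comparator set is the intersection of its comparators...
semver.satisfies('1.2.8', '>=1.2.7 <1.3.0')            // true
semver.satisfies('1.3.0', '>=1.2.7 <1.3.0')            // false

// ...and || joins alternative comparator sets.
semver.satisfies('1.2.7', '1.2.7 || >=1.2.9 <2.0.0')   // true
semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0')   // false
semver.satisfies('1.4.6', '1.2.7 || >=1.2.9 <2.0.0')   // true
```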
-
-### Prerelease Tags
-
-If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
-it will only be allowed to satisfy comparator sets if at least one
-comparator with the same `[major, minor, patch]` tuple also has a
-prerelease tag.
-
-For example, the range `>1.2.3-alpha.3` would be allowed to match the
-version `1.2.3-alpha.7`, but it would *not* be satisfied by
-`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
-than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
-range only accepts prerelease tags on the `1.2.3` version. The
-version `3.4.5` *would* satisfy the range, because it does not have a
-prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
-
-The purpose for this behavior is twofold. First, prerelease versions
-frequently are updated very quickly, and contain many breaking changes
-that are (by the author's design) not yet fit for public consumption.
-Therefore, by default, they are excluded from range matching
-semantics.
-
-Second, a user who has opted into using a prerelease version has
-clearly indicated the intent to use *that specific* set of
-alpha/beta/rc versions. By including a prerelease tag in the range,
-the user is indicating that they are aware of the risk. However, it
-is still not appropriate to assume that they have opted into taking a
-similar risk on the *next* set of prerelease versions.
-
-Note that this behavior can be suppressed (treating all prerelease
-versions as if they were normal versions, for the purpose of range
-matching) by setting the `includePrerelease` flag on the options
-object to any
-[functions](https://github.com/npm/node-semver#functions) that do
-range matching.
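The same prerelease rules, replayed as code (a sketch reusing the versions from the paragraphs above, not original README content):

```js
const semver = require('semver')

// Prereleases only match when the range mentions a prerelease on the
// same [major, minor, patch] tuple.
semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3')    // true
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3')    // false
semver.satisfies('3.4.5', '>1.2.3-alpha.3')            // true

// Opting out of the special handling:
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3', { includePrerelease: true })  // true
```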
-
-#### Prerelease Identifiers
-
-The method `.inc` takes an additional `identifier` string argument that
-will append the value of the string as a prerelease identifier:
-
-```javascript
-semver.inc('1.2.3', 'prerelease', 'beta')
-// '1.2.4-beta.0'
-```
-
-command-line example:
-
-```bash
-$ semver 1.2.3 -i prerelease --preid beta
-1.2.4-beta.0
-```
-
-Which then can be used to increment further:
-
-```bash
-$ semver 1.2.4-beta.0 -i prerelease
-1.2.4-beta.1
-```
-
-### Advanced Range Syntax
-
-Advanced range syntax desugars to primitive comparators in
-deterministic ways.
-
-Advanced ranges may be combined in the same way as primitive
-comparators using white space or `||`.
-
-#### Hyphen Ranges `X.Y.Z - A.B.C`
-
-Specifies an inclusive set.
-
-* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
-
-If a partial version is provided as the first version in the inclusive
-range, then the missing pieces are replaced with zeroes.
-
-* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
-
-If a partial version is provided as the second version in the
-inclusive range, then all versions that start with the supplied parts
-of the tuple are accepted, but nothing that would be greater than the
-provided tuple parts.
-
-* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
-* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
-
-#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
-
-Any of `X`, `x`, or `*` may be used to "stand in" for one of the
-numeric values in the `[major, minor, patch]` tuple.
-
-* `*` := `>=0.0.0` (Any version satisfies)
-* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
-* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
-
-A partial version range is treated as an X-Range, so the special
-character is in fact optional.
-
-* `""` (empty string) := `*` := `>=0.0.0`
-* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
-* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
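A quick sketch (not from the removed README) exercising the hyphen and X-range desugarings listed above:

```js
const semver = require('semver')

// Hyphen range: 1.2.3 - 2.3 desugars to >=1.2.3 <2.4.0
semver.satisfies('2.3.9', '1.2.3 - 2.3')   // true
semver.satisfies('2.4.0', '1.2.3 - 2.3')   // false

// Partial versions are X-ranges: 1.2 behaves like 1.2.x
semver.satisfies('1.2.99', '1.2')          // true
semver.satisfies('1.3.0', '1.2')           // false
semver.validRange('1.x')                   // '>=1.0.0 <2.0.0'
```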
-
-#### Tilde Ranges `~1.2.3` `~1.2` `~1`
-
-Allows patch-level changes if a minor version is specified on the
-comparator. Allows minor-level changes if not.
-
-* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
-* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
-* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
-* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
-* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
-* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
-* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-
-#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
-
-Allows changes that do not modify the left-most non-zero digit in the
-`[major, minor, patch]` tuple. In other words, this allows patch and
-minor updates for versions `1.0.0` and above, patch updates for
-versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
-
-Many authors treat a `0.x` version as if the `x` were the major
-"breaking-change" indicator.
-
-Caret ranges are ideal when an author may make breaking changes
-between `0.2.4` and `0.3.0` releases, which is a common practice.
-However, it presumes that there will *not* be breaking changes between
-`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
-additive (but non-breaking), according to commonly observed practices.
-
-* `^1.2.3` := `>=1.2.3 <2.0.0`
-* `^0.2.3` := `>=0.2.3 <0.3.0`
-* `^0.0.3` := `>=0.0.3 <0.0.4`
-* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
- `0.0.3` version *only* will be allowed, if they are greater than or
- equal to `beta`. So, `0.0.3-pr.2` would be allowed.
-
-When parsing caret ranges, a missing `patch` value desugars to the
-number `0`, but will allow flexibility within that value, even if the
-major and minor versions are both `0`.
-
-* `^1.2.x` := `>=1.2.0 <2.0.0`
-* `^0.0.x` := `>=0.0.0 <0.1.0`
-* `^0.0` := `>=0.0.0 <0.1.0`
-
-Missing `minor` and `patch` values will desugar to zero, but also
-allow flexibility within those values, even if the major version is
-zero.
-
-* `^1.x` := `>=1.0.0 <2.0.0`
-* `^0.x` := `>=0.0.0 <1.0.0`
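And the tilde/caret desugarings above, checked with `satisfies` (again a sketch, not part of the removed file):

```js
const semver = require('semver')

// Tilde: patch-level flexibility once a minor version is given
semver.satisfies('1.2.9', '~1.2.3')   // true  (~1.2.3 := >=1.2.3 <1.3.0)
semver.satisfies('1.3.0', '~1.2.3')   // false

// Caret: the left-most non-zero digit stays fixed
semver.satisfies('1.9.0', '^1.2.3')   // true  (^1.2.3 := >=1.2.3 <2.0.0)
semver.satisfies('0.2.9', '^0.2.3')   // true  (^0.2.3 := >=0.2.3 <0.3.0)
semver.satisfies('0.3.0', '^0.2.3')   // false
semver.satisfies('0.0.4', '^0.0.3')   // false (^0.0.3 := >=0.0.3 <0.0.4)
```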
-
-### Range Grammar
-
-Putting all this together, here is a Backus-Naur grammar for ranges,
-for the benefit of parser authors:
-
-```bnf
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
-```
-
-## Functions
-
-All methods and classes take a final `options` object argument. All
-options in this object are `false` by default. The options supported
-are:
-
-- `loose` Be more forgiving about not-quite-valid semver strings.
- (Any resulting output will always be 100% strict compliant, of
- course.) For backwards compatibility reasons, if the `options`
- argument is a boolean value instead of an object, it is interpreted
- to be the `loose` param.
-- `includePrerelease` Set to suppress the [default
- behavior](https://github.com/npm/node-semver#prerelease-tags) of
- excluding prerelease tagged versions from ranges unless they are
- explicitly opted into.
-
-Strict-mode Comparators and Ranges will be strict about the SemVer
-strings that they parse.
-
-* `valid(v)`: Return the parsed version, or null if it's not valid.
-* `inc(v, release)`: Return the version incremented by the release
- type (`major`, `premajor`, `minor`, `preminor`, `patch`,
- `prepatch`, or `prerelease`), or null if it's not valid
- * `premajor` in one call will bump the version up to the next major
- version and down to a prerelease of that major version.
- `preminor`, and `prepatch` work the same way.
- * If called from a non-prerelease version, the `prerelease` will work the
- same as `prepatch`. It increments the patch version, then makes a
- prerelease. If the input version is already a prerelease it simply
- increments it.
-* `prerelease(v)`: Returns an array of prerelease components, or null
- if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
-* `major(v)`: Return the major version number.
-* `minor(v)`: Return the minor version number.
-* `patch(v)`: Return the patch version number.
-* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
- or comparators intersect.
-* `parse(v)`: Attempt to parse a string as a semantic version, returning either
- a `SemVer` object or `null`.
-
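A compact sketch of the functions listed above (mine, not from the removed README); the results in the comments follow from the descriptions and desugaring rules in this document:

```js
const semver = require('semver')

semver.parse('1.2.3-alpha.1')        // SemVer object
semver.parse('banana')               // null
semver.valid('1.2.3-alpha.1')        // '1.2.3-alpha.1'

// premajor/preminor/prepatch bump, then drop to a prerelease of the new version
semver.inc('1.2.3', 'premajor')      // '2.0.0-0'
semver.inc('1.2.3', 'prerelease')    // '1.2.4-0' (same as prepatch from a non-prerelease)
semver.inc('1.2.4-0', 'prerelease')  // '1.2.4-1'

semver.prerelease('1.2.3-alpha.1')   // ['alpha', 1]
semver.major('1.2.3-alpha.1')        // 1
semver.intersects('>=1.2.0 <1.3.0', '1.2.7')  // true

// A boolean in place of the options object is still read as `loose`
semver.valid('=v1.2.3', true)        // '1.2.3'
```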
-### Comparison
-
-* `gt(v1, v2)`: `v1 > v2`
-* `gte(v1, v2)`: `v1 >= v2`
-* `lt(v1, v2)`: `v1 < v2`
-* `lte(v1, v2)`: `v1 <= v2`
-* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
- even if they're not the exact same string. You already know how to
- compare strings.
-* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
-* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
- the corresponding function above. `"==="` and `"!=="` do simple
- string comparison, but are included for completeness. Throws if an
- invalid comparison string is provided.
-* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
- `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
-* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
- in descending order when passed to `Array.sort()`.
-* `diff(v1, v2)`: Returns difference between two versions by the release type
- (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
- or null if the versions are the same.
-
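The comparison helpers, sketched with expected results in comments (not part of the removed README; results follow from the descriptions above):

```js
const semver = require('semver')

semver.gt('1.2.3', '9.8.7')          // false
semver.eq('1.2.3', 'v1.2.3')         // true -- logically equivalent strings
semver.neq('1.2.3', '1.2.4')         // true
semver.cmp('1.2.3', '<', '2.0.0')    // true
semver.compare('1.2.3', '1.2.4')     // -1

// compare() sorts ascending, rcompare() descending
const ascending = ['1.2.3', '9.8.7', '1.2.3-alpha.1'].sort(semver.compare)
console.log(ascending)               // ['1.2.3-alpha.1', '1.2.3', '9.8.7']

semver.diff('1.2.3', '1.3.0')        // 'minor'
semver.diff('1.2.3', '2.0.0-rc.1')   // 'premajor'
semver.diff('1.2.3', '1.2.3')        // null
```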
-### Comparators
-
-* `intersects(comparator)`: Return true if the comparators intersect
-
-### Ranges
-
-* `validRange(range)`: Return the valid range or null if it's not valid
-* `satisfies(version, range)`: Return true if the version satisfies the
- range.
-* `maxSatisfying(versions, range)`: Return the highest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minSatisfying(versions, range)`: Return the lowest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minVersion(range)`: Return the lowest version that can possibly match
- the given range.
-* `gtr(version, range)`: Return `true` if version is greater than all the
- versions possible in the range.
-* `ltr(version, range)`: Return `true` if version is less than all the
- versions possible in the range.
-* `outside(version, range, hilo)`: Return true if the version is outside
- the bounds of the range in either the high or low direction. The
- `hilo` argument must be either the string `'>'` or `'<'`. (This is
- the function called by `gtr` and `ltr`.)
-* `intersects(range)`: Return true if any of the ranges comparators intersect
-
-Note that, since ranges may be non-contiguous, a version might not be
-greater than a range, less than a range, *or* satisfy a range! For
-example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
-until `2.0.0`, so the version `1.2.10` would not be greater than the
-range (because `2.0.1` satisfies, which is higher), nor less than the
-range (since `1.2.8` satisfies, which is lower), and it also does not
-satisfy the range.
-
-If you want to know if a version satisfies or does not satisfy a
-range, use the `satisfies(version, range)` function.
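Here is the non-contiguous-range example above, replayed as a sketch with the range helpers (results per the explanation in the text; not part of the removed README):

```js
const semver = require('semver')

const range = '1.2 <1.2.9 || >2.0.0'

// 1.2.10 sits in the hole between the two comparator sets:
semver.satisfies('1.2.10', range)     // false
semver.gtr('1.2.10', range)           // false -- 2.0.1 in the range is higher
semver.ltr('1.2.10', range)           // false -- 1.2.8 in the range is lower
semver.outside('1.2.10', range, '>')  // false -- the same check gtr() performs

// Picking versions out of a list:
semver.maxSatisfying(['1.2.3', '1.2.8', '2.1.0'], range)  // '2.1.0'
semver.minSatisfying(['1.2.3', '1.2.8', '2.1.0'], range)  // '1.2.3'
semver.minVersion('>=1.0.0')                              // '1.0.0'
semver.validRange('not a range')                          // null
```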
-
-### Coercion
-
-* `coerce(version)`: Coerces a string to semver if possible
-
-This aims to provide a very forgiving translation of a non-semver string to
-semver. It looks for the first digit in a string, and consumes all
-remaining characters which satisfy at least a partial semver (e.g., `1`,
-`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
-versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
-surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
-`3.4.0`). Only text which lacks digits will fail coercion (`version one`
-is not valid). The maximum length for any semver component considered for
-coercion is 16 characters; longer components will be ignored
-(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
-semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
-components are invalid (`9999999999999999.4.7.4` is likely invalid).
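The coercion examples from this section, gathered into a runnable sketch (not part of the removed README):

```js
const semver = require('semver')

semver.valid(semver.coerce('v2'))                     // '2.0.0'
semver.valid(semver.coerce('42.6.7.9.3-alpha'))       // '42.6.7'
semver.valid(semver.coerce('v3.4 replaces v3.3.1'))   // '3.4.0'
semver.coerce('version one')                          // null
```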
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/bin/semver b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/bin/semver
deleted file mode 100755
index 801e77f13..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/bin/semver
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env node
-// Standalone semver comparison program.
-// Exits successfully and prints matching version(s) if
-// any supplied version is valid and passes all tests.
-
-var argv = process.argv.slice(2)
-
-var versions = []
-
-var range = []
-
-var inc = null
-
-var version = require('../package.json').version
-
-var loose = false
-
-var includePrerelease = false
-
-var coerce = false
-
-var identifier
-
-var semver = require('../semver')
-
-var reverse = false
-
-var options = {}
-
-main()
-
-function main () {
- if (!argv.length) return help()
- while (argv.length) {
- var a = argv.shift()
- var indexOfEqualSign = a.indexOf('=')
- if (indexOfEqualSign !== -1) {
- a = a.slice(0, indexOfEqualSign)
- argv.unshift(a.slice(indexOfEqualSign + 1))
- }
- switch (a) {
- case '-rv': case '-rev': case '--rev': case '--reverse':
- reverse = true
- break
- case '-l': case '--loose':
- loose = true
- break
- case '-p': case '--include-prerelease':
- includePrerelease = true
- break
- case '-v': case '--version':
- versions.push(argv.shift())
- break
- case '-i': case '--inc': case '--increment':
- switch (argv[0]) {
- case 'major': case 'minor': case 'patch': case 'prerelease':
- case 'premajor': case 'preminor': case 'prepatch':
- inc = argv.shift()
- break
- default:
- inc = 'patch'
- break
- }
- break
- case '--preid':
- identifier = argv.shift()
- break
- case '-r': case '--range':
- range.push(argv.shift())
- break
- case '-c': case '--coerce':
- coerce = true
- break
- case '-h': case '--help': case '-?':
- return help()
- default:
- versions.push(a)
- break
- }
- }
-
- var options = { loose: loose, includePrerelease: includePrerelease }
-
- versions = versions.map(function (v) {
- return coerce ? (semver.coerce(v) || { version: v }).version : v
- }).filter(function (v) {
- return semver.valid(v)
- })
- if (!versions.length) return fail()
- if (inc && (versions.length !== 1 || range.length)) { return failInc() }
-
- for (var i = 0, l = range.length; i < l; i++) {
- versions = versions.filter(function (v) {
- return semver.satisfies(v, range[i], options)
- })
- if (!versions.length) return fail()
- }
- return success(versions)
-}
-
-function failInc () {
- console.error('--inc can only be used on a single version with no range')
- fail()
-}
-
-function fail () { process.exit(1) }
-
-function success () {
- var compare = reverse ? 'rcompare' : 'compare'
- versions.sort(function (a, b) {
- return semver[compare](a, b, options)
- }).map(function (v) {
- return semver.clean(v, options)
- }).map(function (v) {
- return inc ? semver.inc(v, inc, options, identifier) : v
- }).forEach(function (v, i, _) { console.log(v) })
-}
-
-function help () {
- console.log(['SemVer ' + version,
- '',
- 'A JavaScript implementation of the https://semver.org/ specification',
- 'Copyright Isaac Z. Schlueter',
- '',
- 'Usage: semver [options] <version> [<version> [...]]',
- 'Prints valid versions sorted by SemVer precedence',
- '',
- 'Options:',
- '-r --range <range>',
- ' Print versions that match the specified range.',
- '',
- '-i --increment [<level>]',
- ' Increment a version by the specified level. Level can',
- ' be one of: major, minor, patch, premajor, preminor,',
- " prepatch, or prerelease. Default level is 'patch'.",
- ' Only one version may be specified.',
- '',
- '--preid <identifier>',
- ' Identifier to be used to prefix premajor, preminor,',
- ' prepatch or prerelease version increments.',
- '',
- '-l --loose',
- ' Interpret versions and ranges loosely',
- '',
- '-p --include-prerelease',
- ' Always include prerelease versions in range matching',
- '',
- '-c --coerce',
- ' Coerce a string into SemVer if possible',
- ' (does not imply --loose)',
- '',
- 'Program exits successfully if any valid version satisfies',
- 'all supplied ranges, and prints all satisfying versions.',
- '',
- 'If no satisfying versions are found, then exits failure.',
- '',
- 'Versions are printed in ascending order, so supplying',
- 'multiple versions to the utility will just sort them.'
- ].join('\n'))
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/package.json b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/package.json
deleted file mode 100644
index f3ae8b27b..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "_from": "semver@^5.6.0",
- "_id": "semver@5.7.1",
- "_inBundle": false,
- "_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
- "_location": "/libcipm/npm-package-arg/semver",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "semver@^5.6.0",
- "name": "semver",
- "escapedName": "semver",
- "rawSpec": "^5.6.0",
- "saveSpec": null,
- "fetchSpec": "^5.6.0"
- },
- "_requiredBy": [
- "/libcipm/npm-package-arg"
- ],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "_shasum": "a954f931aeba508d307bbf069eff0c01c96116f7",
- "_spec": "semver@^5.6.0",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libcipm/node_modules/npm-package-arg",
- "bin": {
- "semver": "bin/semver"
- },
- "bugs": {
- "url": "https://github.com/npm/node-semver/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "The semantic version parser used by npm.",
- "devDependencies": {
- "tap": "^13.0.0-rc.18"
- },
- "files": [
- "bin",
- "range.bnf",
- "semver.js"
- ],
- "homepage": "https://github.com/npm/node-semver#readme",
- "license": "ISC",
- "main": "semver.js",
- "name": "semver",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-semver.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true
- },
- "version": "5.7.1"
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/range.bnf b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/range.bnf
deleted file mode 100644
index d4c6ae0d7..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/range.bnf
+++ /dev/null
@@ -1,16 +0,0 @@
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | [1-9] ( [0-9] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/semver.js b/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/semver.js
deleted file mode 100644
index d315d5d68..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/node_modules/semver/semver.js
+++ /dev/null
@@ -1,1483 +0,0 @@
-exports = module.exports = SemVer
-
-var debug
-/* istanbul ignore next */
-if (typeof process === 'object' &&
- process.env &&
- process.env.NODE_DEBUG &&
- /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
- debug = function () {
- var args = Array.prototype.slice.call(arguments, 0)
- args.unshift('SEMVER')
- console.log.apply(console, args)
- }
-} else {
- debug = function () {}
-}
-
-// Note: this is the semver.org version of the spec that it implements
-// Not necessarily the package version of this code.
-exports.SEMVER_SPEC_VERSION = '2.0.0'
-
-var MAX_LENGTH = 256
-var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
- /* istanbul ignore next */ 9007199254740991
-
-// Max safe segment length for coercion.
-var MAX_SAFE_COMPONENT_LENGTH = 16
-
-// The actual regexps go on exports.re
-var re = exports.re = []
-var src = exports.src = []
-var R = 0
-
-// The following Regular Expressions can be used for tokenizing,
-// validating, and parsing SemVer version strings.
-
-// ## Numeric Identifier
-// A single `0`, or a non-zero digit followed by zero or more digits.
-
-var NUMERICIDENTIFIER = R++
-src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
-var NUMERICIDENTIFIERLOOSE = R++
-src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
-
-// ## Non-numeric Identifier
-// Zero or more digits, followed by a letter or hyphen, and then zero or
-// more letters, digits, or hyphens.
-
-var NONNUMERICIDENTIFIER = R++
-src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
-
-// ## Main Version
-// Three dot-separated numeric identifiers.
-
-var MAINVERSION = R++
-src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')'
-
-var MAINVERSIONLOOSE = R++
-src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
-
-// ## Pre-release Version Identifier
-// A numeric identifier, or a non-numeric identifier.
-
-var PRERELEASEIDENTIFIER = R++
-src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-var PRERELEASEIDENTIFIERLOOSE = R++
-src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-// ## Pre-release Version
-// Hyphen, followed by one or more dot-separated pre-release version
-// identifiers.
-
-var PRERELEASE = R++
-src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
- '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
-
-var PRERELEASELOOSE = R++
-src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
- '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
-
-// ## Build Metadata Identifier
-// Any combination of digits, letters, or hyphens.
-
-var BUILDIDENTIFIER = R++
-src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
-
-// ## Build Metadata
-// Plus sign, followed by one or more period-separated build metadata
-// identifiers.
-
-var BUILD = R++
-src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
- '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
-
-// ## Full Version String
-// A main version, followed optionally by a pre-release version and
-// build metadata.
-
-// Note that the only major, minor, patch, and pre-release sections of
-// the version string are capturing groups. The build metadata is not a
-// capturing group, because it should not ever be used in version
-// comparison.
-
-var FULL = R++
-var FULLPLAIN = 'v?' + src[MAINVERSION] +
- src[PRERELEASE] + '?' +
- src[BUILD] + '?'
-
-src[FULL] = '^' + FULLPLAIN + '$'
-
-// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
-// common in the npm registry.
-var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
- src[PRERELEASELOOSE] + '?' +
- src[BUILD] + '?'
-
-var LOOSE = R++
-src[LOOSE] = '^' + LOOSEPLAIN + '$'
-
-var GTLT = R++
-src[GTLT] = '((?:<|>)?=?)'
-
-// Something like "2.*" or "1.2.x".
-// Note that "x.x" is a valid xRange identifier, meaning "any version"
-// Only the first item is strictly required.
-var XRANGEIDENTIFIERLOOSE = R++
-src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
-var XRANGEIDENTIFIER = R++
-src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
-
-var XRANGEPLAIN = R++
-src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:' + src[PRERELEASE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGEPLAINLOOSE = R++
-src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:' + src[PRERELEASELOOSE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGE = R++
-src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
-var XRANGELOOSE = R++
-src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
-
-// Coercion.
-// Extract anything that could conceivably be a part of a valid semver
-var COERCE = R++
-src[COERCE] = '(?:^|[^\\d])' +
- '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:$|[^\\d])'
-
-// Tilde ranges.
-// Meaning is "reasonably at or greater than"
-var LONETILDE = R++
-src[LONETILDE] = '(?:~>?)'
-
-var TILDETRIM = R++
-src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
-re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
-var tildeTrimReplace = '$1~'
-
-var TILDE = R++
-src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
-var TILDELOOSE = R++
-src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
-
-// Caret ranges.
-// Meaning is "at least and backwards compatible with"
-var LONECARET = R++
-src[LONECARET] = '(?:\\^)'
-
-var CARETTRIM = R++
-src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
-re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
-var caretTrimReplace = '$1^'
-
-var CARET = R++
-src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
-var CARETLOOSE = R++
-src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
-
-// A simple gt/lt/eq thing, or just "" to indicate "any version"
-var COMPARATORLOOSE = R++
-src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
-var COMPARATOR = R++
-src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
-
-// An expression to strip any whitespace between the gtlt and the thing
-// it modifies, so that `> 1.2.3` ==> `>1.2.3`
-var COMPARATORTRIM = R++
-src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
- '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
-
-// this one has to use the /g flag
-re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
-var comparatorTrimReplace = '$1$2$3'
-
-// Something like `1.2.3 - 1.2.4`
-// Note that these all use the loose form, because they'll be
-// checked against either the strict or loose comparator form
-// later.
-var HYPHENRANGE = R++
-src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAIN] + ')' +
- '\\s*$'
-
-var HYPHENRANGELOOSE = R++
-src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s*$'
-
-// Star ranges basically just allow anything at all.
-var STAR = R++
-src[STAR] = '(<|>)?=?\\s*\\*'
-
-// Compile to actual regexp objects.
-// All are flag-free, unless they were created above with a flag.
-for (var i = 0; i < R; i++) {
- debug(i, src[i])
- if (!re[i]) {
- re[i] = new RegExp(src[i])
- }
-}
-
-exports.parse = parse
-function parse (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- if (version.length > MAX_LENGTH) {
- return null
- }
-
- var r = options.loose ? re[LOOSE] : re[FULL]
- if (!r.test(version)) {
- return null
- }
-
- try {
- return new SemVer(version, options)
- } catch (er) {
- return null
- }
-}
-
-exports.valid = valid
-function valid (version, options) {
- var v = parse(version, options)
- return v ? v.version : null
-}
-
-exports.clean = clean
-function clean (version, options) {
- var s = parse(version.trim().replace(/^[=v]+/, ''), options)
- return s ? s.version : null
-}
-
-exports.SemVer = SemVer
-
-function SemVer (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
- if (version instanceof SemVer) {
- if (version.loose === options.loose) {
- return version
- } else {
- version = version.version
- }
- } else if (typeof version !== 'string') {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- if (version.length > MAX_LENGTH) {
- throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
- }
-
- if (!(this instanceof SemVer)) {
- return new SemVer(version, options)
- }
-
- debug('SemVer', version, options)
- this.options = options
- this.loose = !!options.loose
-
- var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
-
- if (!m) {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- this.raw = version
-
- // these are actually numbers
- this.major = +m[1]
- this.minor = +m[2]
- this.patch = +m[3]
-
- if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
- throw new TypeError('Invalid major version')
- }
-
- if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
- throw new TypeError('Invalid minor version')
- }
-
- if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
- throw new TypeError('Invalid patch version')
- }
-
- // numberify any prerelease numeric ids
- if (!m[4]) {
- this.prerelease = []
- } else {
- this.prerelease = m[4].split('.').map(function (id) {
- if (/^[0-9]+$/.test(id)) {
- var num = +id
- if (num >= 0 && num < MAX_SAFE_INTEGER) {
- return num
- }
- }
- return id
- })
- }
-
- this.build = m[5] ? m[5].split('.') : []
- this.format()
-}
-
-SemVer.prototype.format = function () {
- this.version = this.major + '.' + this.minor + '.' + this.patch
- if (this.prerelease.length) {
- this.version += '-' + this.prerelease.join('.')
- }
- return this.version
-}
-
-SemVer.prototype.toString = function () {
- return this.version
-}
-
-SemVer.prototype.compare = function (other) {
- debug('SemVer.compare', this.version, this.options, other)
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return this.compareMain(other) || this.comparePre(other)
-}
-
-SemVer.prototype.compareMain = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch)
-}
-
-SemVer.prototype.comparePre = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- // NOT having a prerelease is > having one
- if (this.prerelease.length && !other.prerelease.length) {
- return -1
- } else if (!this.prerelease.length && other.prerelease.length) {
- return 1
- } else if (!this.prerelease.length && !other.prerelease.length) {
- return 0
- }
-
- var i = 0
- do {
- var a = this.prerelease[i]
- var b = other.prerelease[i]
- debug('prerelease compare', i, a, b)
- if (a === undefined && b === undefined) {
- return 0
- } else if (b === undefined) {
- return 1
- } else if (a === undefined) {
- return -1
- } else if (a === b) {
- continue
- } else {
- return compareIdentifiers(a, b)
- }
- } while (++i)
-}
-
-// preminor will bump the version up to the next minor release, and immediately
-// down to pre-release. premajor and prepatch work the same way.
-SemVer.prototype.inc = function (release, identifier) {
- switch (release) {
- case 'premajor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor = 0
- this.major++
- this.inc('pre', identifier)
- break
- case 'preminor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor++
- this.inc('pre', identifier)
- break
- case 'prepatch':
- // If this is already a prerelease, it will bump to the next version
- // drop any prereleases that might already exist, since they are not
- // relevant at this point.
- this.prerelease.length = 0
- this.inc('patch', identifier)
- this.inc('pre', identifier)
- break
- // If the input is a non-prerelease version, this acts the same as
- // prepatch.
- case 'prerelease':
- if (this.prerelease.length === 0) {
- this.inc('patch', identifier)
- }
- this.inc('pre', identifier)
- break
-
- case 'major':
- // If this is a pre-major version, bump up to the same major version.
- // Otherwise increment major.
- // 1.0.0-5 bumps to 1.0.0
- // 1.1.0 bumps to 2.0.0
- if (this.minor !== 0 ||
- this.patch !== 0 ||
- this.prerelease.length === 0) {
- this.major++
- }
- this.minor = 0
- this.patch = 0
- this.prerelease = []
- break
- case 'minor':
- // If this is a pre-minor version, bump up to the same minor version.
- // Otherwise increment minor.
- // 1.2.0-5 bumps to 1.2.0
- // 1.2.1 bumps to 1.3.0
- if (this.patch !== 0 || this.prerelease.length === 0) {
- this.minor++
- }
- this.patch = 0
- this.prerelease = []
- break
- case 'patch':
- // If this is not a pre-release version, it will increment the patch.
- // If it is a pre-release it will bump up to the same patch version.
- // 1.2.0-5 patches to 1.2.0
- // 1.2.0 patches to 1.2.1
- if (this.prerelease.length === 0) {
- this.patch++
- }
- this.prerelease = []
- break
- // This probably shouldn't be used publicly.
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
- case 'pre':
- if (this.prerelease.length === 0) {
- this.prerelease = [0]
- } else {
- var i = this.prerelease.length
- while (--i >= 0) {
- if (typeof this.prerelease[i] === 'number') {
- this.prerelease[i]++
- i = -2
- }
- }
- if (i === -1) {
- // didn't increment anything
- this.prerelease.push(0)
- }
- }
- if (identifier) {
- // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
- // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
- if (this.prerelease[0] === identifier) {
- if (isNaN(this.prerelease[1])) {
- this.prerelease = [identifier, 0]
- }
- } else {
- this.prerelease = [identifier, 0]
- }
- }
- break
-
- default:
- throw new Error('invalid increment argument: ' + release)
- }
- this.format()
- this.raw = this.version
- return this
-}
-
-exports.inc = inc
-function inc (version, release, loose, identifier) {
- if (typeof (loose) === 'string') {
- identifier = loose
- loose = undefined
- }
-
- try {
- return new SemVer(version, loose).inc(release, identifier).version
- } catch (er) {
- return null
- }
-}
-
-exports.diff = diff
-function diff (version1, version2) {
- if (eq(version1, version2)) {
- return null
- } else {
- var v1 = parse(version1)
- var v2 = parse(version2)
- var prefix = ''
- if (v1.prerelease.length || v2.prerelease.length) {
- prefix = 'pre'
- var defaultResult = 'prerelease'
- }
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return prefix + key
- }
- }
- }
- return defaultResult // may be undefined
- }
-}
-
-exports.compareIdentifiers = compareIdentifiers
-
-var numeric = /^[0-9]+$/
-function compareIdentifiers (a, b) {
- var anum = numeric.test(a)
- var bnum = numeric.test(b)
-
- if (anum && bnum) {
- a = +a
- b = +b
- }
-
- return a === b ? 0
- : (anum && !bnum) ? -1
- : (bnum && !anum) ? 1
- : a < b ? -1
- : 1
-}
-
-exports.rcompareIdentifiers = rcompareIdentifiers
-function rcompareIdentifiers (a, b) {
- return compareIdentifiers(b, a)
-}
-
-exports.major = major
-function major (a, loose) {
- return new SemVer(a, loose).major
-}
-
-exports.minor = minor
-function minor (a, loose) {
- return new SemVer(a, loose).minor
-}
-
-exports.patch = patch
-function patch (a, loose) {
- return new SemVer(a, loose).patch
-}
-
-exports.compare = compare
-function compare (a, b, loose) {
- return new SemVer(a, loose).compare(new SemVer(b, loose))
-}
-
-exports.compareLoose = compareLoose
-function compareLoose (a, b) {
- return compare(a, b, true)
-}
-
-exports.rcompare = rcompare
-function rcompare (a, b, loose) {
- return compare(b, a, loose)
-}
-
-exports.sort = sort
-function sort (list, loose) {
- return list.sort(function (a, b) {
- return exports.compare(a, b, loose)
- })
-}
-
-exports.rsort = rsort
-function rsort (list, loose) {
- return list.sort(function (a, b) {
- return exports.rcompare(a, b, loose)
- })
-}
-
-exports.gt = gt
-function gt (a, b, loose) {
- return compare(a, b, loose) > 0
-}
-
-exports.lt = lt
-function lt (a, b, loose) {
- return compare(a, b, loose) < 0
-}
-
-exports.eq = eq
-function eq (a, b, loose) {
- return compare(a, b, loose) === 0
-}
-
-exports.neq = neq
-function neq (a, b, loose) {
- return compare(a, b, loose) !== 0
-}
-
-exports.gte = gte
-function gte (a, b, loose) {
- return compare(a, b, loose) >= 0
-}
-
-exports.lte = lte
-function lte (a, b, loose) {
- return compare(a, b, loose) <= 0
-}
-
-exports.cmp = cmp
-function cmp (a, op, b, loose) {
- switch (op) {
- case '===':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a === b
-
- case '!==':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a !== b
-
- case '':
- case '=':
- case '==':
- return eq(a, b, loose)
-
- case '!=':
- return neq(a, b, loose)
-
- case '>':
- return gt(a, b, loose)
-
- case '>=':
- return gte(a, b, loose)
-
- case '<':
- return lt(a, b, loose)
-
- case '<=':
- return lte(a, b, loose)
-
- default:
- throw new TypeError('Invalid operator: ' + op)
- }
-}
-
-exports.Comparator = Comparator
-function Comparator (comp, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (comp instanceof Comparator) {
- if (comp.loose === !!options.loose) {
- return comp
- } else {
- comp = comp.value
- }
- }
-
- if (!(this instanceof Comparator)) {
- return new Comparator(comp, options)
- }
-
- debug('comparator', comp, options)
- this.options = options
- this.loose = !!options.loose
- this.parse(comp)
-
- if (this.semver === ANY) {
- this.value = ''
- } else {
- this.value = this.operator + this.semver.version
- }
-
- debug('comp', this)
-}
-
-var ANY = {}
-Comparator.prototype.parse = function (comp) {
- var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var m = comp.match(r)
-
- if (!m) {
- throw new TypeError('Invalid comparator: ' + comp)
- }
-
- this.operator = m[1]
- if (this.operator === '=') {
- this.operator = ''
- }
-
- // if it literally is just '>' or '' then allow anything.
- if (!m[2]) {
- this.semver = ANY
- } else {
- this.semver = new SemVer(m[2], this.options.loose)
- }
-}
-
-Comparator.prototype.toString = function () {
- return this.value
-}
-
-Comparator.prototype.test = function (version) {
- debug('Comparator.test', version, this.options.loose)
-
- if (this.semver === ANY) {
- return true
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- return cmp(version, this.operator, this.semver, this.options)
-}
-
-Comparator.prototype.intersects = function (comp, options) {
- if (!(comp instanceof Comparator)) {
- throw new TypeError('a Comparator is required')
- }
-
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- var rangeTmp
-
- if (this.operator === '') {
- rangeTmp = new Range(comp.value, options)
- return satisfies(this.value, rangeTmp, options)
- } else if (comp.operator === '') {
- rangeTmp = new Range(this.value, options)
- return satisfies(comp.semver, rangeTmp, options)
- }
-
- var sameDirectionIncreasing =
- (this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '>=' || comp.operator === '>')
- var sameDirectionDecreasing =
- (this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '<=' || comp.operator === '<')
- var sameSemVer = this.semver.version === comp.semver.version
- var differentDirectionsInclusive =
- (this.operator === '>=' || this.operator === '<=') &&
- (comp.operator === '>=' || comp.operator === '<=')
- var oppositeDirectionsLessThan =
- cmp(this.semver, '<', comp.semver, options) &&
- ((this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '<=' || comp.operator === '<'))
- var oppositeDirectionsGreaterThan =
- cmp(this.semver, '>', comp.semver, options) &&
- ((this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '>=' || comp.operator === '>'))
-
- return sameDirectionIncreasing || sameDirectionDecreasing ||
- (sameSemVer && differentDirectionsInclusive) ||
- oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
-}
-
-exports.Range = Range
-function Range (range, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (range instanceof Range) {
- if (range.loose === !!options.loose &&
- range.includePrerelease === !!options.includePrerelease) {
- return range
- } else {
- return new Range(range.raw, options)
- }
- }
-
- if (range instanceof Comparator) {
- return new Range(range.value, options)
- }
-
- if (!(this instanceof Range)) {
- return new Range(range, options)
- }
-
- this.options = options
- this.loose = !!options.loose
- this.includePrerelease = !!options.includePrerelease
-
- // First, split based on boolean or ||
- this.raw = range
- this.set = range.split(/\s*\|\|\s*/).map(function (range) {
- return this.parseRange(range.trim())
- }, this).filter(function (c) {
- // throw out any that are not relevant for whatever reason
- return c.length
- })
-
- if (!this.set.length) {
- throw new TypeError('Invalid SemVer Range: ' + range)
- }
-
- this.format()
-}
-
-Range.prototype.format = function () {
- this.range = this.set.map(function (comps) {
- return comps.join(' ').trim()
- }).join('||').trim()
- return this.range
-}
-
-Range.prototype.toString = function () {
- return this.range
-}
-
-Range.prototype.parseRange = function (range) {
- var loose = this.options.loose
- range = range.trim()
- // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
- var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
- range = range.replace(hr, hyphenReplace)
- debug('hyphen replace', range)
- // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
- range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
- debug('comparator trim', range, re[COMPARATORTRIM])
-
- // `~ 1.2.3` => `~1.2.3`
- range = range.replace(re[TILDETRIM], tildeTrimReplace)
-
- // `^ 1.2.3` => `^1.2.3`
- range = range.replace(re[CARETTRIM], caretTrimReplace)
-
- // normalize spaces
- range = range.split(/\s+/).join(' ')
-
- // At this point, the range is completely trimmed and
- // ready to be split into comparators.
-
- var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var set = range.split(' ').map(function (comp) {
- return parseComparator(comp, this.options)
- }, this).join(' ').split(/\s+/)
- if (this.options.loose) {
- // in loose mode, throw out any that are not valid comparators
- set = set.filter(function (comp) {
- return !!comp.match(compRe)
- })
- }
- set = set.map(function (comp) {
- return new Comparator(comp, this.options)
- }, this)
-
- return set
-}
-
-Range.prototype.intersects = function (range, options) {
- if (!(range instanceof Range)) {
- throw new TypeError('a Range is required')
- }
-
- return this.set.some(function (thisComparators) {
- return thisComparators.every(function (thisComparator) {
- return range.set.some(function (rangeComparators) {
- return rangeComparators.every(function (rangeComparator) {
- return thisComparator.intersects(rangeComparator, options)
- })
- })
- })
- })
-}
-
-// Mostly just for testing and legacy API reasons
-exports.toComparators = toComparators
-function toComparators (range, options) {
- return new Range(range, options).set.map(function (comp) {
- return comp.map(function (c) {
- return c.value
- }).join(' ').trim().split(' ')
- })
-}
-
-// comprised of xranges, tildes, stars, and gtlt's at this point.
-// already replaced the hyphen ranges
-// turn into a set of JUST comparators.
-function parseComparator (comp, options) {
- debug('comp', comp, options)
- comp = replaceCarets(comp, options)
- debug('caret', comp)
- comp = replaceTildes(comp, options)
- debug('tildes', comp)
- comp = replaceXRanges(comp, options)
- debug('xrange', comp)
- comp = replaceStars(comp, options)
- debug('stars', comp)
- return comp
-}
-
-function isX (id) {
- return !id || id.toLowerCase() === 'x' || id === '*'
-}
-
-// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
-function replaceTildes (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceTilde(comp, options)
- }).join(' ')
-}
-
-function replaceTilde (comp, options) {
- var r = options.loose ? re[TILDELOOSE] : re[TILDE]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('tilde', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- // ~1.2 == >=1.2.0 <1.3.0
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else if (pr) {
- debug('replaceTilde pr', pr)
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- } else {
- // ~1.2.3 == >=1.2.3 <1.3.0
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('tilde return', ret)
- return ret
- })
-}
-
-// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
-function replaceCarets (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceCaret(comp, options)
- }).join(' ')
-}
-
-function replaceCaret (comp, options) {
- debug('caret', comp, options)
- var r = options.loose ? re[CARETLOOSE] : re[CARET]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('caret', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- if (M === '0') {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else {
- ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
- }
- } else if (pr) {
- debug('replaceCaret pr', pr)
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + (+M + 1) + '.0.0'
- }
- } else {
- debug('no pr')
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + (+M + 1) + '.0.0'
- }
- }
-
- debug('caret return', ret)
- return ret
- })
-}
-
-function replaceXRanges (comp, options) {
- debug('replaceXRanges', comp, options)
- return comp.split(/\s+/).map(function (comp) {
- return replaceXRange(comp, options)
- }).join(' ')
-}
-
-function replaceXRange (comp, options) {
- comp = comp.trim()
- var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
- return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
- debug('xRange', comp, ret, gtlt, M, m, p, pr)
- var xM = isX(M)
- var xm = xM || isX(m)
- var xp = xm || isX(p)
- var anyX = xp
-
- if (gtlt === '=' && anyX) {
- gtlt = ''
- }
-
- if (xM) {
- if (gtlt === '>' || gtlt === '<') {
- // nothing is allowed
- ret = '<0.0.0'
- } else {
- // nothing is forbidden
- ret = '*'
- }
- } else if (gtlt && anyX) {
- // we know patch is an x, because we have any x at all.
- // replace X with 0
- if (xm) {
- m = 0
- }
- p = 0
-
- if (gtlt === '>') {
- // >1 => >=2.0.0
- // >1.2 => >=1.3.0
- // >1.2.3 => >=1.2.4
- gtlt = '>='
- if (xm) {
- M = +M + 1
- m = 0
- p = 0
- } else {
- m = +m + 1
- p = 0
- }
- } else if (gtlt === '<=') {
- // <=0.7.x is actually <0.8.0, since any 0.7.x should
- // pass. Similarly, <=7.x is actually <8.0.0, etc.
- gtlt = '<'
- if (xm) {
- M = +M + 1
- } else {
- m = +m + 1
- }
- }
-
- ret = gtlt + M + '.' + m + '.' + p
- } else if (xm) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (xp) {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('xRange return', ret)
-
- return ret
- })
-}
-
-// Because * is AND-ed with everything else in the comparator,
-// and '' means "any version", just remove the *s entirely.
-function replaceStars (comp, options) {
- debug('replaceStars', comp, options)
- // Looseness is ignored here. star is always as loose as it gets!
- return comp.trim().replace(re[STAR], '')
-}
-
-// This function is passed to string.replace(re[HYPHENRANGE])
-// M, m, patch, prerelease, build
-// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-function hyphenReplace ($0,
- from, fM, fm, fp, fpr, fb,
- to, tM, tm, tp, tpr, tb) {
- if (isX(fM)) {
- from = ''
- } else if (isX(fm)) {
- from = '>=' + fM + '.0.0'
- } else if (isX(fp)) {
- from = '>=' + fM + '.' + fm + '.0'
- } else {
- from = '>=' + from
- }
-
- if (isX(tM)) {
- to = ''
- } else if (isX(tm)) {
- to = '<' + (+tM + 1) + '.0.0'
- } else if (isX(tp)) {
- to = '<' + tM + '.' + (+tm + 1) + '.0'
- } else if (tpr) {
- to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
- } else {
- to = '<=' + to
- }
-
- return (from + ' ' + to).trim()
-}
-
-// if ANY of the sets match ALL of its comparators, then pass
-Range.prototype.test = function (version) {
- if (!version) {
- return false
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- for (var i = 0; i < this.set.length; i++) {
- if (testSet(this.set[i], version, this.options)) {
- return true
- }
- }
- return false
-}
-
-function testSet (set, version, options) {
- for (var i = 0; i < set.length; i++) {
- if (!set[i].test(version)) {
- return false
- }
- }
-
- if (version.prerelease.length && !options.includePrerelease) {
- // Find the set of versions that are allowed to have prereleases
- // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
- // That should allow `1.2.3-pr.2` to pass.
- // However, `1.2.4-alpha.notready` should NOT be allowed,
- // even though it's within the range set by the comparators.
- for (i = 0; i < set.length; i++) {
- debug(set[i].semver)
- if (set[i].semver === ANY) {
- continue
- }
-
- if (set[i].semver.prerelease.length > 0) {
- var allowed = set[i].semver
- if (allowed.major === version.major &&
- allowed.minor === version.minor &&
- allowed.patch === version.patch) {
- return true
- }
- }
- }
-
- // Version has a -pre, but it's not one of the ones we like.
- return false
- }
-
- return true
-}
-
-exports.satisfies = satisfies
-function satisfies (version, range, options) {
- try {
- range = new Range(range, options)
- } catch (er) {
- return false
- }
- return range.test(version)
-}
-
-exports.maxSatisfying = maxSatisfying
-function maxSatisfying (versions, range, options) {
- var max = null
- var maxSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!max || maxSV.compare(v) === -1) {
- // compare(max, v, true)
- max = v
- maxSV = new SemVer(max, options)
- }
- }
- })
- return max
-}
-
-exports.minSatisfying = minSatisfying
-function minSatisfying (versions, range, options) {
- var min = null
- var minSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!min || minSV.compare(v) === 1) {
- // compare(min, v, true)
- min = v
- minSV = new SemVer(min, options)
- }
- }
- })
- return min
-}
-
-exports.minVersion = minVersion
-function minVersion (range, loose) {
- range = new Range(range, loose)
-
- var minver = new SemVer('0.0.0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = new SemVer('0.0.0-0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = null
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- comparators.forEach(function (comparator) {
- // Clone to avoid manipulating the comparator's semver object.
- var compver = new SemVer(comparator.semver.version)
- switch (comparator.operator) {
- case '>':
- if (compver.prerelease.length === 0) {
- compver.patch++
- } else {
- compver.prerelease.push(0)
- }
- compver.raw = compver.format()
- /* fallthrough */
- case '':
- case '>=':
- if (!minver || gt(minver, compver)) {
- minver = compver
- }
- break
- case '<':
- case '<=':
- /* Ignore maximum versions */
- break
- /* istanbul ignore next */
- default:
- throw new Error('Unexpected operation: ' + comparator.operator)
- }
- })
- }
-
- if (minver && range.test(minver)) {
- return minver
- }
-
- return null
-}
-
-exports.validRange = validRange
-function validRange (range, options) {
- try {
- // Return '*' instead of '' so that truthiness works.
- // This will throw if it's invalid anyway
- return new Range(range, options).range || '*'
- } catch (er) {
- return null
- }
-}
-
-// Determine if version is less than all the versions possible in the range
-exports.ltr = ltr
-function ltr (version, range, options) {
- return outside(version, range, '<', options)
-}
-
-// Determine if version is greater than all the versions possible in the range.
-exports.gtr = gtr
-function gtr (version, range, options) {
- return outside(version, range, '>', options)
-}
-
-exports.outside = outside
-function outside (version, range, hilo, options) {
- version = new SemVer(version, options)
- range = new Range(range, options)
-
- var gtfn, ltefn, ltfn, comp, ecomp
- switch (hilo) {
- case '>':
- gtfn = gt
- ltefn = lte
- ltfn = lt
- comp = '>'
- ecomp = '>='
- break
- case '<':
- gtfn = lt
- ltefn = gte
- ltfn = gt
- comp = '<'
- ecomp = '<='
- break
- default:
- throw new TypeError('Must provide a hilo val of "<" or ">"')
- }
-
- // If it satisfies the range it is not outside
- if (satisfies(version, range, options)) {
- return false
- }
-
- // From now on, variable terms are as if we're in "gtr" mode.
- // but note that everything is flipped for the "ltr" function.
-
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- var high = null
- var low = null
-
- comparators.forEach(function (comparator) {
- if (comparator.semver === ANY) {
- comparator = new Comparator('>=0.0.0')
- }
- high = high || comparator
- low = low || comparator
- if (gtfn(comparator.semver, high.semver, options)) {
- high = comparator
- } else if (ltfn(comparator.semver, low.semver, options)) {
- low = comparator
- }
- })
-
- // If the edge version comparator has an operator then our version
- // isn't outside it
- if (high.operator === comp || high.operator === ecomp) {
- return false
- }
-
- // If the lowest version comparator has an operator and our version
- // is less than it then it isn't higher than the range
- if ((!low.operator || low.operator === comp) &&
- ltefn(version, low.semver)) {
- return false
- } else if (low.operator === ecomp && ltfn(version, low.semver)) {
- return false
- }
- }
- return true
-}
-
-exports.prerelease = prerelease
-function prerelease (version, options) {
- var parsed = parse(version, options)
- return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
-}
-
-exports.intersects = intersects
-function intersects (r1, r2, options) {
- r1 = new Range(r1, options)
- r2 = new Range(r2, options)
- return r1.intersects(r2)
-}
-
-exports.coerce = coerce
-function coerce (version) {
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- var match = version.match(re[COERCE])
-
- if (match == null) {
- return null
- }
-
- return parse(match[1] +
- '.' + (match[2] || '0') +
- '.' + (match[3] || '0'))
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/npa.js b/node_modules/libcipm/node_modules/npm-package-arg/npa.js
deleted file mode 100644
index bf2c17cfd..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/npa.js
+++ /dev/null
@@ -1,301 +0,0 @@
-'use strict'
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.Result = Result
-
-let url
-let HostedGit
-let semver
-let path_
-function path () {
- if (!path_) path_ = require('path')
- return path_
-}
-let validatePackageName
-let osenv
-
-const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isFilename = /[.](?:tgz|tar.gz|tar)$/i
-
-function npa (arg, where) {
- let name
- let spec
- if (typeof arg === 'object') {
- if (arg instanceof Result && (!where || where === arg.where)) {
- return arg
- } else if (arg.name && arg.rawSpec) {
- return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
- } else {
- return npa(arg.raw, where || arg.where)
- }
- }
- const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@')
- const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
- if (isURL.test(arg)) {
- spec = arg
- } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) {
- spec = arg
- } else if (nameEndsAt > 0) {
- name = namePart
- spec = arg.slice(nameEndsAt + 1)
- } else {
- if (!validatePackageName) validatePackageName = require('validate-npm-package-name')
- const valid = validatePackageName(arg)
- if (valid.validForOldPackages) {
- name = arg
- } else {
- spec = arg
- }
- }
- return resolve(name, spec, where, arg)
-}
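-
-// For illustration (a sketch, not part of the upstream docs): how some common
-// raw specifiers split into name/spec and the type that resolve() assigns:
-//   npa('foo@^1.2.3')                -> name 'foo', type 'range', fetchSpec '^1.2.3'
-//   npa('@scope/bar@latest')         -> name '@scope/bar', type 'tag'
-//   npa('./some/local/dir')          -> type 'directory', fetchSpec is the resolved path
-//   npa('https://example.com/x.tgz') -> type 'remote'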
-
-const isFilespec = isWindows ? /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-
-function resolve (name, spec, where, arg) {
- const res = new Result({
- raw: arg,
- name: name,
- rawSpec: spec,
- fromArgument: arg != null
- })
-
- if (name) res.setName(name)
-
- if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) {
- return fromFile(res, where)
- } else if (spec && /^npm:/i.test(spec)) {
- return fromAlias(res, where)
- }
- if (!HostedGit) HostedGit = require('hosted-git-info')
- const hosted = HostedGit.fromUrl(spec, {noGitPlus: true, noCommittish: true})
- if (hosted) {
- return fromHostedGit(res, hosted)
- } else if (spec && isURL.test(spec)) {
- return fromURL(res)
- } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) {
- return fromFile(res, where)
- } else {
- return fromRegistry(res)
- }
-}
-
-function invalidPackageName (name, valid) {
- const err = new Error(`Invalid package name "${name}": ${valid.errors.join('; ')}`)
- err.code = 'EINVALIDPACKAGENAME'
- return err
-}
-function invalidTagName (name) {
- const err = new Error(`Invalid tag name "${name}": Tags may not have any characters that encodeURIComponent encodes.`)
- err.code = 'EINVALIDTAGNAME'
- return err
-}
-
-function Result (opts) {
- this.type = opts.type
- this.registry = opts.registry
- this.where = opts.where
- if (opts.raw == null) {
- this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec
- } else {
- this.raw = opts.raw
- }
- this.name = undefined
- this.escapedName = undefined
- this.scope = undefined
- this.rawSpec = opts.rawSpec == null ? '' : opts.rawSpec
- this.saveSpec = opts.saveSpec
- this.fetchSpec = opts.fetchSpec
- if (opts.name) this.setName(opts.name)
- this.gitRange = opts.gitRange
- this.gitCommittish = opts.gitCommittish
- this.hosted = opts.hosted
-}
-
-Result.prototype.setName = function (name) {
- if (!validatePackageName) validatePackageName = require('validate-npm-package-name')
- const valid = validatePackageName(name)
- if (!valid.validForOldPackages) {
- throw invalidPackageName(name, valid)
- }
- this.name = name
- this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
- // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
- this.escapedName = name.replace('/', '%2f')
- return this
-}
-
-Result.prototype.toString = function () {
- const full = []
- if (this.name != null && this.name !== '') full.push(this.name)
- const spec = this.saveSpec || this.fetchSpec || this.rawSpec
- if (spec != null && spec !== '') full.push(spec)
- return full.length ? full.join('@') : this.raw
-}
-
-Result.prototype.toJSON = function () {
- const result = Object.assign({}, this)
- delete result.hosted
- return result
-}
-
-function setGitCommittish (res, committish) {
- if (committish != null && committish.length >= 7 && committish.slice(0, 7) === 'semver:') {
- res.gitRange = decodeURIComponent(committish.slice(7))
- res.gitCommittish = null
- } else {
- res.gitCommittish = committish === '' ? null : committish
- }
- return res
-}
-
-const isAbsolutePath = /^[/]|^[A-Za-z]:/
-
-function resolvePath (where, spec) {
- if (isAbsolutePath.test(spec)) return spec
- return path().resolve(where, spec)
-}
-
-function isAbsolute (dir) {
- if (dir[0] === '/') return true
- if (/^[A-Za-z]:/.test(dir)) return true
- return false
-}
-
-function fromFile (res, where) {
- if (!where) where = process.cwd()
- res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory'
- res.where = where
-
- const spec = res.rawSpec.replace(/\\/g, '/')
- .replace(/^file:[/]*([A-Za-z]:)/, '$1') // drive name paths on windows
- .replace(/^file:(?:[/]*([~./]))?/, '$1')
- if (/^~[/]/.test(spec)) {
- // this is needed for windows and for file:~/foo/bar
- if (!osenv) osenv = require('osenv')
- res.fetchSpec = resolvePath(osenv.home(), spec.slice(2))
- res.saveSpec = 'file:' + spec
- } else {
- res.fetchSpec = resolvePath(where, spec)
- if (isAbsolute(spec)) {
- res.saveSpec = 'file:' + spec
- } else {
- res.saveSpec = 'file:' + path().relative(where, res.fetchSpec)
- }
- }
- return res
-}
-
-function fromHostedGit (res, hosted) {
- res.type = 'git'
- res.hosted = hosted
- res.saveSpec = hosted.toString({noGitPlus: false, noCommittish: false})
- res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
- return setGitCommittish(res, hosted.committish)
-}
-
-function unsupportedURLType (protocol, spec) {
- const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
- err.code = 'EUNSUPPORTEDPROTOCOL'
- return err
-}
-
-function matchGitScp (spec) {
- // git ssh specifiers are overloaded to also use scp-style git
- // specifiers, so we have to parse those out and treat them special.
- // They are NOT true URIs, so we can't hand them to `url.parse`.
- //
- // This regex looks for things that look like:
- // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
- //
- // ...and various combinations. The username in the beginning is *required*.
- const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
- return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && {
- fetchSpec: matched[1],
- gitCommittish: matched[2] == null ? null : matched[2]
- }
-}
-
-function fromURL (res) {
- if (!url) url = require('url')
- const urlparse = url.parse(res.rawSpec)
- res.saveSpec = res.rawSpec
- // check the protocol, and then see if it's git or not
- switch (urlparse.protocol) {
- case 'git:':
- case 'git+http:':
- case 'git+https:':
- case 'git+rsync:':
- case 'git+ftp:':
- case 'git+file:':
- case 'git+ssh:':
- res.type = 'git'
- const match = urlparse.protocol === 'git+ssh:' && matchGitScp(res.rawSpec)
- if (match) {
- setGitCommittish(res, match.gitCommittish)
- res.fetchSpec = match.fetchSpec
- } else {
- setGitCommittish(res, urlparse.hash != null ? urlparse.hash.slice(1) : '')
- urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '')
- if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) {
- // keep the drive letter : on windows file paths
- urlparse.host += ':'
- urlparse.hostname += ':'
- }
- delete urlparse.hash
- res.fetchSpec = url.format(urlparse)
- }
- break
- case 'http:':
- case 'https:':
- res.type = 'remote'
- res.fetchSpec = res.saveSpec
- break
-
- default:
- throw unsupportedURLType(urlparse.protocol, res.rawSpec)
- }
-
- return res
-}
-
-function fromAlias (res, where) {
- const subSpec = npa(res.rawSpec.substr(4), where)
- if (subSpec.type === 'alias') {
- throw new Error('nested aliases not supported')
- }
- if (!subSpec.registry) {
- throw new Error('aliases only work for registry deps')
- }
- res.subSpec = subSpec
- res.registry = true
- res.type = 'alias'
- res.saveSpec = null
- res.fetchSpec = null
- return res
-}
-
-function fromRegistry (res) {
- res.registry = true
- const spec = res.rawSpec === '' ? 'latest' : res.rawSpec
- // no save spec for registry components: we save based on the fetched
- // version, not on the argument, so this can't compute that.
- res.saveSpec = null
- res.fetchSpec = spec
- if (!semver) semver = require('semver')
- const version = semver.valid(spec, true)
- const range = semver.validRange(spec, true)
- if (version) {
- res.type = 'version'
- } else if (range) {
- res.type = 'range'
- } else {
- if (encodeURIComponent(spec) !== spec) {
- throw invalidTagName(spec)
- }
- res.type = 'tag'
- }
- return res
-}
diff --git a/node_modules/libcipm/node_modules/npm-package-arg/package.json b/node_modules/libcipm/node_modules/npm-package-arg/package.json
deleted file mode 100644
index fece3b5f6..000000000
--- a/node_modules/libcipm/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,73 +0,0 @@
-{
- "_from": "npm-package-arg@^6.1.0",
- "_id": "npm-package-arg@6.1.1",
- "_inBundle": false,
- "_integrity": "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==",
- "_location": "/libcipm/npm-package-arg",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-package-arg@^6.1.0",
- "name": "npm-package-arg",
- "escapedName": "npm-package-arg",
- "rawSpec": "^6.1.0",
- "saveSpec": null,
- "fetchSpec": "^6.1.0"
- },
- "_requiredBy": [
- "/libcipm"
- ],
- "_resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz",
- "_shasum": "02168cb0a49a2b75bf988a28698de7b529df5cb7",
- "_spec": "npm-package-arg@^6.1.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/npm-package-arg/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "hosted-git-info": "^2.7.1",
- "osenv": "^0.1.5",
- "semver": "^5.6.0",
- "validate-npm-package-name": "^3.0.0"
- },
- "deprecated": false,
- "description": "Parse the things that can be arguments to `npm install`",
- "devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.5.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "directories": {
- "test": "test"
- },
- "files": [
- "npa.js"
- ],
- "homepage": "https://github.com/npm/npm-package-arg",
- "license": "ISC",
- "main": "npa.js",
- "name": "npm-package-arg",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/npm-package-arg.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap --100 -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "6.1.1"
-}
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/CHANGELOG.md b/node_modules/libcipm/node_modules/npm-pick-manifest/CHANGELOG.md
deleted file mode 100644
index c594ba140..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/CHANGELOG.md
+++ /dev/null
@@ -1,167 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/npm/npm-pick-manifest/compare/v3.0.1...v3.0.2) (2019-08-30)
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/npm/npm-pick-manifest/compare/v3.0.0...v3.0.1) (2019-08-28)
-
-
-### Bug Fixes
-
-* throw 403 for forbidden major/minor versions ([003286e](https://github.com/npm/npm-pick-manifest/commit/003286e)), closes [#2](https://github.com/npm/npm-pick-manifest/issues/2)
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/npm/npm-pick-manifest/compare/v2.2.3...v3.0.0) (2019-08-20)
-
-
-### Features
-
-* throw forbidden error when package is blocked by policy ([ad2a962](https://github.com/npm/npm-pick-manifest/commit/ad2a962)), closes [#1](https://github.com/npm/npm-pick-manifest/issues/1)
-
-
-### BREAKING CHANGES
-
-* This adds a new error code when package versions are
-blocked.
-
-PR-URL: https://github.com/npm/npm-pick-manifest/pull/1
-Credit: @claudiahdz
-
-
-
-<a name="2.2.3"></a>
-## [2.2.3](https://github.com/npm/npm-pick-manifest/compare/v2.2.2...v2.2.3) (2018-10-31)
-
-
-### Bug Fixes
-
-* **enjoyBy:** rework semantics for enjoyBy again ([5e89b62](https://github.com/npm/npm-pick-manifest/commit/5e89b62))
-
-
-
-<a name="2.2.2"></a>
-## [2.2.2](https://github.com/npm/npm-pick-manifest/compare/v2.2.1...v2.2.2) (2018-10-31)
-
-
-### Bug Fixes
-
-* **enjoyBy:** rework semantics for enjoyBy ([5684f45](https://github.com/npm/npm-pick-manifest/commit/5684f45))
-
-
-
-<a name="2.2.1"></a>
-## [2.2.1](https://github.com/npm/npm-pick-manifest/compare/v2.2.0...v2.2.1) (2018-10-30)
-
-
-
-<a name="2.2.0"></a>
-# [2.2.0](https://github.com/npm/npm-pick-manifest/compare/v2.1.0...v2.2.0) (2018-10-30)
-
-
-### Bug Fixes
-
-* **audit:** npm audit fix --force ([d5ae6c4](https://github.com/npm/npm-pick-manifest/commit/d5ae6c4))
-
-
-### Features
-
-* **enjoyBy:** add opts.enjoyBy option to filter versions by date ([0b8a790](https://github.com/npm/npm-pick-manifest/commit/0b8a790))
-
-
-
-<a name="2.1.0"></a>
-# [2.1.0](https://github.com/npm/npm-pick-manifest/compare/v2.0.1...v2.1.0) (2017-10-18)
-
-
-### Features
-
-* **selection:** allow manually disabling deprecation skipping ([0d239d3](https://github.com/npm/npm-pick-manifest/commit/0d239d3))
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/npm/npm-pick-manifest/compare/v2.0.0...v2.0.1) (2017-10-18)
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/npm/npm-pick-manifest/compare/v1.0.4...v2.0.0) (2017-10-03)
-
-
-### Bug Fixes
-
-* **license:** relicense project according to npm policy (#3) ([ed743a0](https://github.com/npm/npm-pick-manifest/commit/ed743a0))
-
-
-### Features
-
-* **selection:** Avoid matching deprecated packages if possible ([3fc6c3a](https://github.com/npm/npm-pick-manifest/commit/3fc6c3a))
-
-
-### BREAKING CHANGES
-
-* **selection:** deprecated versions may be skipped now
-* **license:** This moves the license from CC0 to ISC and properly documents the copyright as belonging to npm, Inc.
-
-
-
-<a name="1.0.4"></a>
-## [1.0.4](https://github.com/npm/npm-pick-manifest/compare/v1.0.3...v1.0.4) (2017-06-29)
-
-
-### Bug Fixes
-
-* **npa:** bump npa version for bugfixes ([7cdaca7](https://github.com/npm/npm-pick-manifest/commit/7cdaca7))
-* **semver:** use loose semver parsing for *all* ops ([bbc0daa](https://github.com/npm/npm-pick-manifest/commit/bbc0daa))
-
-
-
-<a name="1.0.3"></a>
-## [1.0.3](https://github.com/npm/npm-pick-manifest/compare/v1.0.2...v1.0.3) (2017-05-04)
-
-
-### Bug Fixes
-
-* **semver:** use semver.clean() instead ([f4133b5](https://github.com/npm/npm-pick-manifest/commit/f4133b5))
-
-
-
-<a name="1.0.2"></a>
-## [1.0.2](https://github.com/npm/npm-pick-manifest/compare/v1.0.1...v1.0.2) (2017-05-04)
-
-
-### Bug Fixes
-
-* **picker:** spaces in `wanted` prevented match ([97a7d0a](https://github.com/npm/npm-pick-manifest/commit/97a7d0a))
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/npm/npm-pick-manifest/compare/v1.0.0...v1.0.1) (2017-04-24)
-
-
-### Bug Fixes
-
-* **deps:** forgot to add semver ([1876f4f](https://github.com/npm/npm-pick-manifest/commit/1876f4f))
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-04-24)
-
-
-### Features
-
-* **api:** initial implementation. ([b086912](https://github.com/npm/npm-pick-manifest/commit/b086912))
-
-
-### BREAKING CHANGES
-
-* **api:** ex nihilo
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/libcipm/node_modules/npm-pick-manifest/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/README.md b/node_modules/libcipm/node_modules/npm-pick-manifest/README.md
deleted file mode 100644
index d32d47af1..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/README.md
+++ /dev/null
@@ -1,84 +0,0 @@
-# npm-pick-manifest [![npm version](https://img.shields.io/npm/v/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![license](https://img.shields.io/npm/l/npm-pick-manifest.svg)](https://npm.im/npm-pick-manifest) [![Travis](https://img.shields.io/travis/npm/npm-pick-manifest.svg)](https://travis-ci.org/npm/npm-pick-manifest) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/npm-pick-manifest?svg=true)](https://ci.appveyor.com/project/npm/npm-pick-manifest) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-pick-manifest/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-pick-manifest?branch=latest)
-
-[`npm-pick-manifest`](https://github.com/npm/npm-pick-manifest) is a standalone
-implementation of [npm](https://npmjs.com)'s semver range resolution algorithm.
-
-## Install
-
-`$ npm install --save npm-pick-manifest`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [`pickManifest()`](#pick-manifest)
-
-### Example
-
-```javascript
-const pickManifest = require('npm-pick-manifest')
-
-fetch('https://registry.npmjs.org/npm-pick-manifest').then(res => {
- return res.json()
-}).then(packument => {
- return pickManifest(packument, '^1.0.0')
-}) // get same manifest as npm would get if you `npm i npm-pick-manifest@^1.0.0`
-```
-
-### Features
-
-* Uses npm's exact semver resolution algorithm
-* Supports ranges, tags, and versions
-
-### Contributing
-
-The npm-pick-manifest team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="pick-manifest"></a> `> pickManifest(packument, selector, [opts]) -> manifest`
-
-Returns the manifest that matches `selector`, or throws an error.
-
-Packuments are anything returned by metadata URLs from the npm registry. That
-is, they're objects with the following shape (only fields used by
-`npm-pick-manifest` included):
-
-```javascript
-{
- name: 'some-package',
- 'dist-tags': {
- foo: '1.0.1'
- },
- versions: {
- '1.0.0': { version: '1.0.0' },
- '1.0.1': { version: '1.0.1' },
- '1.0.2': { version: '1.0.2' },
- '2.0.0': { version: '2.0.0' }
- }
-}
-```
-
-Resolution follows npm's semver resolution algorithm, and only `tag`,
-`range`, and `version` selectors are supported.
-
-The function will throw `ETARGET` if there is no matching manifest, and
-`ENOVERSIONS` if the packument object has no valid versions in `versions`.
-
-If `opts.defaultTag` is provided, it will be used instead of `latest`. That is,
-if that tag matches the selector, it will be used, even if a higher available
-version matches the range.
-
-If `opts.enjoyBy` is provided, it should be something that can be passed to `new
-Date(x)`, such as a `Date` object or a timestamp string. It will be used to
-filter the selected versions so that only versions published on or before the
-`enjoyBy` date are considered.
-
-If `opts.includeDeprecated` is passed in as true, deprecated versions will be
-selected. By default, deprecated versions other than `defaultTag` are ignored.
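-
-For illustration, a short sketch of how these options interact. The packument
-here is hypothetical, shaped like the registry metadata described above:
-
-```javascript
-const pickManifest = require('npm-pick-manifest')
-
-const packument = {
-  name: 'example-pkg',
-  'dist-tags': { latest: '1.0.0' },
-  versions: {
-    '1.0.0': { version: '1.0.0' },
-    '1.0.1': { version: '1.0.1' },
-    '1.0.2': { version: '1.0.2', deprecated: 'use 1.0.1 instead' }
-  }
-}
-
-pickManifest(packument, '^1.0.0').version
-// -> '1.0.0': the `latest` dist-tag satisfies the range, so it wins even
-//    though higher versions also match
-
-pickManifest(packument, '>1.0.0').version
-// -> '1.0.1': the deprecated 1.0.2 is skipped by default
-
-pickManifest(packument, '>1.0.0', { includeDeprecated: true }).version
-// -> '1.0.2'
-```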
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/index.js b/node_modules/libcipm/node_modules/npm-pick-manifest/index.js
deleted file mode 100644
index 9eb2d82d1..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/index.js
+++ /dev/null
@@ -1,136 +0,0 @@
-'use strict'
-
-const figgyPudding = require('figgy-pudding')
-const npa = require('npm-package-arg')
-const semver = require('semver')
-
-const PickerOpts = figgyPudding({
- defaultTag: { default: 'latest' },
- enjoyBy: {},
- includeDeprecated: { default: false }
-})
-
-module.exports = pickManifest
-function pickManifest (packument, wanted, opts) {
- opts = PickerOpts(opts)
- const time = opts.enjoyBy && packument.time && +(new Date(opts.enjoyBy))
- const spec = npa.resolve(packument.name, wanted)
- const type = spec.type
- if (type === 'version' || type === 'range') {
- wanted = semver.clean(wanted, true) || wanted
- }
- const distTags = packument['dist-tags'] || {}
- const versions = Object.keys(packument.versions || {}).filter(v => {
- return semver.valid(v, true)
- })
- const policyRestrictions = packument.policyRestrictions
- const restrictedVersions = policyRestrictions
- ? Object.keys(policyRestrictions.versions) : []
-
- function enjoyableBy (v) {
- return !time || (
- packument.time[v] && time >= +(new Date(packument.time[v]))
- )
- }
-
- let err
-
- if (!versions.length && !restrictedVersions.length) {
- err = new Error(`No valid versions available for ${packument.name}`)
- err.code = 'ENOVERSIONS'
- err.name = packument.name
- err.type = type
- err.wanted = wanted
- throw err
- }
-
- let target
-
- if (type === 'tag' && enjoyableBy(distTags[wanted])) {
- target = distTags[wanted]
- } else if (type === 'version') {
- target = wanted
- } else if (type !== 'range' && enjoyableBy(distTags[wanted])) {
- throw new Error('Only tag, version, and range are supported')
- }
-
- const tagVersion = distTags[opts.defaultTag]
-
- if (
- !target &&
- tagVersion &&
- packument.versions[tagVersion] &&
- enjoyableBy(tagVersion) &&
- semver.satisfies(tagVersion, wanted, true)
- ) {
- target = tagVersion
- }
-
- if (!target && !opts.includeDeprecated) {
- const undeprecated = versions.filter(v => !packument.versions[v].deprecated && enjoyableBy(v)
- )
- target = semver.maxSatisfying(undeprecated, wanted, true)
- }
- if (!target) {
- const stillFresh = versions.filter(enjoyableBy)
- target = semver.maxSatisfying(stillFresh, wanted, true)
- }
-
- if (!target && wanted === '*' && enjoyableBy(tagVersion)) {
- // This specific corner is meant for the case where
- // someone is using `*` as a selector, but all versions
- // are pre-releases, which don't match ranges at all.
- target = tagVersion
- }
-
- if (
- !target &&
- time &&
- type === 'tag' &&
- distTags[wanted] &&
- !enjoyableBy(distTags[wanted])
- ) {
- const stillFresh = versions.filter(v =>
- enjoyableBy(v) && semver.lte(v, distTags[wanted], true)
- ).sort(semver.rcompare)
- target = stillFresh[0]
- }
-
- if (!target && restrictedVersions) {
- target = semver.maxSatisfying(restrictedVersions, wanted, true)
- }
-
- const manifest = (
- target &&
- packument.versions[target]
- )
- if (!manifest) {
- // Check if target is forbidden
- const isForbidden = target && policyRestrictions && policyRestrictions.versions[target]
- const pckg = `${packument.name}@${wanted}${
- opts.enjoyBy
- ? ` with an Enjoy By date of ${
- new Date(opts.enjoyBy).toLocaleString()
- }. Maybe try a different date?`
- : ''
- }`
-
- if (isForbidden) {
- err = new Error(`Could not download ${pckg} due to policy violations.\n${policyRestrictions.message}\n`)
- err.code = 'E403'
- } else {
- err = new Error(`No matching version found for ${pckg}.`)
- err.code = 'ETARGET'
- }
-
- err.name = packument.name
- err.type = type
- err.wanted = wanted
- err.versions = versions
- err.distTags = distTags
- err.defaultTag = opts.defaultTag
- throw err
- } else {
- return manifest
- }
-}
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/.bin/semver b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/.bin/semver
deleted file mode 120000
index 317eb293d..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/.bin/semver
+++ /dev/null
@@ -1 +0,0 @@
-../semver/bin/semver
\ No newline at end of file
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/CHANGELOG.md b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/CHANGELOG.md
deleted file mode 100644
index 66304fdd2..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/CHANGELOG.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# changes log
-
-## 5.7
-
-* Add `minVersion` method
-
-## 5.6
-
-* Move boolean `loose` param to an options object, with
- backwards-compatibility protection.
-* Add ability to opt out of special prerelease version handling with
- the `includePrerelease` option flag.
-
-## 5.5
-
-* Add version coercion capabilities
-
-## 5.4
-
-* Add intersection checking
-
-## 5.3
-
-* Add `minSatisfying` method
-
-## 5.2
-
-* Add `prerelease(v)` that returns prerelease components
-
-## 5.1
-
-* Add Backus-Naur for ranges
-* Remove excessively cute inspection methods
-
-## 5.0
-
-* Remove AMD/Browserified build artifacts
-* Fix ltr and gtr when using the `*` range
-* Fix for range `*` with a prerelease identifier
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/LICENSE b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/README.md b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/README.md
deleted file mode 100644
index f8dfa5a0d..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/README.md
+++ /dev/null
@@ -1,412 +0,0 @@
-semver(1) -- The semantic versioner for npm
-===========================================
-
-## Install
-
-```bash
-npm install --save semver
-```
-
-## Usage
-
-As a node module:
-
-```js
-const semver = require('semver')
-
-semver.valid('1.2.3') // '1.2.3'
-semver.valid('a.b.c') // null
-semver.clean(' =v1.2.3 ') // '1.2.3'
-semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
-semver.gt('1.2.3', '9.8.7') // false
-semver.lt('1.2.3', '9.8.7') // true
-semver.minVersion('>=1.0.0') // '1.0.0'
-semver.valid(semver.coerce('v2')) // '2.0.0'
-semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
-```
-
-As a command-line utility:
-
-```
-$ semver -h
-
-A JavaScript implementation of the https://semver.org/ specification
-Copyright Isaac Z. Schlueter
-
-Usage: semver [options] <version> [<version> [...]]
-Prints valid versions sorted by SemVer precedence
-
-Options:
--r --range <range>
- Print versions that match the specified range.
-
--i --increment [<level>]
- Increment a version by the specified level. Level can
- be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
- Only one version may be specified.
-
---preid <identifier>
- Identifier to be used to prefix premajor, preminor,
- prepatch or prerelease version increments.
-
--l --loose
- Interpret versions and ranges loosely
-
--p --include-prerelease
- Always include prerelease versions in range matching
-
--c --coerce
- Coerce a string into SemVer if possible
- (does not imply --loose)
-
-Program exits successfully if any valid version satisfies
-all supplied ranges, and prints all satisfying versions.
-
-If no satisfying versions are found, then exits failure.
-
-Versions are printed in ascending order, so supplying
-multiple versions to the utility will just sort them.
-```
-
-## Versions
-
-A "version" is described by the `v2.0.0` specification found at
-<https://semver.org/>.
-
-A leading `"="` or `"v"` character is stripped off and ignored.
-
-## Ranges
-
-A `version range` is a set of `comparators` which specify versions
-that satisfy the range.
-
-A `comparator` is composed of an `operator` and a `version`. The set
-of primitive `operators` is:
-
-* `<` Less than
-* `<=` Less than or equal to
-* `>` Greater than
-* `>=` Greater than or equal to
-* `=` Equal. If no operator is specified, then equality is assumed,
- so this operator is optional, but MAY be included.
-
-For example, the comparator `>=1.2.7` would match the versions
-`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
-or `1.1.0`.
-
-Comparators can be joined by whitespace to form a `comparator set`,
-which is satisfied by the **intersection** of all of the comparators
-it includes.
-
-A range is composed of one or more comparator sets, joined by `||`. A
-version matches a range if and only if every comparator in at least
-one of the `||`-separated comparator sets is satisfied by the version.
-
-For example, the range `>=1.2.7 <1.3.0` would match the versions
-`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
-or `1.1.0`.
-
-The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
-`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
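-
-For illustration, the same ranges checked with `satisfies()`:
-
-```js
-const semver = require('semver')
-
-semver.satisfies('1.2.8', '>=1.2.7 <1.3.0')          // true: both comparators hold
-semver.satisfies('1.3.0', '>=1.2.7 <1.3.0')          // false: fails `<1.3.0`
-semver.satisfies('1.2.7', '1.2.7 || >=1.2.9 <2.0.0') // true: first comparator set matches
-semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0') // false: neither set matches
-```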
-
-### Prerelease Tags
-
-If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
-it will only be allowed to satisfy comparator sets if at least one
-comparator with the same `[major, minor, patch]` tuple also has a
-prerelease tag.
-
-For example, the range `>1.2.3-alpha.3` would be allowed to match the
-version `1.2.3-alpha.7`, but it would *not* be satisfied by
-`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
-than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
-range only accepts prerelease tags on the `1.2.3` version. The
-version `3.4.5` *would* satisfy the range, because it does not have a
-prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
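-
-The same comparisons, as a quick runnable sketch:
-
-```js
-const semver = require('semver')
-
-semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true: same [major, minor, patch] tuple
-semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false: prerelease on a different tuple
-semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true: not a prerelease at all
-```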
-
-The purpose for this behavior is twofold. First, prerelease versions
-frequently are updated very quickly, and contain many breaking changes
-that are (by the author's design) not yet fit for public consumption.
-Therefore, by default, they are excluded from range matching
-semantics.
-
-Second, a user who has opted into using a prerelease version has
-clearly indicated the intent to use *that specific* set of
-alpha/beta/rc versions. By including a prerelease tag in the range,
-the user is indicating that they are aware of the risk. However, it
-is still not appropriate to assume that they have opted into taking a
-similar risk on the *next* set of prerelease versions.
-
-Note that this behavior can be suppressed (treating all prerelease
-versions as if they were normal versions, for the purpose of range
-matching) by setting the `includePrerelease` flag on the options
-object passed to any
-[functions](https://github.com/npm/node-semver#functions) that do
-range matching.
-
-#### Prerelease Identifiers
-
-The method `.inc` takes an additional `identifier` string argument that
-will append the value of the string as a prerelease identifier:
-
-```javascript
-semver.inc('1.2.3', 'prerelease', 'beta')
-// '1.2.4-beta.0'
-```
-
-command-line example:
-
-```bash
-$ semver 1.2.3 -i prerelease --preid beta
-1.2.4-beta.0
-```
-
-Which then can be used to increment further:
-
-```bash
-$ semver 1.2.4-beta.0 -i prerelease
-1.2.4-beta.1
-```
-
-### Advanced Range Syntax
-
-Advanced range syntax desugars to primitive comparators in
-deterministic ways.
-
-Advanced ranges may be combined in the same way as primitive
-comparators using white space or `||`.
-
-#### Hyphen Ranges `X.Y.Z - A.B.C`
-
-Specifies an inclusive set.
-
-* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
-
-If a partial version is provided as the first version in the inclusive
-range, then the missing pieces are replaced with zeroes.
-
-* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
-
-If a partial version is provided as the second version in the
-inclusive range, then all versions that start with the supplied parts
-of the tuple are accepted, but nothing that would be greater than the
-provided tuple parts.
-
-* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
-* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
-
-#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
-
-Any of `X`, `x`, or `*` may be used to "stand in" for one of the
-numeric values in the `[major, minor, patch]` tuple.
-
-* `*` := `>=0.0.0` (Any version satisfies)
-* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
-* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
-
-A partial version range is treated as an X-Range, so the special
-character is in fact optional.
-
-* `""` (empty string) := `*` := `>=0.0.0`
-* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
-* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
-
-#### Tilde Ranges `~1.2.3` `~1.2` `~1`
-
-Allows patch-level changes if a minor version is specified on the
-comparator. Allows minor-level changes if not.
-
-* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
-* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
-* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
-* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
-* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
-* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
-* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-
-#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
-
-Allows changes that do not modify the left-most non-zero element in the
-`[major, minor, patch]` tuple. In other words, this allows patch and
-minor updates for versions `1.0.0` and above, patch updates for
-versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
-
-Many authors treat a `0.x` version as if the `x` were the major
-"breaking-change" indicator.
-
-Caret ranges are ideal when an author may make breaking changes
-between `0.2.4` and `0.3.0` releases, which is a common practice.
-However, it presumes that there will *not* be breaking changes between
-`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
-additive (but non-breaking), according to commonly observed practices.
-
-* `^1.2.3` := `>=1.2.3 <2.0.0`
-* `^0.2.3` := `>=0.2.3 <0.3.0`
-* `^0.0.3` := `>=0.0.3 <0.0.4`
-* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
- `0.0.3` version *only* will be allowed, if they are greater than or
- equal to `beta`. So, `0.0.3-pr.2` would be allowed.
-
-When parsing caret ranges, a missing `patch` value desugars to the
-number `0`, but will allow flexibility within that value, even if the
-major and minor versions are both `0`.
-
-* `^1.2.x` := `>=1.2.0 <2.0.0`
-* `^0.0.x` := `>=0.0.0 <0.1.0`
-* `^0.0` := `>=0.0.0 <0.1.0`
-
-Missing `minor` and `patch` values will desugar to zero, but also
-allow flexibility within those values, even if the major version is
-zero.
-
-* `^1.x` := `>=1.0.0 <2.0.0`
-* `^0.x` := `>=0.0.0 <1.0.0`
-
-### Range Grammar
-
-Putting all this together, here is a Backus-Naur grammar for ranges,
-for the benefit of parser authors:
-
-```bnf
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
-```
-
-## Functions
-
-All methods and classes take a final `options` object argument. All
-options in this object are `false` by default. The options supported
-are:
-
-- `loose` Be more forgiving about not-quite-valid semver strings.
- (Any resulting output will always be 100% strictly compliant, of
- course.) For backwards compatibility reasons, if the `options`
- argument is a boolean value instead of an object, it is interpreted
- to be the `loose` param.
-- `includePrerelease` Set to suppress the [default
- behavior](https://github.com/npm/node-semver#prerelease-tags) of
- excluding prerelease tagged versions from ranges unless they are
- explicitly opted into.
-
-Strict-mode Comparators and Ranges will be strict about the SemVer
-strings that they parse.
-
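-A quick sketch of how these two options change results:
-
-```js
-const semver = require('semver')
-
-semver.valid('=v1.2.3')                  // null: not strictly valid
-semver.valid('=v1.2.3', { loose: true }) // '1.2.3'
-
-semver.satisfies('1.2.4-beta.0', '^1.2.3')                              // false by default
-semver.satisfies('1.2.4-beta.0', '^1.2.3', { includePrerelease: true }) // true
-```
-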
-* `valid(v)`: Return the parsed version, or null if it's not valid.
-* `inc(v, release)`: Return the version incremented by the release
- type (`major`, `premajor`, `minor`, `preminor`, `patch`,
- `prepatch`, or `prerelease`), or null if it's not valid
- * `premajor` in one call will bump the version up to the next major
- version and down to a prerelease of that major version.
- `preminor` and `prepatch` work the same way.
- * If called from a non-prerelease version, the `prerelease` will work the
- same as `prepatch`. It increments the patch version, then makes a
- prerelease. If the input version is already a prerelease it simply
- increments it.
-* `prerelease(v)`: Returns an array of prerelease components, or null
- if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
-* `major(v)`: Return the major version number.
-* `minor(v)`: Return the minor version number.
-* `patch(v)`: Return the patch version number.
-* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
- or comparators intersect.
-* `parse(v)`: Attempt to parse a string as a semantic version, returning either
- a `SemVer` object or `null`.
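-
-For example (a small sketch of the calls above):
-
-```js
-const semver = require('semver')
-
-semver.inc('1.2.3', 'preminor', 'beta')      // '1.3.0-beta.0'
-semver.prerelease('1.2.3-alpha.1')           // ['alpha', 1]
-semver.major('1.2.3')                        // 1
-semver.parse('not a version')                // null
-semver.intersects('^1.2.0', '1.2.3 - 1.4.0') // true
-```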
-
-### Comparison
-
-* `gt(v1, v2)`: `v1 > v2`
-* `gte(v1, v2)`: `v1 >= v2`
-* `lt(v1, v2)`: `v1 < v2`
-* `lte(v1, v2)`: `v1 <= v2`
-* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
- even if they're not the exact same string. You already know how to
- compare strings.
-* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
-* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
- the corresponding function above. `"==="` and `"!=="` do simple
- string comparison, but are included for completeness. Throws if an
- invalid comparison string is provided.
-* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
- `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
-* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
- in descending order when passed to `Array.sort()`.
-* `diff(v1, v2)`: Returns difference between two versions by the release type
- (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
- or null if the versions are the same.
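-
-The comparison helpers above, as a quick sketch:
-
-```js
-const semver = require('semver')
-
-semver.compare('1.2.3', '1.10.0')  // -1: 1.10.0 is greater (numeric, not lexical)
-semver.eq('1.2.3', 'v1.2.3')       // true: logically equivalent, not string-equal
-semver.cmp('2.0.0', '>=', '1.9.9') // true
-
-const sorted = ['1.10.0', '1.2.3', '1.9.0'].sort(semver.compare)
-// ['1.2.3', '1.9.0', '1.10.0']
-```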
-
-### Comparators
-
-* `intersects(comparator)`: Return true if the comparators intersect
-
-### Ranges
-
-* `validRange(range)`: Return the valid range or null if it's not valid
-* `satisfies(version, range)`: Return true if the version satisfies the
- range.
-* `maxSatisfying(versions, range)`: Return the highest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minSatisfying(versions, range)`: Return the lowest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minVersion(range)`: Return the lowest version that can possibly match
- the given range.
-* `gtr(version, range)`: Return `true` if version is greater than all the
- versions possible in the range.
-* `ltr(version, range)`: Return `true` if version is less than all the
- versions possible in the range.
-* `outside(version, range, hilo)`: Return true if the version is outside
- the bounds of the range in either the high or low direction. The
- `hilo` argument must be either the string `'>'` or `'<'`. (This is
- the function called by `gtr` and `ltr`.)
-* `intersects(range)`: Return true if any of the range's comparators intersect
-
-Note that, since ranges may be non-contiguous, a version might not be
-greater than a range, less than a range, *or* satisfy a range! For
-example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
-until `2.0.0`, so the version `1.2.10` would not be greater than the
-range (because `2.0.1` satisfies, which is higher), nor less than the
-range (since `1.2.8` satisfies, which is lower), and it also does not
-satisfy the range.
-
-If you want to know if a version satisfies or does not satisfy a
-range, use the `satisfies(version, range)` function.
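-
-For example, with a simple contiguous range (a quick sketch):
-
-```js
-const semver = require('semver')
-
-semver.gtr('2.0.1', '^1.2.3')          // true: above everything the range can match
-semver.ltr('1.0.0', '^1.2.3')          // true: below everything the range can match
-semver.gtr('1.5.0', '^1.2.3')          // false: 1.5.0 satisfies the range
-semver.outside('2.0.1', '^1.2.3', '>') // true: this is the call gtr() makes
-```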
-
-### Coercion
-
-* `coerce(version)`: Coerces a string to semver if possible
-
-This aims to provide a very forgiving translation of a non-semver string to
-semver. It looks for the first digit in a string, and consumes all
-remaining characters which satisfy at least a partial semver (e.g., `1`,
-`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
-versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
-surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
-`3.4.0`). Only text which lacks digits will fail coercion (`version one`
-is not valid). The maximum length for any semver component considered for
-coercion is 16 characters; longer components will be ignored
-(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
-semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
-components are invalid (`9999999999999999.4.7.4` is likely invalid).
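-
-For example:
-
-```js
-const semver = require('semver')
-
-semver.coerce('v3.4 replaces v3.3.1').version // '3.4.0': surrounding text ignored
-semver.coerce('4.6.3.9.2-alpha2').version     // '4.6.3': extra parts truncated
-semver.coerce('version one')                  // null: no digits to work with
-```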
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/bin/semver b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/bin/semver
deleted file mode 100755
index 801e77f13..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/bin/semver
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env node
-// Standalone semver comparison program.
-// Exits successfully and prints matching version(s) if
-// any supplied version is valid and passes all tests.
-
-var argv = process.argv.slice(2)
-
-var versions = []
-
-var range = []
-
-var inc = null
-
-var version = require('../package.json').version
-
-var loose = false
-
-var includePrerelease = false
-
-var coerce = false
-
-var identifier
-
-var semver = require('../semver')
-
-var reverse = false
-
-var options = {}
-
-main()
-
-function main () {
- if (!argv.length) return help()
- while (argv.length) {
- var a = argv.shift()
- var indexOfEqualSign = a.indexOf('=')
- if (indexOfEqualSign !== -1) {
- a = a.slice(0, indexOfEqualSign)
- argv.unshift(a.slice(indexOfEqualSign + 1))
- }
- switch (a) {
- case '-rv': case '-rev': case '--rev': case '--reverse':
- reverse = true
- break
- case '-l': case '--loose':
- loose = true
- break
- case '-p': case '--include-prerelease':
- includePrerelease = true
- break
- case '-v': case '--version':
- versions.push(argv.shift())
- break
- case '-i': case '--inc': case '--increment':
- switch (argv[0]) {
- case 'major': case 'minor': case 'patch': case 'prerelease':
- case 'premajor': case 'preminor': case 'prepatch':
- inc = argv.shift()
- break
- default:
- inc = 'patch'
- break
- }
- break
- case '--preid':
- identifier = argv.shift()
- break
- case '-r': case '--range':
- range.push(argv.shift())
- break
- case '-c': case '--coerce':
- coerce = true
- break
- case '-h': case '--help': case '-?':
- return help()
- default:
- versions.push(a)
- break
- }
- }
-
- var options = { loose: loose, includePrerelease: includePrerelease }
-
- versions = versions.map(function (v) {
- return coerce ? (semver.coerce(v) || { version: v }).version : v
- }).filter(function (v) {
- return semver.valid(v)
- })
- if (!versions.length) return fail()
- if (inc && (versions.length !== 1 || range.length)) { return failInc() }
-
- for (var i = 0, l = range.length; i < l; i++) {
- versions = versions.filter(function (v) {
- return semver.satisfies(v, range[i], options)
- })
- if (!versions.length) return fail()
- }
- return success(versions)
-}
-
-function failInc () {
- console.error('--inc can only be used on a single version with no range')
- fail()
-}
-
-function fail () { process.exit(1) }
-
-function success () {
- var compare = reverse ? 'rcompare' : 'compare'
- versions.sort(function (a, b) {
- return semver[compare](a, b, options)
- }).map(function (v) {
- return semver.clean(v, options)
- }).map(function (v) {
- return inc ? semver.inc(v, inc, options, identifier) : v
- }).forEach(function (v, i, _) { console.log(v) })
-}
-
-function help () {
- console.log(['SemVer ' + version,
- '',
- 'A JavaScript implementation of the https://semver.org/ specification',
- 'Copyright Isaac Z. Schlueter',
- '',
- 'Usage: semver [options] <version> [<version> [...]]',
- 'Prints valid versions sorted by SemVer precedence',
- '',
- 'Options:',
- '-r --range <range>',
- ' Print versions that match the specified range.',
- '',
- '-i --increment [<level>]',
- ' Increment a version by the specified level. Level can',
- ' be one of: major, minor, patch, premajor, preminor,',
- " prepatch, or prerelease. Default level is 'patch'.",
- ' Only one version may be specified.',
- '',
- '--preid <identifier>',
- ' Identifier to be used to prefix premajor, preminor,',
- ' prepatch or prerelease version increments.',
- '',
- '-l --loose',
- ' Interpret versions and ranges loosely',
- '',
- '-p --include-prerelease',
- ' Always include prerelease versions in range matching',
- '',
- '-c --coerce',
- ' Coerce a string into SemVer if possible',
- ' (does not imply --loose)',
- '',
- 'Program exits successfully if any valid version satisfies',
- 'all supplied ranges, and prints all satisfying versions.',
- '',
- 'If no satisfying versions are found, then exits failure.',
- '',
- 'Versions are printed in ascending order, so supplying',
- 'multiple versions to the utility will just sort them.'
- ].join('\n'))
-}
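The removed bin script above wraps the library's `valid`, `satisfies`, `compare`, and `inc` helpers. A hedged sketch of the equivalent programmatic calls, assuming the `semver@5.x` API:

```javascript
// Roughly what `semver -r '^1.2.0' 1.2.3 1.3.0 0.9.0` does, plus a separate
// `semver -i minor 1.2.3`, using the library calls the bin script wraps
// (assumes semver@5.x).
const semver = require('semver')

const versions = ['1.2.3', '1.3.0', '0.9.0']
const matching = versions
  .filter(v => semver.valid(v))               // drop anything that is not valid semver
  .filter(v => semver.satisfies(v, '^1.2.0')) // apply the --range filter
  .sort(semver.compare)                       // ascending SemVer precedence, like the CLI output

console.log(matching)                         // [ '1.2.3', '1.3.0' ]
console.log(semver.inc('1.2.3', 'minor'))     // '1.3.0' – what --increment minor prints
```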
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/package.json b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/package.json
deleted file mode 100644
index 583397e6f..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "_from": "semver@^5.4.1",
- "_id": "semver@5.7.1",
- "_inBundle": false,
- "_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
- "_location": "/libcipm/npm-pick-manifest/semver",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "semver@^5.4.1",
- "name": "semver",
- "escapedName": "semver",
- "rawSpec": "^5.4.1",
- "saveSpec": null,
- "fetchSpec": "^5.4.1"
- },
- "_requiredBy": [
- "/libcipm/npm-pick-manifest"
- ],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "_shasum": "a954f931aeba508d307bbf069eff0c01c96116f7",
- "_spec": "semver@^5.4.1",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libcipm/node_modules/npm-pick-manifest",
- "bin": {
- "semver": "bin/semver"
- },
- "bugs": {
- "url": "https://github.com/npm/node-semver/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "The semantic version parser used by npm.",
- "devDependencies": {
- "tap": "^13.0.0-rc.18"
- },
- "files": [
- "bin",
- "range.bnf",
- "semver.js"
- ],
- "homepage": "https://github.com/npm/node-semver#readme",
- "license": "ISC",
- "main": "semver.js",
- "name": "semver",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-semver.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true
- },
- "version": "5.7.1"
-}
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/range.bnf b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/range.bnf
deleted file mode 100644
index d4c6ae0d7..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/range.bnf
+++ /dev/null
@@ -1,16 +0,0 @@
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | [1-9] ( [0-9] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
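The grammar above is what `Range.parseRange` in the accompanying `semver.js` implements. A hedged sketch of how ranges written in it desugar into primitive comparators, assuming the `semver@5.x` API:

```javascript
// Range desugaring sketch (assumes semver@5.x).
const semver = require('semver')

console.log(semver.toComparators('1.2.3 - 2.3 || ~3.0.1'))
// [ [ '>=1.2.3', '<2.4.0' ], [ '>=3.0.1', '<3.1.0' ] ]

console.log(semver.validRange('^0.2.x'))      // '>=0.2.0 <0.3.0'
console.log(semver.validRange('not a range')) // null – unparseable ranges are rejected
```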
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/semver.js b/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/semver.js
deleted file mode 100644
index d315d5d68..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/node_modules/semver/semver.js
+++ /dev/null
@@ -1,1483 +0,0 @@
-exports = module.exports = SemVer
-
-var debug
-/* istanbul ignore next */
-if (typeof process === 'object' &&
- process.env &&
- process.env.NODE_DEBUG &&
- /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
- debug = function () {
- var args = Array.prototype.slice.call(arguments, 0)
- args.unshift('SEMVER')
- console.log.apply(console, args)
- }
-} else {
- debug = function () {}
-}
-
-// Note: this is the semver.org version of the spec that it implements
-// Not necessarily the package version of this code.
-exports.SEMVER_SPEC_VERSION = '2.0.0'
-
-var MAX_LENGTH = 256
-var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
- /* istanbul ignore next */ 9007199254740991
-
-// Max safe segment length for coercion.
-var MAX_SAFE_COMPONENT_LENGTH = 16
-
-// The actual regexps go on exports.re
-var re = exports.re = []
-var src = exports.src = []
-var R = 0
-
-// The following Regular Expressions can be used for tokenizing,
-// validating, and parsing SemVer version strings.
-
-// ## Numeric Identifier
-// A single `0`, or a non-zero digit followed by zero or more digits.
-
-var NUMERICIDENTIFIER = R++
-src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
-var NUMERICIDENTIFIERLOOSE = R++
-src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
-
-// ## Non-numeric Identifier
-// Zero or more digits, followed by a letter or hyphen, and then zero or
-// more letters, digits, or hyphens.
-
-var NONNUMERICIDENTIFIER = R++
-src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
-
-// ## Main Version
-// Three dot-separated numeric identifiers.
-
-var MAINVERSION = R++
-src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')'
-
-var MAINVERSIONLOOSE = R++
-src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
-
-// ## Pre-release Version Identifier
-// A numeric identifier, or a non-numeric identifier.
-
-var PRERELEASEIDENTIFIER = R++
-src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-var PRERELEASEIDENTIFIERLOOSE = R++
-src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-// ## Pre-release Version
-// Hyphen, followed by one or more dot-separated pre-release version
-// identifiers.
-
-var PRERELEASE = R++
-src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
- '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
-
-var PRERELEASELOOSE = R++
-src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
- '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
-
-// ## Build Metadata Identifier
-// Any combination of digits, letters, or hyphens.
-
-var BUILDIDENTIFIER = R++
-src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
-
-// ## Build Metadata
-// Plus sign, followed by one or more period-separated build metadata
-// identifiers.
-
-var BUILD = R++
-src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
- '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
-
-// ## Full Version String
-// A main version, followed optionally by a pre-release version and
-// build metadata.
-
-// Note that only the major, minor, patch, and pre-release sections of
-// the version string are capturing groups. The build metadata is not a
-// capturing group, because it should never be used in version
-// comparison.
-
-var FULL = R++
-var FULLPLAIN = 'v?' + src[MAINVERSION] +
- src[PRERELEASE] + '?' +
- src[BUILD] + '?'
-
-src[FULL] = '^' + FULLPLAIN + '$'
-
-// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
-// common in the npm registry.
-var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
- src[PRERELEASELOOSE] + '?' +
- src[BUILD] + '?'
-
-var LOOSE = R++
-src[LOOSE] = '^' + LOOSEPLAIN + '$'
-
-var GTLT = R++
-src[GTLT] = '((?:<|>)?=?)'
-
-// Something like "2.*" or "1.2.x".
-// Note that "x.x" is a valid xRange identifer, meaning "any version"
-// Only the first item is strictly required.
-var XRANGEIDENTIFIERLOOSE = R++
-src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
-var XRANGEIDENTIFIER = R++
-src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
-
-var XRANGEPLAIN = R++
-src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:' + src[PRERELEASE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGEPLAINLOOSE = R++
-src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:' + src[PRERELEASELOOSE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGE = R++
-src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
-var XRANGELOOSE = R++
-src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
-
-// Coercion.
-// Extract anything that could conceivably be a part of a valid semver
-var COERCE = R++
-src[COERCE] = '(?:^|[^\\d])' +
- '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:$|[^\\d])'
-
-// Tilde ranges.
-// Meaning is "reasonably at or greater than"
-var LONETILDE = R++
-src[LONETILDE] = '(?:~>?)'
-
-var TILDETRIM = R++
-src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
-re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
-var tildeTrimReplace = '$1~'
-
-var TILDE = R++
-src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
-var TILDELOOSE = R++
-src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
-
-// Caret ranges.
-// Meaning is "at least and backwards compatible with"
-var LONECARET = R++
-src[LONECARET] = '(?:\\^)'
-
-var CARETTRIM = R++
-src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
-re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
-var caretTrimReplace = '$1^'
-
-var CARET = R++
-src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
-var CARETLOOSE = R++
-src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
-
-// A simple gt/lt/eq thing, or just "" to indicate "any version"
-var COMPARATORLOOSE = R++
-src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
-var COMPARATOR = R++
-src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
-
-// An expression to strip any whitespace between the gtlt and the thing
-// it modifies, so that `> 1.2.3` ==> `>1.2.3`
-var COMPARATORTRIM = R++
-src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
- '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
-
-// this one has to use the /g flag
-re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
-var comparatorTrimReplace = '$1$2$3'
-
-// Something like `1.2.3 - 1.2.4`
-// Note that these all use the loose form, because they'll be
-// checked against either the strict or loose comparator form
-// later.
-var HYPHENRANGE = R++
-src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAIN] + ')' +
- '\\s*$'
-
-var HYPHENRANGELOOSE = R++
-src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s*$'
-
-// Star ranges basically just allow anything at all.
-var STAR = R++
-src[STAR] = '(<|>)?=?\\s*\\*'
-
-// Compile to actual regexp objects.
-// All are flag-free, unless they were created above with a flag.
-for (var i = 0; i < R; i++) {
- debug(i, src[i])
- if (!re[i]) {
- re[i] = new RegExp(src[i])
- }
-}
-
-exports.parse = parse
-function parse (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- if (version.length > MAX_LENGTH) {
- return null
- }
-
- var r = options.loose ? re[LOOSE] : re[FULL]
- if (!r.test(version)) {
- return null
- }
-
- try {
- return new SemVer(version, options)
- } catch (er) {
- return null
- }
-}
-
-exports.valid = valid
-function valid (version, options) {
- var v = parse(version, options)
- return v ? v.version : null
-}
-
-exports.clean = clean
-function clean (version, options) {
- var s = parse(version.trim().replace(/^[=v]+/, ''), options)
- return s ? s.version : null
-}
-
-exports.SemVer = SemVer
-
-function SemVer (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
- if (version instanceof SemVer) {
- if (version.loose === options.loose) {
- return version
- } else {
- version = version.version
- }
- } else if (typeof version !== 'string') {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- if (version.length > MAX_LENGTH) {
- throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
- }
-
- if (!(this instanceof SemVer)) {
- return new SemVer(version, options)
- }
-
- debug('SemVer', version, options)
- this.options = options
- this.loose = !!options.loose
-
- var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
-
- if (!m) {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- this.raw = version
-
- // these are actually numbers
- this.major = +m[1]
- this.minor = +m[2]
- this.patch = +m[3]
-
- if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
- throw new TypeError('Invalid major version')
- }
-
- if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
- throw new TypeError('Invalid minor version')
- }
-
- if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
- throw new TypeError('Invalid patch version')
- }
-
- // numberify any prerelease numeric ids
- if (!m[4]) {
- this.prerelease = []
- } else {
- this.prerelease = m[4].split('.').map(function (id) {
- if (/^[0-9]+$/.test(id)) {
- var num = +id
- if (num >= 0 && num < MAX_SAFE_INTEGER) {
- return num
- }
- }
- return id
- })
- }
-
- this.build = m[5] ? m[5].split('.') : []
- this.format()
-}
-
-SemVer.prototype.format = function () {
- this.version = this.major + '.' + this.minor + '.' + this.patch
- if (this.prerelease.length) {
- this.version += '-' + this.prerelease.join('.')
- }
- return this.version
-}
-
-SemVer.prototype.toString = function () {
- return this.version
-}
-
-SemVer.prototype.compare = function (other) {
- debug('SemVer.compare', this.version, this.options, other)
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return this.compareMain(other) || this.comparePre(other)
-}
-
-SemVer.prototype.compareMain = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch)
-}
-
-SemVer.prototype.comparePre = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- // NOT having a prerelease is > having one
- if (this.prerelease.length && !other.prerelease.length) {
- return -1
- } else if (!this.prerelease.length && other.prerelease.length) {
- return 1
- } else if (!this.prerelease.length && !other.prerelease.length) {
- return 0
- }
-
- var i = 0
- do {
- var a = this.prerelease[i]
- var b = other.prerelease[i]
- debug('prerelease compare', i, a, b)
- if (a === undefined && b === undefined) {
- return 0
- } else if (b === undefined) {
- return 1
- } else if (a === undefined) {
- return -1
- } else if (a === b) {
- continue
- } else {
- return compareIdentifiers(a, b)
- }
- } while (++i)
-}
-
-// preminor will bump the version up to the next minor release, and immediately
-// down to pre-release. premajor and prepatch work the same way.
-SemVer.prototype.inc = function (release, identifier) {
- switch (release) {
- case 'premajor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor = 0
- this.major++
- this.inc('pre', identifier)
- break
- case 'preminor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor++
- this.inc('pre', identifier)
- break
- case 'prepatch':
-      // If this is already a prerelease, it will bump to the next version,
-      // dropping any prereleases that might already exist, since they are
-      // not relevant at this point.
- this.prerelease.length = 0
- this.inc('patch', identifier)
- this.inc('pre', identifier)
- break
- // If the input is a non-prerelease version, this acts the same as
- // prepatch.
- case 'prerelease':
- if (this.prerelease.length === 0) {
- this.inc('patch', identifier)
- }
- this.inc('pre', identifier)
- break
-
- case 'major':
- // If this is a pre-major version, bump up to the same major version.
- // Otherwise increment major.
- // 1.0.0-5 bumps to 1.0.0
- // 1.1.0 bumps to 2.0.0
- if (this.minor !== 0 ||
- this.patch !== 0 ||
- this.prerelease.length === 0) {
- this.major++
- }
- this.minor = 0
- this.patch = 0
- this.prerelease = []
- break
- case 'minor':
- // If this is a pre-minor version, bump up to the same minor version.
- // Otherwise increment minor.
- // 1.2.0-5 bumps to 1.2.0
- // 1.2.1 bumps to 1.3.0
- if (this.patch !== 0 || this.prerelease.length === 0) {
- this.minor++
- }
- this.patch = 0
- this.prerelease = []
- break
- case 'patch':
- // If this is not a pre-release version, it will increment the patch.
- // If it is a pre-release it will bump up to the same patch version.
- // 1.2.0-5 patches to 1.2.0
- // 1.2.0 patches to 1.2.1
- if (this.prerelease.length === 0) {
- this.patch++
- }
- this.prerelease = []
- break
- // This probably shouldn't be used publicly.
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
- case 'pre':
- if (this.prerelease.length === 0) {
- this.prerelease = [0]
- } else {
- var i = this.prerelease.length
- while (--i >= 0) {
- if (typeof this.prerelease[i] === 'number') {
- this.prerelease[i]++
- i = -2
- }
- }
- if (i === -1) {
- // didn't increment anything
- this.prerelease.push(0)
- }
- }
- if (identifier) {
- // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
- // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
- if (this.prerelease[0] === identifier) {
- if (isNaN(this.prerelease[1])) {
- this.prerelease = [identifier, 0]
- }
- } else {
- this.prerelease = [identifier, 0]
- }
- }
- break
-
- default:
- throw new Error('invalid increment argument: ' + release)
- }
- this.format()
- this.raw = this.version
- return this
-}
-
-exports.inc = inc
-function inc (version, release, loose, identifier) {
- if (typeof (loose) === 'string') {
- identifier = loose
- loose = undefined
- }
-
- try {
- return new SemVer(version, loose).inc(release, identifier).version
- } catch (er) {
- return null
- }
-}
-
-exports.diff = diff
-function diff (version1, version2) {
- if (eq(version1, version2)) {
- return null
- } else {
- var v1 = parse(version1)
- var v2 = parse(version2)
- var prefix = ''
- if (v1.prerelease.length || v2.prerelease.length) {
- prefix = 'pre'
- var defaultResult = 'prerelease'
- }
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return prefix + key
- }
- }
- }
- return defaultResult // may be undefined
- }
-}
-
-exports.compareIdentifiers = compareIdentifiers
-
-var numeric = /^[0-9]+$/
-function compareIdentifiers (a, b) {
- var anum = numeric.test(a)
- var bnum = numeric.test(b)
-
- if (anum && bnum) {
- a = +a
- b = +b
- }
-
- return a === b ? 0
- : (anum && !bnum) ? -1
- : (bnum && !anum) ? 1
- : a < b ? -1
- : 1
-}
-
-exports.rcompareIdentifiers = rcompareIdentifiers
-function rcompareIdentifiers (a, b) {
- return compareIdentifiers(b, a)
-}
-
-exports.major = major
-function major (a, loose) {
- return new SemVer(a, loose).major
-}
-
-exports.minor = minor
-function minor (a, loose) {
- return new SemVer(a, loose).minor
-}
-
-exports.patch = patch
-function patch (a, loose) {
- return new SemVer(a, loose).patch
-}
-
-exports.compare = compare
-function compare (a, b, loose) {
- return new SemVer(a, loose).compare(new SemVer(b, loose))
-}
-
-exports.compareLoose = compareLoose
-function compareLoose (a, b) {
- return compare(a, b, true)
-}
-
-exports.rcompare = rcompare
-function rcompare (a, b, loose) {
- return compare(b, a, loose)
-}
-
-exports.sort = sort
-function sort (list, loose) {
- return list.sort(function (a, b) {
- return exports.compare(a, b, loose)
- })
-}
-
-exports.rsort = rsort
-function rsort (list, loose) {
- return list.sort(function (a, b) {
- return exports.rcompare(a, b, loose)
- })
-}
-
-exports.gt = gt
-function gt (a, b, loose) {
- return compare(a, b, loose) > 0
-}
-
-exports.lt = lt
-function lt (a, b, loose) {
- return compare(a, b, loose) < 0
-}
-
-exports.eq = eq
-function eq (a, b, loose) {
- return compare(a, b, loose) === 0
-}
-
-exports.neq = neq
-function neq (a, b, loose) {
- return compare(a, b, loose) !== 0
-}
-
-exports.gte = gte
-function gte (a, b, loose) {
- return compare(a, b, loose) >= 0
-}
-
-exports.lte = lte
-function lte (a, b, loose) {
- return compare(a, b, loose) <= 0
-}
-
-exports.cmp = cmp
-function cmp (a, op, b, loose) {
- switch (op) {
- case '===':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a === b
-
- case '!==':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a !== b
-
- case '':
- case '=':
- case '==':
- return eq(a, b, loose)
-
- case '!=':
- return neq(a, b, loose)
-
- case '>':
- return gt(a, b, loose)
-
- case '>=':
- return gte(a, b, loose)
-
- case '<':
- return lt(a, b, loose)
-
- case '<=':
- return lte(a, b, loose)
-
- default:
- throw new TypeError('Invalid operator: ' + op)
- }
-}
-
-exports.Comparator = Comparator
-function Comparator (comp, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (comp instanceof Comparator) {
- if (comp.loose === !!options.loose) {
- return comp
- } else {
- comp = comp.value
- }
- }
-
- if (!(this instanceof Comparator)) {
- return new Comparator(comp, options)
- }
-
- debug('comparator', comp, options)
- this.options = options
- this.loose = !!options.loose
- this.parse(comp)
-
- if (this.semver === ANY) {
- this.value = ''
- } else {
- this.value = this.operator + this.semver.version
- }
-
- debug('comp', this)
-}
-
-var ANY = {}
-Comparator.prototype.parse = function (comp) {
- var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var m = comp.match(r)
-
- if (!m) {
- throw new TypeError('Invalid comparator: ' + comp)
- }
-
- this.operator = m[1]
- if (this.operator === '=') {
- this.operator = ''
- }
-
- // if it literally is just '>' or '' then allow anything.
- if (!m[2]) {
- this.semver = ANY
- } else {
- this.semver = new SemVer(m[2], this.options.loose)
- }
-}
-
-Comparator.prototype.toString = function () {
- return this.value
-}
-
-Comparator.prototype.test = function (version) {
- debug('Comparator.test', version, this.options.loose)
-
- if (this.semver === ANY) {
- return true
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- return cmp(version, this.operator, this.semver, this.options)
-}
-
-Comparator.prototype.intersects = function (comp, options) {
- if (!(comp instanceof Comparator)) {
- throw new TypeError('a Comparator is required')
- }
-
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- var rangeTmp
-
- if (this.operator === '') {
- rangeTmp = new Range(comp.value, options)
- return satisfies(this.value, rangeTmp, options)
- } else if (comp.operator === '') {
- rangeTmp = new Range(this.value, options)
- return satisfies(comp.semver, rangeTmp, options)
- }
-
- var sameDirectionIncreasing =
- (this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '>=' || comp.operator === '>')
- var sameDirectionDecreasing =
- (this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '<=' || comp.operator === '<')
- var sameSemVer = this.semver.version === comp.semver.version
- var differentDirectionsInclusive =
- (this.operator === '>=' || this.operator === '<=') &&
- (comp.operator === '>=' || comp.operator === '<=')
- var oppositeDirectionsLessThan =
- cmp(this.semver, '<', comp.semver, options) &&
- ((this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '<=' || comp.operator === '<'))
- var oppositeDirectionsGreaterThan =
- cmp(this.semver, '>', comp.semver, options) &&
- ((this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '>=' || comp.operator === '>'))
-
- return sameDirectionIncreasing || sameDirectionDecreasing ||
- (sameSemVer && differentDirectionsInclusive) ||
- oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
-}
-
-exports.Range = Range
-function Range (range, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (range instanceof Range) {
- if (range.loose === !!options.loose &&
- range.includePrerelease === !!options.includePrerelease) {
- return range
- } else {
- return new Range(range.raw, options)
- }
- }
-
- if (range instanceof Comparator) {
- return new Range(range.value, options)
- }
-
- if (!(this instanceof Range)) {
- return new Range(range, options)
- }
-
- this.options = options
- this.loose = !!options.loose
- this.includePrerelease = !!options.includePrerelease
-
-  // First, split on the || (boolean OR) separator
- this.raw = range
- this.set = range.split(/\s*\|\|\s*/).map(function (range) {
- return this.parseRange(range.trim())
- }, this).filter(function (c) {
- // throw out any that are not relevant for whatever reason
- return c.length
- })
-
- if (!this.set.length) {
- throw new TypeError('Invalid SemVer Range: ' + range)
- }
-
- this.format()
-}
-
-Range.prototype.format = function () {
- this.range = this.set.map(function (comps) {
- return comps.join(' ').trim()
- }).join('||').trim()
- return this.range
-}
-
-Range.prototype.toString = function () {
- return this.range
-}
-
-Range.prototype.parseRange = function (range) {
- var loose = this.options.loose
- range = range.trim()
- // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
- var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
- range = range.replace(hr, hyphenReplace)
- debug('hyphen replace', range)
- // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
- range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
- debug('comparator trim', range, re[COMPARATORTRIM])
-
- // `~ 1.2.3` => `~1.2.3`
- range = range.replace(re[TILDETRIM], tildeTrimReplace)
-
- // `^ 1.2.3` => `^1.2.3`
- range = range.replace(re[CARETTRIM], caretTrimReplace)
-
- // normalize spaces
- range = range.split(/\s+/).join(' ')
-
- // At this point, the range is completely trimmed and
- // ready to be split into comparators.
-
- var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var set = range.split(' ').map(function (comp) {
- return parseComparator(comp, this.options)
- }, this).join(' ').split(/\s+/)
- if (this.options.loose) {
- // in loose mode, throw out any that are not valid comparators
- set = set.filter(function (comp) {
- return !!comp.match(compRe)
- })
- }
- set = set.map(function (comp) {
- return new Comparator(comp, this.options)
- }, this)
-
- return set
-}
-
-Range.prototype.intersects = function (range, options) {
- if (!(range instanceof Range)) {
- throw new TypeError('a Range is required')
- }
-
- return this.set.some(function (thisComparators) {
- return thisComparators.every(function (thisComparator) {
- return range.set.some(function (rangeComparators) {
- return rangeComparators.every(function (rangeComparator) {
- return thisComparator.intersects(rangeComparator, options)
- })
- })
- })
- })
-}
-
-// Mostly just for testing and legacy API reasons
-exports.toComparators = toComparators
-function toComparators (range, options) {
- return new Range(range, options).set.map(function (comp) {
- return comp.map(function (c) {
- return c.value
- }).join(' ').trim().split(' ')
- })
-}
-
-// comprised of xranges, tildes, stars, and gtlt's at this point.
-// already replaced the hyphen ranges
-// turn into a set of JUST comparators.
-function parseComparator (comp, options) {
- debug('comp', comp, options)
- comp = replaceCarets(comp, options)
- debug('caret', comp)
- comp = replaceTildes(comp, options)
- debug('tildes', comp)
- comp = replaceXRanges(comp, options)
- debug('xrange', comp)
- comp = replaceStars(comp, options)
- debug('stars', comp)
- return comp
-}
-
-function isX (id) {
- return !id || id.toLowerCase() === 'x' || id === '*'
-}
-
-// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
-function replaceTildes (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceTilde(comp, options)
- }).join(' ')
-}
-
-function replaceTilde (comp, options) {
- var r = options.loose ? re[TILDELOOSE] : re[TILDE]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('tilde', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- // ~1.2 == >=1.2.0 <1.3.0
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else if (pr) {
- debug('replaceTilde pr', pr)
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- } else {
- // ~1.2.3 == >=1.2.3 <1.3.0
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('tilde return', ret)
- return ret
- })
-}
-
-// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
-function replaceCarets (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceCaret(comp, options)
- }).join(' ')
-}
-
-function replaceCaret (comp, options) {
- debug('caret', comp, options)
- var r = options.loose ? re[CARETLOOSE] : re[CARET]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('caret', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- if (M === '0') {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else {
- ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
- }
- } else if (pr) {
- debug('replaceCaret pr', pr)
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + (+M + 1) + '.0.0'
- }
- } else {
- debug('no pr')
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + (+M + 1) + '.0.0'
- }
- }
-
- debug('caret return', ret)
- return ret
- })
-}
-
-function replaceXRanges (comp, options) {
- debug('replaceXRanges', comp, options)
- return comp.split(/\s+/).map(function (comp) {
- return replaceXRange(comp, options)
- }).join(' ')
-}
-
-function replaceXRange (comp, options) {
- comp = comp.trim()
- var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
- return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
- debug('xRange', comp, ret, gtlt, M, m, p, pr)
- var xM = isX(M)
- var xm = xM || isX(m)
- var xp = xm || isX(p)
- var anyX = xp
-
- if (gtlt === '=' && anyX) {
- gtlt = ''
- }
-
- if (xM) {
- if (gtlt === '>' || gtlt === '<') {
- // nothing is allowed
- ret = '<0.0.0'
- } else {
- // nothing is forbidden
- ret = '*'
- }
- } else if (gtlt && anyX) {
- // we know patch is an x, because we have any x at all.
- // replace X with 0
- if (xm) {
- m = 0
- }
- p = 0
-
- if (gtlt === '>') {
- // >1 => >=2.0.0
- // >1.2 => >=1.3.0
- // >1.2.3 => >= 1.2.4
- gtlt = '>='
- if (xm) {
- M = +M + 1
- m = 0
- p = 0
- } else {
- m = +m + 1
- p = 0
- }
- } else if (gtlt === '<=') {
- // <=0.7.x is actually <0.8.0, since any 0.7.x should
- // pass. Similarly, <=7.x is actually <8.0.0, etc.
- gtlt = '<'
- if (xm) {
- M = +M + 1
- } else {
- m = +m + 1
- }
- }
-
- ret = gtlt + M + '.' + m + '.' + p
- } else if (xm) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (xp) {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('xRange return', ret)
-
- return ret
- })
-}
-
-// Because * is AND-ed with everything else in the comparator,
-// and '' means "any version", just remove the *s entirely.
-function replaceStars (comp, options) {
- debug('replaceStars', comp, options)
- // Looseness is ignored here. star is always as loose as it gets!
- return comp.trim().replace(re[STAR], '')
-}
-
-// This function is passed to string.replace(re[HYPHENRANGE])
-// M, m, patch, prerelease, build
-// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-function hyphenReplace ($0,
- from, fM, fm, fp, fpr, fb,
- to, tM, tm, tp, tpr, tb) {
- if (isX(fM)) {
- from = ''
- } else if (isX(fm)) {
- from = '>=' + fM + '.0.0'
- } else if (isX(fp)) {
- from = '>=' + fM + '.' + fm + '.0'
- } else {
- from = '>=' + from
- }
-
- if (isX(tM)) {
- to = ''
- } else if (isX(tm)) {
- to = '<' + (+tM + 1) + '.0.0'
- } else if (isX(tp)) {
- to = '<' + tM + '.' + (+tm + 1) + '.0'
- } else if (tpr) {
- to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
- } else {
- to = '<=' + to
- }
-
- return (from + ' ' + to).trim()
-}
-
-// if ANY of the sets match ALL of its comparators, then pass
-Range.prototype.test = function (version) {
- if (!version) {
- return false
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- for (var i = 0; i < this.set.length; i++) {
- if (testSet(this.set[i], version, this.options)) {
- return true
- }
- }
- return false
-}
-
-function testSet (set, version, options) {
- for (var i = 0; i < set.length; i++) {
- if (!set[i].test(version)) {
- return false
- }
- }
-
- if (version.prerelease.length && !options.includePrerelease) {
- // Find the set of versions that are allowed to have prereleases
- // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
- // That should allow `1.2.3-pr.2` to pass.
- // However, `1.2.4-alpha.notready` should NOT be allowed,
- // even though it's within the range set by the comparators.
- for (i = 0; i < set.length; i++) {
- debug(set[i].semver)
- if (set[i].semver === ANY) {
- continue
- }
-
- if (set[i].semver.prerelease.length > 0) {
- var allowed = set[i].semver
- if (allowed.major === version.major &&
- allowed.minor === version.minor &&
- allowed.patch === version.patch) {
- return true
- }
- }
- }
-
- // Version has a -pre, but it's not one of the ones we like.
- return false
- }
-
- return true
-}
-
-exports.satisfies = satisfies
-function satisfies (version, range, options) {
- try {
- range = new Range(range, options)
- } catch (er) {
- return false
- }
- return range.test(version)
-}
-
-exports.maxSatisfying = maxSatisfying
-function maxSatisfying (versions, range, options) {
- var max = null
- var maxSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!max || maxSV.compare(v) === -1) {
- // compare(max, v, true)
- max = v
- maxSV = new SemVer(max, options)
- }
- }
- })
- return max
-}
-
-exports.minSatisfying = minSatisfying
-function minSatisfying (versions, range, options) {
- var min = null
- var minSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!min || minSV.compare(v) === 1) {
- // compare(min, v, true)
- min = v
- minSV = new SemVer(min, options)
- }
- }
- })
- return min
-}
-
-exports.minVersion = minVersion
-function minVersion (range, loose) {
- range = new Range(range, loose)
-
- var minver = new SemVer('0.0.0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = new SemVer('0.0.0-0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = null
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- comparators.forEach(function (comparator) {
- // Clone to avoid manipulating the comparator's semver object.
- var compver = new SemVer(comparator.semver.version)
- switch (comparator.operator) {
- case '>':
- if (compver.prerelease.length === 0) {
- compver.patch++
- } else {
- compver.prerelease.push(0)
- }
- compver.raw = compver.format()
- /* fallthrough */
- case '':
- case '>=':
- if (!minver || gt(minver, compver)) {
- minver = compver
- }
- break
- case '<':
- case '<=':
- /* Ignore maximum versions */
- break
- /* istanbul ignore next */
- default:
- throw new Error('Unexpected operation: ' + comparator.operator)
- }
- })
- }
-
- if (minver && range.test(minver)) {
- return minver
- }
-
- return null
-}
-
-exports.validRange = validRange
-function validRange (range, options) {
- try {
- // Return '*' instead of '' so that truthiness works.
- // This will throw if it's invalid anyway
- return new Range(range, options).range || '*'
- } catch (er) {
- return null
- }
-}
-
-// Determine if version is less than all the versions possible in the range
-exports.ltr = ltr
-function ltr (version, range, options) {
- return outside(version, range, '<', options)
-}
-
-// Determine if version is greater than all the versions possible in the range.
-exports.gtr = gtr
-function gtr (version, range, options) {
- return outside(version, range, '>', options)
-}
-
-exports.outside = outside
-function outside (version, range, hilo, options) {
- version = new SemVer(version, options)
- range = new Range(range, options)
-
- var gtfn, ltefn, ltfn, comp, ecomp
- switch (hilo) {
- case '>':
- gtfn = gt
- ltefn = lte
- ltfn = lt
- comp = '>'
- ecomp = '>='
- break
- case '<':
- gtfn = lt
- ltefn = gte
- ltfn = gt
- comp = '<'
- ecomp = '<='
- break
- default:
- throw new TypeError('Must provide a hilo val of "<" or ">"')
- }
-
-  // If it satisfies the range it is not outside
- if (satisfies(version, range, options)) {
- return false
- }
-
- // From now on, variable terms are as if we're in "gtr" mode.
- // but note that everything is flipped for the "ltr" function.
-
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- var high = null
- var low = null
-
- comparators.forEach(function (comparator) {
- if (comparator.semver === ANY) {
- comparator = new Comparator('>=0.0.0')
- }
- high = high || comparator
- low = low || comparator
- if (gtfn(comparator.semver, high.semver, options)) {
- high = comparator
- } else if (ltfn(comparator.semver, low.semver, options)) {
- low = comparator
- }
- })
-
-    // If the edge version comparator has an operator then our version
- // isn't outside it
- if (high.operator === comp || high.operator === ecomp) {
- return false
- }
-
- // If the lowest version comparator has an operator and our version
- // is less than it then it isn't higher than the range
- if ((!low.operator || low.operator === comp) &&
- ltefn(version, low.semver)) {
- return false
- } else if (low.operator === ecomp && ltfn(version, low.semver)) {
- return false
- }
- }
- return true
-}
-
-exports.prerelease = prerelease
-function prerelease (version, options) {
- var parsed = parse(version, options)
- return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
-}
-
-exports.intersects = intersects
-function intersects (r1, r2, options) {
- r1 = new Range(r1, options)
- r2 = new Range(r2, options)
- return r1.intersects(r2)
-}
-
-exports.coerce = coerce
-function coerce (version) {
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- var match = version.match(re[COERCE])
-
- if (match == null) {
- return null
- }
-
- return parse(match[1] +
- '.' + (match[2] || '0') +
- '.' + (match[3] || '0'))
-}
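A short, hedged smoke test of the exports defined in the file above (parsing, range matching, and the `gtr`/`outside` helpers), assuming the `semver@5.x` API:

```javascript
// Exercising the exports above (assumes semver@5.x).
const semver = require('semver')

console.log(semver.parse('1.2.3-beta.1+build.5').prerelease)  // [ 'beta', 1 ]
console.log(semver.satisfies('1.2.4-alpha.1', '^1.2.3'))      // false – prereleases excluded by default
console.log(semver.satisfies('1.2.4-alpha.1', '^1.2.3',
  { includePrerelease: true }))                               // true
console.log(semver.maxSatisfying(['1.1.0', '1.4.2', '2.0.0'], '^1.0.0')) // '1.4.2'
console.log(semver.gtr('2.0.1', '^1.0.0'))                    // true – above every version in the range
```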
diff --git a/node_modules/libcipm/node_modules/npm-pick-manifest/package.json b/node_modules/libcipm/node_modules/npm-pick-manifest/package.json
deleted file mode 100644
index 68412f665..000000000
--- a/node_modules/libcipm/node_modules/npm-pick-manifest/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "_from": "npm-pick-manifest@^3.0.0",
- "_id": "npm-pick-manifest@3.0.2",
- "_inBundle": false,
- "_integrity": "sha512-wNprTNg+X5nf+tDi+hbjdHhM4bX+mKqv6XmPh7B5eG+QY9VARfQPfCEH013H5GqfNj6ee8Ij2fg8yk0mzps1Vw==",
- "_location": "/libcipm/npm-pick-manifest",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-pick-manifest@^3.0.0",
- "name": "npm-pick-manifest",
- "escapedName": "npm-pick-manifest",
- "rawSpec": "^3.0.0",
- "saveSpec": null,
- "fetchSpec": "^3.0.0"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-3.0.2.tgz",
- "_shasum": "f4d9e5fd4be2153e5f4e5f9b7be8dc419a99abb7",
- "_spec": "npm-pick-manifest@^3.0.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/npm-pick-manifest/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "figgy-pudding": "^3.5.1",
- "npm-package-arg": "^6.0.0",
- "semver": "^5.4.1"
- },
- "deprecated": false,
- "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
- "devDependencies": {
- "nyc": "^13.1.0",
- "standard": "^10.0.3",
- "standard-version": "^4.4.0",
- "tap": "^12.0.1",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/npm/npm-pick-manifest#readme",
- "keywords": [
- "npm",
- "semver",
- "package manager"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "npm-pick-manifest",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/npm-pick-manifest.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --100 --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "3.0.2"
-}
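The metadata above describes `npm-pick-manifest`, which selects a manifest from a packument according to npm's semver resolution rules. A hedged sketch of its use, assuming npm-pick-manifest@3 exports a single `pickManifest(packument, wanted)` function as its README describes; the packument below is hypothetical:

```javascript
// Hypothetical usage of npm-pick-manifest@3 (the package whose metadata is removed above).
const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',                      // hypothetical package metadata document
  'dist-tags': { latest: '1.4.2' },
  versions: {
    '1.1.0': { name: 'example-pkg', version: '1.1.0' },
    '1.4.2': { name: 'example-pkg', version: '1.4.2' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' }
  }
}

const manifest = pickManifest(packument, '^1.2.0')
console.log(manifest.version) // '1.4.2' – highest version satisfying the range
```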
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/CHANGELOG.md b/node_modules/libcipm/node_modules/npm-registry-fetch/CHANGELOG.md
deleted file mode 100644
index 3599c6b2f..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/CHANGELOG.md
+++ /dev/null
@@ -1,250 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="4.0.3"></a>
-## [4.0.3](https://github.com/npm/registry-fetch/compare/v4.0.2...v4.0.3) (2020-02-13)
-
-
-### Bug Fixes
-
-* always bypass cache when ?write=true ([ba8b4fe](https://github.com/npm/registry-fetch/commit/ba8b4fe))
-* use 30s default for timeout as per README ([69c2977](https://github.com/npm/registry-fetch/commit/69c2977)), closes [#20](https://github.com/npm/registry-fetch/issues/20)
-
-
-
-<a name="4.0.2"></a>
-## [4.0.2](https://github.com/npm/registry-fetch/compare/v4.0.0...v4.0.2) (2019-10-04)
-
-
-### Bug Fixes
-
-* Add null check on body on 401 errors ([e3a0186](https://github.com/npm/registry-fetch/commit/e3a0186)), closes [#9](https://github.com/npm/registry-fetch/issues/9)
-* **deps:** Add explicit dependency on safe-buffer ([8eae5f0](https://github.com/npm/registry-fetch/commit/8eae5f0)), closes [npm/libnpmaccess#2](https://github.com/npm/libnpmaccess/issues/2) [#3](https://github.com/npm/registry-fetch/issues/3)
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/npm/registry-fetch/compare/v3.9.1...v4.0.0) (2019-07-15)
-
-
-* cacache@12.0.0, infer uid from cache folder ([0c4f060](https://github.com/npm/registry-fetch/commit/0c4f060))
-
-
-### BREAKING CHANGES
-
-* uid and gid are inferred from cache folder, rather than
-being passed in as options.
-
-
-
-<a name="3.9.1"></a>
-## [3.9.1](https://github.com/npm/registry-fetch/compare/v3.9.0...v3.9.1) (2019-07-02)
-
-
-
-<a name="3.9.0"></a>
-# [3.9.0](https://github.com/npm/registry-fetch/compare/v3.8.0...v3.9.0) (2019-01-24)
-
-
-### Features
-
-* **auth:** support username:password encoded legacy _auth ([a91f90c](https://github.com/npm/registry-fetch/commit/a91f90c))
-
-
-
-<a name="3.8.0"></a>
-# [3.8.0](https://github.com/npm/registry-fetch/compare/v3.7.0...v3.8.0) (2018-08-23)
-
-
-### Features
-
-* **mapJson:** add support for passing in json stream mapper ([0600986](https://github.com/npm/registry-fetch/commit/0600986))
-
-
-
-<a name="3.7.0"></a>
-# [3.7.0](https://github.com/npm/registry-fetch/compare/v3.6.0...v3.7.0) (2018-08-23)
-
-
-### Features
-
-* **json.stream:** add utility function for streamed JSON parsing ([051d969](https://github.com/npm/registry-fetch/commit/051d969))
-
-
-
-<a name="3.6.0"></a>
-# [3.6.0](https://github.com/npm/registry-fetch/compare/v3.5.0...v3.6.0) (2018-08-22)
-
-
-### Bug Fixes
-
-* **docs:** document opts.forceAuth ([40bcd65](https://github.com/npm/registry-fetch/commit/40bcd65))
-
-
-### Features
-
-* **opts.ignoreBody:** add a boolean to throw away response bodies ([6923702](https://github.com/npm/registry-fetch/commit/6923702))
-
-
-
-<a name="3.5.0"></a>
-# [3.5.0](https://github.com/npm/registry-fetch/compare/v3.4.0...v3.5.0) (2018-08-22)
-
-
-### Features
-
-* **pkgid:** heuristic pkgid calculation for errors ([2e789a5](https://github.com/npm/registry-fetch/commit/2e789a5))
-
-
-
-<a name="3.4.0"></a>
-# [3.4.0](https://github.com/npm/registry-fetch/compare/v3.3.0...v3.4.0) (2018-08-22)
-
-
-### Bug Fixes
-
-* **deps:** use new figgy-pudding with aliases fix ([0308f54](https://github.com/npm/registry-fetch/commit/0308f54))
-
-
-### Features
-
-* **auth:** add forceAuth option to force a specific auth mechanism ([4524d17](https://github.com/npm/registry-fetch/commit/4524d17))
-
-
-
-<a name="3.3.0"></a>
-# [3.3.0](https://github.com/npm/registry-fetch/compare/v3.2.1...v3.3.0) (2018-08-21)
-
-
-### Bug Fixes
-
-* **query:** stop including undefined keys ([4718b1b](https://github.com/npm/registry-fetch/commit/4718b1b))
-
-
-### Features
-
-* **otp:** use heuristic detection for malformed EOTP responses ([f035194](https://github.com/npm/registry-fetch/commit/f035194))
-
-
-
-<a name="3.2.1"></a>
-## [3.2.1](https://github.com/npm/registry-fetch/compare/v3.2.0...v3.2.1) (2018-08-16)
-
-
-### Bug Fixes
-
-* **opts:** pass through non-null opts.retry ([beba040](https://github.com/npm/registry-fetch/commit/beba040))
-
-
-
-<a name="3.2.0"></a>
-# [3.2.0](https://github.com/npm/registry-fetch/compare/v3.1.1...v3.2.0) (2018-07-27)
-
-
-### Features
-
-* **gzip:** add opts.gzip convenience opt ([340abe0](https://github.com/npm/registry-fetch/commit/340abe0))
-
-
-
-<a name="3.1.1"></a>
-## [3.1.1](https://github.com/npm/registry-fetch/compare/v3.1.0...v3.1.1) (2018-04-09)
-
-
-
-<a name="3.1.0"></a>
-# [3.1.0](https://github.com/npm/registry-fetch/compare/v3.0.0...v3.1.0) (2018-04-09)
-
-
-### Features
-
-* **config:** support no-proxy and https-proxy options ([9aa906b](https://github.com/npm/registry-fetch/commit/9aa906b))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/npm/registry-fetch/compare/v2.1.0...v3.0.0) (2018-04-09)
-
-
-### Bug Fixes
-
-* **api:** pacote integration-related fixes ([a29de4f](https://github.com/npm/registry-fetch/commit/a29de4f))
-* **config:** stop caring about opts.config ([5856a6f](https://github.com/npm/registry-fetch/commit/5856a6f))
-
-
-### BREAKING CHANGES
-
-* **config:** opts.config is no longer supported. Pass the options down in opts itself.
-
-
-
-<a name="2.1.0"></a>
-# [2.1.0](https://github.com/npm/registry-fetch/compare/v2.0.0...v2.1.0) (2018-04-08)
-
-
-### Features
-
-* **token:** accept opts.token for opts._authToken ([108c9f0](https://github.com/npm/registry-fetch/commit/108c9f0))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/npm/registry-fetch/compare/v1.1.1...v2.0.0) (2018-04-08)
-
-
-### meta
-
-* drop support for node@4 ([758536e](https://github.com/npm/registry-fetch/commit/758536e))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="1.1.1"></a>
-## [1.1.1](https://github.com/npm/registry-fetch/compare/v1.1.0...v1.1.1) (2018-04-06)
-
-
-
-<a name="1.1.0"></a>
-# [1.1.0](https://github.com/npm/registry-fetch/compare/v1.0.1...v1.1.0) (2018-03-16)
-
-
-### Features
-
-* **specs:** can use opts.spec to trigger pickManifest ([85c4ac9](https://github.com/npm/registry-fetch/commit/85c4ac9))
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/npm/registry-fetch/compare/v1.0.0...v1.0.1) (2018-03-16)
-
-
-### Bug Fixes
-
-* **query:** oops console.log ([870e4f5](https://github.com/npm/registry-fetch/commit/870e4f5))
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2018-03-16)
-
-
-### Bug Fixes
-
-* **auth:** get auth working with all the little details ([84b94ba](https://github.com/npm/registry-fetch/commit/84b94ba))
-* **deps:** add bluebird as an actual dep ([1286e31](https://github.com/npm/registry-fetch/commit/1286e31))
-* **errors:** Unknown auth errors use default code ([#1](https://github.com/npm/registry-fetch/issues/1)) ([3d91b93](https://github.com/npm/registry-fetch/commit/3d91b93))
-* **standard:** remove args from invocation ([9620a0a](https://github.com/npm/registry-fetch/commit/9620a0a))
-
-
-### Features
-
-* **api:** baseline kinda-working API impl ([bf91f9f](https://github.com/npm/registry-fetch/commit/bf91f9f))
-* **body:** automatic handling of different opts.body values ([f3b97db](https://github.com/npm/registry-fetch/commit/f3b97db))
-* **config:** nicer input config input handling ([b9ce21d](https://github.com/npm/registry-fetch/commit/b9ce21d))
-* **opts:** use figgy-pudding for opts handling ([0abd527](https://github.com/npm/registry-fetch/commit/0abd527))
-* **query:** add query utility support ([65ea8b1](https://github.com/npm/registry-fetch/commit/65ea8b1))
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/libcipm/node_modules/npm-registry-fetch/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/README.md b/node_modules/libcipm/node_modules/npm-registry-fetch/README.md
deleted file mode 100644
index 80ce64cda..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/README.md
+++ /dev/null
@@ -1,636 +0,0 @@
-# npm-registry-fetch [![npm version](https://img.shields.io/npm/v/npm-registry-fetch.svg)](https://npm.im/npm-registry-fetch) [![license](https://img.shields.io/npm/l/npm-registry-fetch.svg)](https://npm.im/npm-registry-fetch) [![Travis](https://img.shields.io/travis/npm/npm-registry-fetch/latest.svg)](https://travis-ci.org/npm/npm-registry-fetch) [![AppVeyor](https://img.shields.io/appveyor/ci/zkat/npm-registry-fetch/latest.svg)](https://ci.appveyor.com/project/npm/npm-registry-fetch) [![Coverage Status](https://coveralls.io/repos/github/npm/npm-registry-fetch/badge.svg?branch=latest)](https://coveralls.io/github/npm/npm-registry-fetch?branch=latest)
-
-[`npm-registry-fetch`](https://github.com/npm/npm-registry-fetch) is a Node.js
-library that implements a `fetch`-like API for accessing npm registry APIs
-consistently. It's able to consume npm-style configuration values and has all
-the necessary logic for picking registries, handling scopes, and dealing with
-authentication details built-in.
-
-This package is meant to replace the older
-[`npm-registry-client`](https://npm.im/npm-registry-client).
-
-## Example
-
-```javascript
-const npmFetch = require('npm-registry-fetch')
-
-console.log(
- await npmFetch.json('/-/ping')
-)
-```
-
-## Table of Contents
-
-* [Installing](#install)
-* [Example](#example)
-* [Contributing](#contributing)
-* [API](#api)
- * [`fetch`](#fetch)
- * [`fetch.json`](#fetch-json)
- * [`fetch` options](#fetch-opts)
-
-### Install
-
-`$ npm install npm-registry-fetch`
-
-### Contributing
-
-The npm team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the
-[Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### Caching and `write=true` query strings
-
-Before performing any PUT or DELETE operation, npm clients first make a
-GET request to the registry resource being updated, which includes
-the query string `?write=true`.
-
-The semantics of this are, effectively, "I intend to write to this thing,
-and need to know the latest current value, so that my write can land
-cleanly".
-
-The public npm registry handles these `?write=true` requests by ensuring
-that the cache is re-validated before sending a response. In order to
-maintain the same behavior on the client, and not get tripped up by an
-overeager local cache when we intend to write data to the registry, any
-request that comes through `npm-registry-fetch` that contains `write=true`
-in the query string will forcibly set the `prefer-online` option to `true`,
-and set both `prefer-offline` and `offline` to false, so that any local
-cached value will be revalidated.
-
-#### <a name="fetch"></a> `> fetch(url, [opts]) -> Promise<Response>`
-
-Performs a request to a given URL.
-
-The URL can be either a full URL, or a path to one. The appropriate registry
-will be automatically picked if only a URL path is given.
-
-For available options, please see the section on [`fetch` options](#fetch-opts).
-
-##### Example
-
-```javascript
-const res = await fetch('/-/ping')
-console.log(res.headers)
-res.on('data', d => console.log(d.toString('utf8')))
-```
-
-#### <a name="fetch-json"></a> `> fetch.json(url, [opts]) -> Promise<ResponseJSON>`
-
-Performs a request to a given registry URL, parses the body of the response as
-JSON, and returns it as its final value. This is a utility shorthand for
-`fetch(url).then(res => res.json())`.
-
-For available options, please see the section on [`fetch` options](#fetch-opts).
-
-##### Example
-
-```javascript
-const res = await fetch.json('/-/ping')
-console.log(res) // Body parsed as JSON
-```
-
-#### <a name="fetch-json-stream"></a> `> fetch.json.stream(url, jsonPath, [opts]) -> Stream`
-
-Performs a request to a given registry URL and parses the body of the response
-as JSON, with each entry being emitted through the stream.
-
-The `jsonPath` argument is a [`JSONStream.parse()`
-path](https://github.com/dominictarr/JSONStream#jsonstreamparsepath), and the
-returned stream (unlike default `JSONStream`s) has a valid
-`Symbol.asyncIterator` implementation.
-
-For available options, please see the section on [`fetch` options](#fetch-opts).
-
-##### Example
-
-```javascript
-console.log('https://npm.im/~zkat has access to the following packages:')
-for await (let {key, value} of fetch.json.stream('/-/user/zkat/package', '$*')) {
- console.log(`https://npm.im/${key} (perms: ${value})`)
-}
-```
-
-#### <a name="fetch-opts"></a> `fetch` Options
-
-Fetch options are optional, and can be passed in as either a Map-like object
-(one with a `.get()` method), a plain JavaScript object, or a
-[`figgy-pudding`](https://npm.im/figgy-pudding) instance.
-
-##### <a name="opts-agent"></a> `opts.agent`
-
-* Type: http.Agent
-* Default: an appropriate agent based on URL protocol and proxy settings
-
-An [`Agent`](https://nodejs.org/api/http.html#http_class_http_agent) instance to
-be shared across requests. This allows multiple concurrent `fetch` requests to
-happen on the same socket.
-
-You do _not_ need to provide this option unless you want something particularly
-specialized, since proxy configurations and http/https agents are already
-automatically managed internally when this option is not passed through.
-
-##### <a name="opts-body"></a> `opts.body`
-
-* Type: Buffer | Stream | Object
-* Default: null
-
-Request body to send through the outgoing request. Buffers and Streams will be
-passed through as-is, with a default `content-type` of
-`application/octet-stream`. Plain JavaScript objects will be `JSON.stringify`ed
-and the `content-type` will default to `application/json`.
-
-Use [`opts.headers`](#opts-headers) to set the content-type to something else.
-
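-For instance, a hypothetical write request with a plain-object body (the
-endpoint below is made up for illustration):
-
-```javascript
-// the object body is JSON.stringify'ed and sent as application/json
-const res = await fetch('/-/hypothetical/endpoint', {
-  method: 'PUT',
-  body: { hello: 'world' }
-})
-```
-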
-##### <a name="opts-ca"></a> `opts.ca`
-
-* Type: String, Array, or null
-* Default: null
-
-The Certificate Authority signing certificate that is trusted for SSL
-connections to the registry. Values should be in PEM format (Windows calls it
-"Base-64 encoded X.509 (.CER)") with newlines replaced by the string `'\n'`. For
-example:
-
-```
-{
- ca: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----'
-}
-```
-
-Set to `null` to only allow certificates signed by "known" certificate
-authorities (the default trust store), or to a specific CA cert to trust only
-that specific signing authority.
-
-Multiple CAs can be trusted by specifying an array of certificates instead of a
-single string.
-
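-For example, to trust two CAs at once (the certificate bodies are placeholders):
-
-```
-{
-  ca: [
-    '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----',
-    '-----BEGIN CERTIFICATE-----\nYYYY\nYYYY\n-----END CERTIFICATE-----'
-  ]
-}
-```
-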
-See also [`opts.strict-ssl`](#opts-strict-ssl), [`opts.cert`](#opts-cert), and
-[`opts.key`](#opts-key).
-
-##### <a name="opts-cache"></a> `opts.cache`
-
-* Type: path
-* Default: null
-
-The location of the http cache directory. If provided, certain cacheable requests
-will be cached according to [IETF RFC 7234](https://tools.ietf.org/html/rfc7234)
-rules. This will speed up future requests, as well as make the cached data
-available offline if necessary/requested.
-
-See also [`offline`](#opts-offline), [`prefer-offline`](#opts-prefer-offline),
-and [`prefer-online`](#opts-prefer-online).
-
-##### <a name="opts-cert"></a> `opts.cert`
-
-* Type: String
-* Default: null
-
-A client certificate to pass when accessing the registry. Values should be in
-PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines
-replaced by the string `'\n'`. For example:
-
-```
-{
- cert: '-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----'
-}
-```
-
-It is _not_ the path to a certificate file (and there is no "certfile" option).
-
-See also: [`opts.ca`](#opts-ca) and [`opts.key`](#opts-key)
-
-##### <a name="opts-fetch-retries"></a> `opts.fetch-retries`
-
-* Type: Number
-* Default: 2
-
-The "retries" config for [`retry`](https://npm.im/retry) to use when fetching
-packages from the registry.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-fetch-retry-factor"></a> `opts.fetch-retry-factor`
-
-* Type: Number
-* Default: 10
-
-The "factor" config for [`retry`](https://npm.im/retry) to use when fetching
-packages.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-fetch-retry-mintimeout"></a> `opts.fetch-retry-mintimeout`
-
-* Type: Number
-* Default: 10000 (10 seconds)
-
-The "minTimeout" config for [`retry`](https://npm.im/retry) to use when fetching
-packages.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-fetch-retry-maxtimeout"></a> `opts.fetch-retry-maxtimeout`
-
-* Type: Number
-* Default: 60000 (1 minute)
-
-The "maxTimeout" config for [`retry`](https://npm.im/retry) to use when fetching
-packages.
-
-See also [`opts.retry`](#opts-retry) to provide all retry options as a single
-object.
-
-##### <a name="opts-force-auth"></a> `opts.force-auth`
-
-* Alias: `opts.forceAuth`
-* Type: Object
-* Default: null
-
-If present, other auth-related values in `opts` will be completely ignored,
-including `alwaysAuth`, `email`, and `otp`, when calculating auth for a request,
-and the auth details in `opts.forceAuth` will be used instead.
-
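-For example (the token value below is a placeholder; the keys follow the same
-names the normal auth options use):
-
-```
-{
-  forceAuth: {
-    token: 'replace-with-a-real-token'
-  }
-}
-```
-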
-##### <a name="opts-gzip"></a> `opts.gzip`
-
-* Type: Boolean
-* Default: false
-
-If true, `npm-registry-fetch` will set the `Content-Encoding` header to `gzip`
-and use `zlib.gzip()` or `zlib.createGzip()` to gzip-encode
-[`opts.body`](#opts-body).
-
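-A minimal sketch (the endpoint is invented for illustration):
-
-```javascript
-const res = await fetch('/-/hypothetical/upload', {
-  method: 'PUT',
-  gzip: true,
-  // plain objects are JSON.stringify'ed before being gzipped
-  body: { some: 'payload' }
-})
-```
-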
-##### <a name="opts-headers"></a> `opts.headers`
-
-* Type: Object
-* Default: null
-
-Additional headers for the outgoing request. This option can also be used to
-override headers automatically generated by `npm-registry-fetch`, such as
-`Content-Type`.
-
-##### <a name="opts-ignore-body"></a> `opts.ignore-body`
-
-* Alias: `opts.ignoreBody`
-* Type: Boolean
-* Default: false
-
-If true, the **response body** will be thrown away and `res.body` set to `null`.
-This will prevent dangling response sockets for requests where you don't usually
-care what the response body is.
-
-##### <a name="opts-integrity"></a> `opts.integrity`
-
-* Type: String | [SRI object](https://npm.im/ssri)
-* Default: null
-
-If provided, the response body will be verified against this integrity string,
-using [`ssri`](https://npm.im/ssri). If verification succeeds, the response will
-complete as normal. If verification fails, the response body will error with an
-`EINTEGRITY` error.
-
-Body integrity is only verified if the body is actually consumed to completion --
-that is, if you use `res.json()`/`res.buffer()`, or if you consume the default
-`res` stream data to its end.
-
-Cached data will have its integrity automatically verified using the
-previously-generated integrity hash for the saved request information, so
-`EINTEGRITY` errors can happen if [`opts.cache`](#opts-cache) is used, even if
-`opts.integrity` is not passed in.
-
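-As a sketch (the path and integrity value below are placeholders, not a real
-package or digest):
-
-```javascript
-const res = await fetch('/fictional-pkg/-/fictional-pkg-1.0.0.tgz', {
-  integrity: 'sha512-<base64-digest-goes-here>'
-})
-// integrity is only checked once the body is fully consumed
-const tarball = await res.buffer()
-```
-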
-##### <a name='opts-is-from-ci'></a> `opts.is-from-ci`
-
-* Alias: `opts.isFromCI`
-* Type: Boolean
-* Default: Based on environment variables
-
-This is used to populate the `npm-in-ci` request header sent to the registry.
-
-##### <a name="opts-key"></a> `opts.key`
-
-* Type: String
-* Default: null
-
-A client key to pass when accessing the registry. Values should be in PEM
-format with newlines replaced by the string `'\n'`. For example:
-
-```
-{
- key: '-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----'
-}
-```
-
-It is _not_ the path to a key file (and there is no "keyfile" option).
-
-See also: [`opts.ca`](#opts-ca) and [`opts.cert`](#opts-cert)
-
-##### <a name="opts-local-address"></a> `opts.local-address`
-
-* Type: IP Address String
-* Default: null
-
-The IP address of the local interface to use when making connections
-to the registry.
-
-See also [`opts.proxy`](#opts-proxy)
-
-##### <a name="opts-log"></a> `opts.log`
-
-* Type: [`npmlog`](https://npm.im/npmlog)-like
-* Default: a silent logger (no output)
-
-Logger object to use for logging operation details. Must have the same methods
-as `npmlog`.
-
-##### <a name="opts-map-json"></a> `opts.map-json`
-
-* Alias: `mapJson`, `mapJSON`
-* Type: Function
-* Default: undefined
-
-When using `fetch.json.stream()` (NOT `fetch.json()`), this will be passed down
-to [`JSONStream`](https://npm.im/JSONStream) as the second argument to
-`JSONStream.parse`, and can be used to transform stream data before output.
-
-##### <a name="opts-maxsockets"></a> `opts.maxsockets`
-
-* Alias: `opts.max-sockets`
-* Type: Integer
-* Default: 12
-
-Maximum number of sockets to keep open during requests. Has no effect if
-[`opts.agent`](#opts-agent) is used.
-
-##### <a name="opts-method"></a> `opts.method`
-
-* Type: String
-* Default: 'GET'
-
-HTTP method to use for the outgoing request. Case-insensitive.
-
-##### <a name="opts-noproxy"></a> `opts.noproxy`
-
-* Type: Boolean
-* Default: process.env.NOPROXY
-
-If true, proxying will be disabled even if [`opts.proxy`](#opts-proxy) is used.
-
-##### <a name="opts-npm-session"></a> `opts.npm-session`
-
-* Alias: `opts.npmSession`
-* Type: String
-* Default: null
-
-If provided, will be sent in the `npm-session` header. This header is used by
-the npm registry to identify individual user sessions (usually individual
-invocations of the CLI).
-
-##### <a name="opts-offline"></a> `opts.offline`
-
-* Type: Boolean
-* Default: false
-
-Force offline mode: no network requests will be done during install. To allow
-`npm-registry-fetch` to fill in missing cache data, see
-[`opts.prefer-offline`](#opts-prefer-offline).
-
-This option is only really useful if you're also using
-[`opts.cache`](#opts-cache).
-
-This option is set to `false` when the request includes `write=true` in the
-query string.
-
-##### <a name="opts-otp"></a> `opts.otp`
-
-* Type: Number | String
-* Default: null
-
-This is a one-time password from a two-factor authenticator. It is required for
-certain registry interactions when two-factor auth is enabled for a user
-account.
-
-##### <a name="opts-password"></a> `opts.password`
-
-* Alias: `_password`
-* Type: String
-* Default: null
-
-Password used for basic authentication. For the more modern, more secure
-authentication method, please use [`opts.token`](#opts-token) instead.
-
-Can optionally be scoped to a registry by using a "nerf dart" for that registry.
-That is:
-
-```
-{
- '//registry.npmjs.org/:password': 't0k3nH34r'
-}
-```
-
-See also [`opts.username`](#opts-username)
-
-##### <a name="opts-prefer-offline"></a> `opts.prefer-offline`
-
-* Type: Boolean
-* Default: false
-
-If true, staleness checks for cached data will be bypassed, but missing data
-will be requested from the server. To force full offline mode, use
-[`opts.offline`](#opts-offline).
-
-This option is generally only useful if you're also using
-[`opts.cache`](#opts-cache).
-
-This option is set to `false` when the request includes `write=true` in the
-query string.
-
-##### <a name="opts-prefer-online"></a> `opts.prefer-online`
-
-* Type: Boolean
-* Default: false
-
-If true, staleness checks for cached data will be forced, making the CLI look
-for updates immediately even for fresh package data.
-
-This option is generally only useful if you're also using
-[`opts.cache`](#opts-cache).
-
-This option is set to `true` when the request includes `write=true` in the
-query string.
-
-##### <a name="opts-project-scope"></a> `opts.project-scope`
-
-* Alias: `opts.projectScope`
-* Type: String
-* Default: null
-
-If provided, will be sent in the `npm-scope` header. This header is used by the
-npm registry to identify the toplevel package scope that a particular project
-installation is using.
-
-##### <a name="opts-proxy"></a> `opts.proxy`
-
-* Type: url
-* Default: null
-
-A proxy to use for outgoing http requests. If not passed in, the `HTTP(S)_PROXY`
-environment variable will be used.
-
-##### <a name="opts-query"></a> `opts.query`
-
-* Type: String | Object
-* Default: null
-
-If provided, the request URI will have a query string appended to it using this
-query. If `opts.query` is an object, it will be converted to a query string
-using
-[`querystring.stringify()`](https://nodejs.org/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options).
-
-If the request URI already has a query string, it will be merged with
-`opts.query`, preferring `opts.query` values.
-
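-A small sketch (the path is illustrative):
-
-```javascript
-// appends ?foo=bar to the request URI
-const res = await fetch.json('/-/hypothetical/search', {
-  query: { foo: 'bar' }
-})
-```
-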
-##### <a name="opts-refer"></a> `opts.refer`
-
-* Alias: `opts.referer`
-* Type: String
-* Default: null
-
-Value to use for the `Referer` header. The npm CLI itself uses this to send a
-serialized form of the npm command line that triggered the request.
-
-##### <a name="opts-registry"></a> `opts.registry`
-
-* Type: URL
-* Default: `'https://registry.npmjs.org'`
-
-Registry configuration for a request. If a request URL only includes the URL
-path, this registry setting will be prepended. This configuration is also used
-to determine authentication details, so even if the request URL references a
-completely different host, `opts.registry` will be used to find the auth details
-for that request.
-
-See also [`opts.scope`](#opts-scope), [`opts.spec`](#opts-spec), and
-[`opts.<scope>:registry`](#opts-scope-registry) which can all affect the actual
-registry URL used by the outgoing request.
-
-##### <a name="opts-retry"></a> `opts.retry`
-
-* Type: Object
-* Default: null
-
-Single-object configuration for request retry settings. If passed in, will
-override individually-passed `fetch-retry-*` settings.
-
-##### <a name="opts-scope"></a> `opts.scope`
-
-* Type: String
-* Default: null
-
-Associate an operation with a scope for a scoped registry. This option can force
-lookup of scope-specific registries and authentication.
-
-See also [`opts.<scope>:registry`](#opts-scope-registry) and
-[`opts.spec`](#opts-spec) for interactions with this option.
-
-##### <a name="opts-scope-registry"></a> `opts.<scope>:registry`
-
-* Type: String
-* Default: null
-
-This option type can be used to configure the registry used for requests
-involving a particular scope. For example, `opts['@myscope:registry'] =
-'https://scope-specific.registry/'` will make it so requests go out to this
-registry instead of [`opts.registry`](#opts-registry) when
-[`opts.scope`](#opts-scope) is used, or when [`opts.spec`](#opts-spec) is a
-scoped package spec.
-
-The `@` before the scope name is optional, but recommended.
-
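-For example (the registry URL is illustrative):
-
-```
-{
-  scope: '@myscope',
-  '@myscope:registry': 'https://scope-specific.registry/'
-}
-```
-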
-##### <a name="opts-spec"></a> `opts.spec`
-
-* Type: String | [`npm-package-arg`](https://npm.im/npm-package-arg) object.
-* Default: null
-
-If provided, can be used to automatically configure [`opts.scope`](#opts-scope)
-based on a specific package name. Non-registry package specs will throw an
-error.
-
-##### <a name="opts-strict-ssl"></a> `opts.strict-ssl`
-
-* Type: Boolean
-* Default: true
-
-Whether or not to do SSL key validation when making requests to the
-registry via https.
-
-See also [`opts.ca`](#opts-ca).
-
-##### <a name="opts-timeout"></a> `opts.timeout`
-
-* Type: Milliseconds
-* Default: 30000 (30 seconds)
-
-Time before a hanging request times out.
-
-##### <a name="opts-token"></a> `opts.token`
-
-* Alias: `opts._authToken`
-* Type: String
-* Default: null
-
-Authentication token string.
-
-Can be scoped to a registry by using a "nerf dart" for that registry. That is:
-
-```
-{
- '//registry.npmjs.org/:token': 't0k3nH34r'
-}
-```
-
-##### <a name="opts-user-agent"></a> `opts.user-agent`
-
-* Type: String
-* Default: `'npm-registry-fetch@<version>/node@<node-version>+<arch> (<platform>)'`
-
-User agent string to send in the `User-Agent` header.
-
-##### <a name="opts-username"></a> `opts.username`
-
-* Type: String
-* Default: null
-
-Username used for basic authentication. For the more modern, more secure
-authentication method, please use [`opts.token`](#opts-token) instead.
-
-Can optionally be scoped to a registry by using a "nerf dart" for that registry.
-That is:
-
-```
-{
- '//registry.npmjs.org/:username': 't0k3nH34r'
-}
-```
-
-See also [`opts.password`](#opts-password)
-
-##### <a name="opts-auth"></a> `opts._auth`
-
-* Type: String
-* Default: null
-
-**DEPRECATED** This is a legacy authentication token supported only for
-compatibility. Please use [`opts.token`](#opts-token) instead.
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/auth.js b/node_modules/libcipm/node_modules/npm-registry-fetch/auth.js
deleted file mode 100644
index d583982d0..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/auth.js
+++ /dev/null
@@ -1,57 +0,0 @@
-'use strict'
-
-const config = require('./config.js')
-const url = require('url')
-
-module.exports = getAuth
-function getAuth (registry, opts) {
- if (!registry) { throw new Error('registry is required') }
- opts = config(opts)
- let AUTH = {}
- const regKey = registry && registryKey(registry)
- if (opts.forceAuth) {
- opts = opts.forceAuth
- }
- const doKey = (key, alias) => addKey(opts, AUTH, regKey, key, alias)
- doKey('token')
- doKey('_authToken', 'token')
- doKey('username')
- doKey('password')
- doKey('_password', 'password')
- doKey('email')
- doKey('_auth')
- doKey('otp')
- doKey('always-auth', 'alwaysAuth')
- if (AUTH.password) {
- AUTH.password = Buffer.from(AUTH.password, 'base64').toString('utf8')
- }
- if (AUTH._auth && !(AUTH.username && AUTH.password)) {
- let auth = Buffer.from(AUTH._auth, 'base64').toString()
- auth = auth.split(':')
- AUTH.username = auth.shift()
- AUTH.password = auth.join(':')
- }
- AUTH.alwaysAuth = AUTH.alwaysAuth === 'false' ? false : !!AUTH.alwaysAuth
- return AUTH
-}
-
-function addKey (opts, obj, scope, key, objKey) {
- if (opts[key]) {
- obj[objKey || key] = opts[key]
- }
- if (scope && opts[`${scope}:${key}`]) {
- obj[objKey || key] = opts[`${scope}:${key}`]
- }
-}
-
-// Called a nerf dart in the main codebase. Used as a "safe"
-// key when fetching registry info from config.
-function registryKey (registry) {
- const parsed = url.parse(registry)
- const formatted = url.format({
- host: parsed.host,
- pathname: parsed.pathname,
- slashes: parsed.slashes
- })
- return url.resolve(formatted, '.')
-}
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/check-response.js b/node_modules/libcipm/node_modules/npm-registry-fetch/check-response.js
deleted file mode 100644
index 14058239b..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/check-response.js
+++ /dev/null
@@ -1,109 +0,0 @@
-'use strict'
-
-const config = require('./config.js')
-const errors = require('./errors.js')
-const LRU = require('lru-cache')
-
-module.exports = checkResponse
-function checkResponse (method, res, registry, startTime, opts) {
- opts = config(opts)
- if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
- opts.log.notice('', res.headers.get('npm-notice'))
- }
- checkWarnings(res, registry, opts)
- if (res.status >= 400) {
- logRequest(method, res, startTime, opts)
- return checkErrors(method, res, startTime, opts)
- } else {
- res.body.on('end', () => logRequest(method, res, startTime, opts))
- if (opts.ignoreBody) {
- res.body.resume()
- res.body = null
- }
- return res
- }
-}
-
-function logRequest (method, res, startTime, opts) {
- const elapsedTime = Date.now() - startTime
- const attempt = res.headers.get('x-fetch-attempts')
- const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
- const cacheStr = res.headers.get('x-local-cache') ? ' (from cache)' : ''
- opts.log.http(
- 'fetch',
- `${method.toUpperCase()} ${res.status} ${res.url} ${elapsedTime}ms${attemptStr}${cacheStr}`
- )
-}
-
-const WARNING_REGEXP = /^\s*(\d{3})\s+(\S+)\s+"(.*)"\s+"([^"]+)"/
-const BAD_HOSTS = new LRU({ max: 50 })
-
-function checkWarnings (res, registry, opts) {
- if (res.headers.has('warning') && !BAD_HOSTS.has(registry)) {
- const warnings = {}
- res.headers.raw()['warning'].forEach(w => {
- const match = w.match(WARNING_REGEXP)
- if (match) {
- warnings[match[1]] = {
- code: match[1],
- host: match[2],
- message: match[3],
- date: new Date(match[4])
- }
- }
- })
- BAD_HOSTS.set(registry, true)
- if (warnings['199']) {
- if (warnings['199'].message.match(/ENOTFOUND/)) {
- opts.log.warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`)
- } else {
- opts.log.warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`)
- }
- }
- if (warnings['111']) {
- // 111 Revalidation failed -- we're using stale data
- opts.log.warn(
- 'registry',
- `Using stale data from ${registry} due to a request error during revalidation.`
- )
- }
- }
-}
-
-function checkErrors (method, res, startTime, opts) {
- return res.buffer()
- .catch(() => null)
- .then(body => {
- let parsed = body
- try {
- parsed = JSON.parse(body.toString('utf8'))
- } catch (e) {}
- if (res.status === 401 && res.headers.get('www-authenticate')) {
- const auth = res.headers.get('www-authenticate')
- .split(/,\s*/)
- .map(s => s.toLowerCase())
- if (auth.indexOf('ipaddress') !== -1) {
- throw new errors.HttpErrorAuthIPAddress(
- method, res, parsed, opts.spec
- )
- } else if (auth.indexOf('otp') !== -1) {
- throw new errors.HttpErrorAuthOTP(
- method, res, parsed, opts.spec
- )
- } else {
- throw new errors.HttpErrorAuthUnknown(
- method, res, parsed, opts.spec
- )
- }
- } else if (res.status === 401 && body != null && /one-time pass/.test(body.toString('utf8'))) {
- // Heuristic for malformed OTP responses that don't include the www-authenticate header.
- throw new errors.HttpErrorAuthOTP(
- method, res, parsed, opts.spec
- )
- } else {
- throw new errors.HttpErrorGeneral(
- method, res, parsed, opts.spec
- )
- }
- })
-}
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/config.js b/node_modules/libcipm/node_modules/npm-registry-fetch/config.js
deleted file mode 100644
index d7be3f9b3..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/config.js
+++ /dev/null
@@ -1,98 +0,0 @@
-'use strict'
-
-const pkg = require('./package.json')
-const figgyPudding = require('figgy-pudding')
-const silentLog = require('./silentlog.js')
-
-const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
-const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
-module.exports = figgyPudding({
- 'agent': {},
- 'algorithms': {},
- 'body': {},
- 'ca': {},
- 'cache': {},
- 'cert': {},
- 'fetch-retries': {},
- 'fetch-retry-factor': {},
- 'fetch-retry-maxtimeout': {},
- 'fetch-retry-mintimeout': {},
- 'force-auth': {},
- forceAuth: 'force-auth',
- 'gzip': {},
- 'headers': {},
- 'https-proxy': {},
- 'ignore-body': {},
- ignoreBody: 'ignore-body',
- 'integrity': {},
- 'is-from-ci': 'isFromCI',
- 'isFromCI': {
- default () {
- return (
- process.env['CI'] === 'true' ||
- process.env['TDDIUM'] ||
- process.env['JENKINS_URL'] ||
- process.env['bamboo.buildKey'] ||
- process.env['GO_PIPELINE_NAME']
- )
- }
- },
- 'key': {},
- 'local-address': {},
- 'log': {
- default: silentLog
- },
- 'map-json': 'mapJson',
- 'mapJSON': 'mapJson',
- 'mapJson': {},
- 'max-sockets': 'maxsockets',
- 'maxsockets': {
- default: 12
- },
- 'memoize': {},
- 'method': {
- default: 'GET'
- },
- 'no-proxy': {},
- 'noproxy': {},
- 'npm-session': 'npmSession',
- 'npmSession': {},
- 'offline': {},
- 'otp': {},
- 'prefer-offline': {},
- 'prefer-online': {},
- 'projectScope': {},
- 'project-scope': 'projectScope',
- 'Promise': {default: () => Promise},
- 'proxy': {},
- 'query': {},
- 'refer': {},
- 'referer': 'refer',
- 'registry': {
- default: 'https://registry.npmjs.org/'
- },
- 'retry': {},
- 'scope': {},
- 'spec': {},
- 'strict-ssl': {},
- 'timeout': {
- default: 30 * 1000
- },
- 'user-agent': {
- default: `${
- pkg.name
- }@${
- pkg.version
- }/node@${
- process.version
- }+${
- process.arch
- } (${
- process.platform
- })`
- }
-}, {
- other (key) {
- return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
- }
-})
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/errors.js b/node_modules/libcipm/node_modules/npm-registry-fetch/errors.js
deleted file mode 100644
index ba78735fc..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/errors.js
+++ /dev/null
@@ -1,79 +0,0 @@
-'use strict'
-
-const url = require('url')
-
-function packageName (href) {
- try {
- let basePath = url.parse(href).pathname.substr(1)
- if (!basePath.match(/^-/)) {
- basePath = basePath.split('/')
- var index = basePath.indexOf('_rewrite')
- if (index === -1) {
- index = basePath.length - 1
- } else {
- index++
- }
- return decodeURIComponent(basePath[index])
- }
- } catch (_) {
- // this is ok
- }
-}
-
-class HttpErrorBase extends Error {
- constructor (method, res, body, spec) {
- super()
- this.headers = res.headers.raw()
- this.statusCode = res.status
- this.code = `E${res.status}`
- this.method = method
- this.uri = res.url
- this.body = body
- this.pkgid = spec ? spec.toString() : packageName(res.url)
- }
-}
-module.exports.HttpErrorBase = HttpErrorBase
-
-class HttpErrorGeneral extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = `${res.status} ${res.statusText} - ${
- this.method.toUpperCase()
- } ${
- this.spec || this.uri
- }${
- (body && body.error) ? ' - ' + body.error : ''
- }`
- Error.captureStackTrace(this, HttpErrorGeneral)
- }
-}
-module.exports.HttpErrorGeneral = HttpErrorGeneral
-
-class HttpErrorAuthOTP extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = 'OTP required for authentication'
- this.code = 'EOTP'
- Error.captureStackTrace(this, HttpErrorAuthOTP)
- }
-}
-module.exports.HttpErrorAuthOTP = HttpErrorAuthOTP
-
-class HttpErrorAuthIPAddress extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = 'Login is not allowed from your IP address'
- this.code = 'EAUTHIP'
- Error.captureStackTrace(this, HttpErrorAuthIPAddress)
- }
-}
-module.exports.HttpErrorAuthIPAddress = HttpErrorAuthIPAddress
-
-class HttpErrorAuthUnknown extends HttpErrorBase {
- constructor (method, res, body, spec) {
- super(method, res, body, spec)
- this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
- Error.captureStackTrace(this, HttpErrorAuthUnknown)
- }
-}
-module.exports.HttpErrorAuthUnknown = HttpErrorAuthUnknown
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/index.js b/node_modules/libcipm/node_modules/npm-registry-fetch/index.js
deleted file mode 100644
index 9bd0ad32d..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/index.js
+++ /dev/null
@@ -1,203 +0,0 @@
-'use strict'
-
-const Buffer = require('safe-buffer').Buffer
-
-const checkResponse = require('./check-response.js')
-const config = require('./config.js')
-const getAuth = require('./auth.js')
-const fetch = require('make-fetch-happen')
-const JSONStream = require('JSONStream')
-const npa = require('npm-package-arg')
-const {PassThrough} = require('stream')
-const qs = require('querystring')
-const url = require('url')
-const zlib = require('zlib')
-
-module.exports = regFetch
-function regFetch (uri, opts) {
- opts = config(opts)
- const registry = (
- (opts.spec && pickRegistry(opts.spec, opts)) ||
- opts.registry ||
- 'https://registry.npmjs.org/'
- )
- uri = url.parse(uri).protocol
- ? uri
- : `${
- registry.trim().replace(/\/?$/g, '')
- }/${
- uri.trim().replace(/^\//, '')
- }`
- // through that takes into account the scope, the prefix of `uri`, etc
- const startTime = Date.now()
- const headers = getHeaders(registry, uri, opts)
- let body = opts.body
- const bodyIsStream = body &&
- typeof body === 'object' &&
- typeof body.pipe === 'function'
- if (body && !bodyIsStream && typeof body !== 'string' && !Buffer.isBuffer(body)) {
- headers['content-type'] = headers['content-type'] || 'application/json'
- body = JSON.stringify(body)
- } else if (body && !headers['content-type']) {
- headers['content-type'] = 'application/octet-stream'
- }
- if (opts.gzip) {
- headers['content-encoding'] = 'gzip'
- if (bodyIsStream) {
- const gz = zlib.createGzip()
- body.on('error', err => gz.emit('error', err))
- body = body.pipe(gz)
- } else {
- body = new opts.Promise((resolve, reject) => {
- zlib.gzip(body, (err, gz) => err ? reject(err) : resolve(gz))
- })
- }
- }
-
- let q = opts.query
- if (q) {
- if (typeof q === 'string') {
- q = qs.parse(q)
- } else if (typeof q !== 'object') {
- throw new TypeError('invalid query option, must be string or object')
- }
- Object.keys(q).forEach(key => {
- if (q[key] === undefined) {
- delete q[key]
- }
- })
- }
- const parsed = url.parse(uri)
-
- const query = parsed.query ? Object.assign(qs.parse(parsed.query), q || {})
- : Object.keys(q || {}).length ? q
- : null
-
- if (query) {
- if (String(query.write) === 'true' && opts.method === 'GET') {
- opts = opts.concat({
- offline: false,
- 'prefer-offline': false,
- 'prefer-online': true
- })
- }
- parsed.search = '?' + qs.stringify(query)
- uri = url.format(parsed)
- }
-
- return opts.Promise.resolve(body).then(body => fetch(uri, {
- agent: opts.agent,
- algorithms: opts.algorithms,
- body,
- cache: getCacheMode(opts),
- cacheManager: opts.cache,
- ca: opts.ca,
- cert: opts.cert,
- headers,
- integrity: opts.integrity,
- key: opts.key,
- localAddress: opts['local-address'],
- maxSockets: opts.maxsockets,
- memoize: opts.memoize,
- method: opts.method || 'GET',
- noProxy: opts['no-proxy'] || opts.noproxy,
- Promise: opts.Promise,
- proxy: opts['https-proxy'] || opts.proxy,
- referer: opts.refer,
- retry: opts.retry != null ? opts.retry : {
- retries: opts['fetch-retries'],
- factor: opts['fetch-retry-factor'],
- minTimeout: opts['fetch-retry-mintimeout'],
- maxTimeout: opts['fetch-retry-maxtimeout']
- },
- strictSSL: !!opts['strict-ssl'],
- timeout: opts.timeout
- }).then(res => checkResponse(
- opts.method || 'GET', res, registry, startTime, opts
- )))
-}
-
-module.exports.json = fetchJSON
-function fetchJSON (uri, opts) {
- return regFetch(uri, opts).then(res => res.json())
-}
-
-module.exports.json.stream = fetchJSONStream
-function fetchJSONStream (uri, jsonPath, opts) {
- opts = config(opts)
- const parser = JSONStream.parse(jsonPath, opts.mapJson)
- const pt = parser.pipe(new PassThrough({objectMode: true}))
- parser.on('error', err => pt.emit('error', err))
- regFetch(uri, opts).then(res => {
- res.body.on('error', err => parser.emit('error', err))
- res.body.pipe(parser)
- }, err => pt.emit('error', err))
- return pt
-}
-
-module.exports.pickRegistry = pickRegistry
-function pickRegistry (spec, opts) {
- spec = npa(spec)
- opts = config(opts)
- let registry = spec.scope &&
- opts[spec.scope.replace(/^@?/, '@') + ':registry']
-
- if (!registry && opts.scope) {
- registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
- }
-
- if (!registry) {
- registry = opts.registry || 'https://registry.npmjs.org/'
- }
-
- return registry
-}
-
-function getCacheMode (opts) {
- return opts.offline
- ? 'only-if-cached'
- : opts['prefer-offline']
- ? 'force-cache'
- : opts['prefer-online']
- ? 'no-cache'
- : 'default'
-}
-
-function getHeaders (registry, uri, opts) {
- const headers = Object.assign({
- 'npm-in-ci': !!(
- opts['is-from-ci'] ||
- process.env['CI'] === 'true' ||
- process.env['TDDIUM'] ||
- process.env['JENKINS_URL'] ||
- process.env['bamboo.buildKey'] ||
- process.env['GO_PIPELINE_NAME']
- ),
- 'npm-scope': opts['project-scope'],
- 'npm-session': opts['npm-session'],
- 'user-agent': opts['user-agent'],
- 'referer': opts.refer
- }, opts.headers)
-
- const auth = getAuth(registry, opts)
- // If a tarball is hosted on a different place than the manifest, only send
- // credentials on `alwaysAuth`
- const shouldAuth = (
- auth.alwaysAuth ||
- url.parse(uri).host === url.parse(registry).host
- )
- if (shouldAuth && auth.token) {
- headers.authorization = `Bearer ${auth.token}`
- } else if (shouldAuth && auth.username && auth.password) {
- const encoded = Buffer.from(
- `${auth.username}:${auth.password}`, 'utf8'
- ).toString('base64')
- headers.authorization = `Basic ${encoded}`
- } else if (shouldAuth && auth._auth) {
- headers.authorization = `Basic ${auth._auth}`
- }
- if (shouldAuth && auth.otp) {
- headers['npm-otp'] = auth.otp
- }
- return headers
-}
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/LICENSE b/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/LICENSE
deleted file mode 100644
index 0c068ceec..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) Feross Aboukhadijeh
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/README.md b/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/README.md
deleted file mode 100644
index 356e35193..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/README.md
+++ /dev/null
@@ -1,586 +0,0 @@
-# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
-
-[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg
-[travis-url]: https://travis-ci.org/feross/safe-buffer
-[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg
-[npm-url]: https://npmjs.org/package/safe-buffer
-[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg
-[downloads-url]: https://npmjs.org/package/safe-buffer
-[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
-[standard-url]: https://standardjs.com
-
-#### Safer Node.js Buffer API
-
-**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`,
-`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.**
-
-**Uses the built-in implementation when available.**
-
-## install
-
-```
-npm install safe-buffer
-```
-
-[Get supported safe-buffer with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-safe-buffer?utm_source=npm-safe-buffer&utm_medium=referral&utm_campaign=readme)
-
-## usage
-
-The goal of this package is to provide a safe replacement for the node.js `Buffer`.
-
-It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to
-the top of your node.js modules:
-
-```js
-var Buffer = require('safe-buffer').Buffer
-
-// Existing buffer code will continue to work without issues:
-
-new Buffer('hey', 'utf8')
-new Buffer([1, 2, 3], 'utf8')
-new Buffer(obj)
-new Buffer(16) // create an uninitialized buffer (potentially unsafe)
-
-// But you can use these new explicit APIs to make clear what you want:
-
-Buffer.from('hey', 'utf8') // convert from many types to a Buffer
-Buffer.alloc(16) // create a zero-filled buffer (safe)
-Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe)
-```
-
-## api
-
-### Class Method: Buffer.from(array)
-<!-- YAML
-added: v3.0.0
--->
-
-* `array` {Array}
-
-Allocates a new `Buffer` using an `array` of octets.
-
-```js
-const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]);
- // creates a new Buffer containing ASCII bytes
- // ['b','u','f','f','e','r']
-```
-
-A `TypeError` will be thrown if `array` is not an `Array`.
-
-### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
-<!-- YAML
-added: v5.10.0
--->
-
-* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or
- a `new ArrayBuffer()`
-* `byteOffset` {Number} Default: `0`
-* `length` {Number} Default: `arrayBuffer.length - byteOffset`
-
-When passed a reference to the `.buffer` property of a `TypedArray` instance,
-the newly created `Buffer` will share the same allocated memory as the
-TypedArray.
-
-```js
-const arr = new Uint16Array(2);
-arr[0] = 5000;
-arr[1] = 4000;
-
-const buf = Buffer.from(arr.buffer); // shares the memory with arr;
-
-console.log(buf);
- // Prints: <Buffer 88 13 a0 0f>
-
-// changing the TypedArray changes the Buffer also
-arr[1] = 6000;
-
-console.log(buf);
- // Prints: <Buffer 88 13 70 17>
-```
-
-The optional `byteOffset` and `length` arguments specify a memory range within
-the `arrayBuffer` that will be shared by the `Buffer`.
-
-```js
-const ab = new ArrayBuffer(10);
-const buf = Buffer.from(ab, 0, 2);
-console.log(buf.length);
- // Prints: 2
-```
-
-A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
-
-### Class Method: Buffer.from(buffer)
-<!-- YAML
-added: v3.0.0
--->
-
-* `buffer` {Buffer}
-
-Copies the passed `buffer` data onto a new `Buffer` instance.
-
-```js
-const buf1 = Buffer.from('buffer');
-const buf2 = Buffer.from(buf1);
-
-buf1[0] = 0x61;
-console.log(buf1.toString());
- // 'auffer'
-console.log(buf2.toString());
- // 'buffer' (copy is not changed)
-```
-
-A `TypeError` will be thrown if `buffer` is not a `Buffer`.
-
-### Class Method: Buffer.from(str[, encoding])
-<!-- YAML
-added: v5.10.0
--->
-
-* `str` {String} String to encode.
-* `encoding` {String} Encoding to use, Default: `'utf8'`
-
-Creates a new `Buffer` containing the given JavaScript string `str`. If
-provided, the `encoding` parameter identifies the character encoding.
-If not provided, `encoding` defaults to `'utf8'`.
-
-```js
-const buf1 = Buffer.from('this is a tést');
-console.log(buf1.toString());
- // prints: this is a tést
-console.log(buf1.toString('ascii'));
- // prints: this is a tC)st
-
-const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex');
-console.log(buf2.toString());
- // prints: this is a tést
-```
-
-A `TypeError` will be thrown if `str` is not a string.
-
-### Class Method: Buffer.alloc(size[, fill[, encoding]])
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-* `fill` {Value} Default: `undefined`
-* `encoding` {String} Default: `utf8`
-
-Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
-`Buffer` will be *zero-filled*.
-
-```js
-const buf = Buffer.alloc(5);
-console.log(buf);
- // <Buffer 00 00 00 00 00>
-```
-
-The `size` must be less than or equal to the value of
-`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
-`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
-be created if a `size` less than or equal to 0 is specified.
-
-If `fill` is specified, the allocated `Buffer` will be initialized by calling
-`buf.fill(fill)`. See [`buf.fill()`][] for more information.
-
-```js
-const buf = Buffer.alloc(5, 'a');
-console.log(buf);
- // <Buffer 61 61 61 61 61>
-```
-
-If both `fill` and `encoding` are specified, the allocated `Buffer` will be
-initialized by calling `buf.fill(fill, encoding)`. For example:
-
-```js
-const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
-console.log(buf);
- // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
-```
-
-Calling `Buffer.alloc(size)` can be significantly slower than the alternative
-`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance
-contents will *never contain sensitive data*.
-
-A `TypeError` will be thrown if `size` is not a number.
-
-### Class Method: Buffer.allocUnsafe(size)
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-
-Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must
-be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit
-architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is
-thrown. A zero-length Buffer will be created if a `size` less than or equal to
-0 is specified.
-
-The underlying memory for `Buffer` instances created in this way is *not
-initialized*. The contents of the newly created `Buffer` are unknown and
-*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
-`Buffer` instances to zeroes.
-
-```js
-const buf = Buffer.allocUnsafe(5);
-console.log(buf);
- // <Buffer 78 e0 82 02 01>
- // (octets will be different, every time)
-buf.fill(0);
-console.log(buf);
- // <Buffer 00 00 00 00 00>
-```
-
-A `TypeError` will be thrown if `size` is not a number.
-
-Note that the `Buffer` module pre-allocates an internal `Buffer` instance of
-size `Buffer.poolSize` that is used as a pool for the fast allocation of new
-`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated
-`new Buffer(size)` constructor) only when `size` is less than or equal to
-`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default
-value of `Buffer.poolSize` is `8192` but can be modified.
-
-Use of this pre-allocated internal memory pool is a key difference between
-calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`.
-Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer
-pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal
-Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The
-difference is subtle but can be important when an application requires the
-additional performance that `Buffer.allocUnsafe(size)` provides.
-
-### Class Method: Buffer.allocUnsafeSlow(size)
-<!-- YAML
-added: v5.10.0
--->
-
-* `size` {Number}
-
-Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The
-`size` must be less than or equal to the value of
-`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is
-`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will
-be created if a `size` less than or equal to 0 is specified.
-
-The underlying memory for `Buffer` instances created in this way is *not
-initialized*. The contents of the newly created `Buffer` are unknown and
-*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such
-`Buffer` instances to zeroes.
-
-When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances,
-allocations under 4KB are, by default, sliced from a single pre-allocated
-`Buffer`. This allows applications to avoid the garbage collection overhead of
-creating many individually allocated Buffers. This approach improves both
-performance and memory usage by eliminating the need to track and cleanup as
-many `Persistent` objects.
-
-However, in the case where a developer may need to retain a small chunk of
-memory from a pool for an indeterminate amount of time, it may be appropriate
-to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then
-copy out the relevant bits.
-
-```js
-// need to keep around a few small chunks of memory
-const store = [];
-
-socket.on('readable', () => {
- const data = socket.read();
- // allocate for retained data
- const sb = Buffer.allocUnsafeSlow(10);
- // copy the data into the new allocation
- data.copy(sb, 0, 0, 10);
- store.push(sb);
-});
-```
-
-`Buffer.allocUnsafeSlow()` should be used only as a last resort *after* a
-developer has observed undue memory retention in their applications.
-
-A `TypeError` will be thrown if `size` is not a number.
-
-### All the Rest
-
-The rest of the `Buffer` API is exactly the same as in node.js.
-[See the docs](https://nodejs.org/api/buffer.html).
-
-
-## Related links
-
-- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660)
-- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4)
-
-## Why is `Buffer` unsafe?
-
-Today, the node.js `Buffer` constructor is overloaded to handle many different argument
-types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.),
-`ArrayBuffer`, and also `Number`.
-
-The API is optimized for convenience: you can throw any type at it, and it will try to do
-what you want.
-
-Because the Buffer constructor is so powerful, you often see code like this:
-
-```js
-// Convert UTF-8 strings to hex
-function toHex (str) {
- return new Buffer(str).toString('hex')
-}
-```
-
-***But what happens if `toHex` is called with a `Number` argument?***
-
-### Remote Memory Disclosure
-
-If an attacker can make your program call the `Buffer` constructor with a `Number`
-argument, then they can make it allocate uninitialized memory from the node.js process.
-This could potentially disclose TLS private keys, user data, or database passwords.
-
-When the `Buffer` constructor is passed a `Number` argument, it returns an
-**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like
-this, you **MUST** overwrite the contents before returning it to the user.
-
-From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size):
-
-> `new Buffer(size)`
->
-> - `size` Number
->
-> The underlying memory for `Buffer` instances created in this way is not initialized.
-> **The contents of a newly created `Buffer` are unknown and could contain sensitive
-> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes.
-
-(Emphasis our own.)
-
-When the programmer intends to create an uninitialized `Buffer`, you often see
-code like this:
-
-```js
-var buf = new Buffer(16)
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
- buf[i] = otherBuf[i]
-}
-```
-
-
-### Would this ever be a problem in real code?
-
-Yes. It's surprisingly common to forget to check the type of your variables in a
-dynamically-typed language like JavaScript.
-
-Usually the consequence of assuming the wrong type is that your program crashes with an
-uncaught exception. But the failure mode for forgetting to check the type of arguments to
-the `Buffer` constructor is more catastrophic.
-
-Here's an example of a vulnerable service that takes a JSON payload and converts it to
-hex:
-
-```js
-// Take a JSON payload {str: "some string"} and convert it to hex
-var server = http.createServer(function (req, res) {
- var data = ''
- req.setEncoding('utf8')
- req.on('data', function (chunk) {
- data += chunk
- })
- req.on('end', function () {
- var body = JSON.parse(data)
- res.end(new Buffer(body.str).toString('hex'))
- })
-})
-
-server.listen(8080)
-```
-
-In this example, an http client just has to send:
-
-```json
-{
- "str": 1000
-}
-```
-
-and it will get back 1,000 bytes of uninitialized memory from the server.
-
-This is a very serious bug. It's similar in severity to the
-[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process
-memory by remote attackers.
-
-
-### Which real-world packages were vulnerable?
-
-#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht)
-
-[Mathias Buus](https://github.com/mafintosh) and I
-([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages,
-[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow
-anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get
-them to reveal 20 bytes at a time of uninitialized memory from the node.js process.
-
-Here's
-[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8)
-that fixed it. We released a new fixed version, created a
-[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all
-vulnerable versions on npm so users will get a warning to upgrade to a newer version.
-
-#### [`ws`](https://www.npmjs.com/package/ws)
-
-That got us wondering if there were other vulnerable packages. Sure enough, within a short
-period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the
-most popular WebSocket implementation in node.js.
-
-If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as
-expected, then uninitialized server memory would be disclosed to the remote peer.
-
-These were the vulnerable methods:
-
-```js
-socket.send(number)
-socket.ping(number)
-socket.pong(number)
-```
-
-Here's a vulnerable socket server with some echo functionality:
-
-```js
-server.on('connection', function (socket) {
- socket.on('message', function (message) {
- message = JSON.parse(message)
- if (message.type === 'echo') {
- socket.send(message.data) // send back the user's message
- }
- })
-})
-```
-
-Calling `socket.send(number)` on the server will disclose server memory.
-
-Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue
-was fixed, with a more detailed explanation. Props to
-[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the
-[Node Security Project disclosure](https://nodesecurity.io/advisories/67).
-
-
-### What's the solution?
-
-It's important that node.js offers a fast way to get memory; otherwise,
-performance-critical applications would needlessly get a lot slower.
-
-But we need a better way to *signal our intent* as programmers. **When we want
-uninitialized memory, we should request it explicitly.**
-
-Sensitive functionality should not be packed into a developer-friendly API that loosely
-accepts many different types. This type of API encourages the lazy practice of passing
-variables in without checking the type very carefully.
-
-#### A new API: `Buffer.allocUnsafe(number)`
-
-The functionality of creating buffers with uninitialized memory should be part of another
-API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that
-frequently gets user input of all sorts of different types passed into it.
-
-```js
-var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory!
-
-// Immediately overwrite the uninitialized buffer with data from another buffer
-for (var i = 0; i < buf.length; i++) {
- buf[i] = otherBuf[i]
-}
-```
-
-
-### How do we fix node.js core?
-
-We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as
-`semver-major`) which defends against one case:
-
-```js
-var str = 16
-new Buffer(str, 'utf8')
-```
-
-In this situation, it's implied that the programmer intended the first argument to be a
-string, since they passed an encoding as a second argument. Today, node.js will allocate
-uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not
-what the programmer intended.
-
-But this is only a partial solution, since if the programmer does `new Buffer(variable)`
-(without an `encoding` parameter) there's no way to know what they intended. If `variable`
-is sometimes a number, then uninitialized memory will sometimes be returned.
-
-### What's the real long-term fix?
-
-We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when
-we need uninitialized memory. But that would break 1000s of packages.
-
-~~We believe the best solution is to:~~
-
-~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~
-
-~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~
-
-#### Update
-
-We now support three new APIs:
-
-- `Buffer.from(value)` - convert from any type to a buffer
-- `Buffer.alloc(size)` - create a zero-filled buffer
-- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size
-
-This solves the core problem that affected `ws` and `bittorrent-dht` which is
-`Buffer(variable)` getting tricked into taking a number argument.
-
-This way, existing code continues working and the impact on the npm ecosystem will be
-minimal. Over time, npm maintainers can migrate performance-critical code to use
-`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`.
-
-
-### Conclusion
-
-We think there's a serious design issue with the `Buffer` API as it exists today. It
-promotes insecure software by putting high-risk functionality into a convenient API
-with friendly "developer ergonomics".
-
-This wasn't merely a theoretical exercise because we found the issue in some of the
-most popular npm packages.
-
-Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of
-`buffer`.
-
-```js
-var Buffer = require('safe-buffer').Buffer
-```
-
-Eventually, we hope that node.js core can switch to this new, safer behavior. We believe
-the impact on the ecosystem would be minimal since it's not a breaking change.
-Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while
-older, insecure packages would magically become safe from this attack vector.
-
-
-## links
-
-- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514)
-- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67)
-- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68)
-
-
-## credit
-
-The original issues in `bittorrent-dht`
-([disclosure](https://nodesecurity.io/advisories/68)) and
-`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by
-[Mathias Buus](https://github.com/mafintosh) and
-[Feross Aboukhadijeh](http://feross.org/).
-
-Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues
-and for his work running the [Node Security Project](https://nodesecurity.io/).
-
-Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and
-auditing the code.
-
-
-## license
-
-MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org)
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.d.ts b/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.d.ts
deleted file mode 100644
index e9fed809a..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.d.ts
+++ /dev/null
@@ -1,187 +0,0 @@
-declare module "safe-buffer" {
- export class Buffer {
- length: number
- write(string: string, offset?: number, length?: number, encoding?: string): number;
- toString(encoding?: string, start?: number, end?: number): string;
- toJSON(): { type: 'Buffer', data: any[] };
- equals(otherBuffer: Buffer): boolean;
- compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number;
- copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number;
- slice(start?: number, end?: number): Buffer;
- writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number;
- readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
- readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
- readIntLE(offset: number, byteLength: number, noAssert?: boolean): number;
- readIntBE(offset: number, byteLength: number, noAssert?: boolean): number;
- readUInt8(offset: number, noAssert?: boolean): number;
- readUInt16LE(offset: number, noAssert?: boolean): number;
- readUInt16BE(offset: number, noAssert?: boolean): number;
- readUInt32LE(offset: number, noAssert?: boolean): number;
- readUInt32BE(offset: number, noAssert?: boolean): number;
- readInt8(offset: number, noAssert?: boolean): number;
- readInt16LE(offset: number, noAssert?: boolean): number;
- readInt16BE(offset: number, noAssert?: boolean): number;
- readInt32LE(offset: number, noAssert?: boolean): number;
- readInt32BE(offset: number, noAssert?: boolean): number;
- readFloatLE(offset: number, noAssert?: boolean): number;
- readFloatBE(offset: number, noAssert?: boolean): number;
- readDoubleLE(offset: number, noAssert?: boolean): number;
- readDoubleBE(offset: number, noAssert?: boolean): number;
- swap16(): Buffer;
- swap32(): Buffer;
- swap64(): Buffer;
- writeUInt8(value: number, offset: number, noAssert?: boolean): number;
- writeUInt16LE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt16BE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt32LE(value: number, offset: number, noAssert?: boolean): number;
- writeUInt32BE(value: number, offset: number, noAssert?: boolean): number;
- writeInt8(value: number, offset: number, noAssert?: boolean): number;
- writeInt16LE(value: number, offset: number, noAssert?: boolean): number;
- writeInt16BE(value: number, offset: number, noAssert?: boolean): number;
- writeInt32LE(value: number, offset: number, noAssert?: boolean): number;
- writeInt32BE(value: number, offset: number, noAssert?: boolean): number;
- writeFloatLE(value: number, offset: number, noAssert?: boolean): number;
- writeFloatBE(value: number, offset: number, noAssert?: boolean): number;
- writeDoubleLE(value: number, offset: number, noAssert?: boolean): number;
- writeDoubleBE(value: number, offset: number, noAssert?: boolean): number;
- fill(value: any, offset?: number, end?: number): this;
- indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
- lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number;
- includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean;
-
- /**
- * Allocates a new buffer containing the given {str}.
- *
- * @param str String to store in buffer.
- * @param encoding encoding to use, optional. Default is 'utf8'
- */
- constructor (str: string, encoding?: string);
- /**
- * Allocates a new buffer of {size} octets.
- *
- * @param size count of octets to allocate.
- */
- constructor (size: number);
- /**
- * Allocates a new buffer containing the given {array} of octets.
- *
- * @param array The octets to store.
- */
- constructor (array: Uint8Array);
- /**
- * Produces a Buffer backed by the same allocated memory as
- * the given {ArrayBuffer}.
- *
- *
- * @param arrayBuffer The ArrayBuffer with which to share memory.
- */
- constructor (arrayBuffer: ArrayBuffer);
- /**
- * Allocates a new buffer containing the given {array} of octets.
- *
- * @param array The octets to store.
- */
- constructor (array: any[]);
- /**
- * Copies the passed {buffer} data onto a new {Buffer} instance.
- *
- * @param buffer The buffer to copy.
- */
- constructor (buffer: Buffer);
- prototype: Buffer;
- /**
- * Allocates a new Buffer using an {array} of octets.
- *
- * @param array
- */
- static from(array: any[]): Buffer;
- /**
- * When passed a reference to the .buffer property of a TypedArray instance,
- * the newly created Buffer will share the same allocated memory as the TypedArray.
- * The optional {byteOffset} and {length} arguments specify a memory range
- * within the {arrayBuffer} that will be shared by the Buffer.
- *
- * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer()
- * @param byteOffset
- * @param length
- */
- static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer;
- /**
- * Copies the passed {buffer} data onto a new Buffer instance.
- *
- * @param buffer
- */
- static from(buffer: Buffer): Buffer;
- /**
- * Creates a new Buffer containing the given JavaScript string {str}.
- * If provided, the {encoding} parameter identifies the character encoding.
- * If not provided, {encoding} defaults to 'utf8'.
- *
- * @param str
- */
- static from(str: string, encoding?: string): Buffer;
- /**
- * Returns true if {obj} is a Buffer
- *
- * @param obj object to test.
- */
- static isBuffer(obj: any): obj is Buffer;
- /**
- * Returns true if {encoding} is a valid encoding argument.
- * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex'
- *
- * @param encoding string to test.
- */
- static isEncoding(encoding: string): boolean;
- /**
- * Gives the actual byte length of a string. encoding defaults to 'utf8'.
- * This is not the same as String.prototype.length since that returns the number of characters in a string.
- *
- * @param string string to test.
- * @param encoding encoding used to evaluate (defaults to 'utf8')
- */
- static byteLength(string: string, encoding?: string): number;
- /**
- * Returns a buffer which is the result of concatenating all the buffers in the list together.
- *
- * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer.
- * If the list has exactly one item, then the first item of the list is returned.
- * If the list has more than one item, then a new Buffer is created.
- *
- * @param list An array of Buffer objects to concatenate
- * @param totalLength Total length of the buffers when concatenated.
- * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly.
- */
- static concat(list: Buffer[], totalLength?: number): Buffer;
- /**
- * The same as buf1.compare(buf2).
- */
- static compare(buf1: Buffer, buf2: Buffer): number;
- /**
- * Allocates a new buffer of {size} octets.
- *
- * @param size count of octets to allocate.
- * @param fill if specified, buffer will be initialized by calling buf.fill(fill).
- * If parameter is omitted, buffer will be filled with zeros.
- * @param encoding encoding used for call to buf.fill while initializing
- */
- static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer;
- /**
- * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents
- * of the newly created Buffer are unknown and may contain sensitive data.
- *
- * @param size count of octets to allocate
- */
- static allocUnsafe(size: number): Buffer;
- /**
- * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents
- * of the newly created Buffer are unknown and may contain sensitive data.
- *
- * @param size count of octets to allocate
- */
- static allocUnsafeSlow(size: number): Buffer;
- }
-}
\ No newline at end of file
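The declaration above also types the static helpers that the shim copies over unchanged from Node's own `Buffer` (for example `isBuffer`, `byteLength`, and `concat`); a small sketch of those, assuming the module behaves as typed:

```js
var Buffer = require('safe-buffer').Buffer

var a = Buffer.from('abc')
var b = Buffer.from([0x64, 0x65, 0x66])

console.log(Buffer.isBuffer(a))                 // true
console.log(Buffer.byteLength('héllo'))         // 6 -- bytes, not characters
console.log(Buffer.concat([a, b]).toString())   // 'abcdef'
```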
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.js b/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.js
deleted file mode 100644
index 054c8d30d..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/index.js
+++ /dev/null
@@ -1,64 +0,0 @@
-/* eslint-disable node/no-deprecated-api */
-var buffer = require('buffer')
-var Buffer = buffer.Buffer
-
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
- for (var key in src) {
- dst[key] = src[key]
- }
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
- module.exports = buffer
-} else {
- // Copy properties from require('buffer')
- copyProps(buffer, exports)
- exports.Buffer = SafeBuffer
-}
-
-function SafeBuffer (arg, encodingOrOffset, length) {
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.prototype = Object.create(Buffer.prototype)
-
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
-
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
- if (typeof arg === 'number') {
- throw new TypeError('Argument must not be a number')
- }
- return Buffer(arg, encodingOrOffset, length)
-}
-
-SafeBuffer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- var buf = Buffer(size)
- if (fill !== undefined) {
- if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
- }
- } else {
- buf.fill(0)
- }
- return buf
-}
-
-SafeBuffer.allocUnsafe = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return Buffer(size)
-}
-
-SafeBuffer.allocUnsafeSlow = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return buffer.SlowBuffer(size)
-}
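The shim above enforces two guarantees that calling `Buffer(...)` directly did not: `from()` rejects numeric arguments, and `alloc()` zero-fills unless a fill value is supplied. A minimal sketch of that contract (on Node versions where the native methods exist, safe-buffer re-exports them, so the exact error message may differ):

```js
var SafeBuffer = require('safe-buffer').Buffer

console.log(SafeBuffer.alloc(8))        // <Buffer 00 00 00 00 00 00 00 00>
console.log(SafeBuffer.alloc(4, 0xff))  // <Buffer ff ff ff ff>

try {
  SafeBuffer.from(8)                    // numbers are rejected to avoid uninitialized memory
} catch (err) {
  console.log(err.name)                 // TypeError
}
```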
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/package.json b/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/package.json
deleted file mode 100644
index bc0e115d5..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/node_modules/safe-buffer/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "safe-buffer@^5.2.0",
- "_id": "safe-buffer@5.2.0",
- "_inBundle": false,
- "_integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==",
- "_location": "/libcipm/npm-registry-fetch/safe-buffer",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "safe-buffer@^5.2.0",
- "name": "safe-buffer",
- "escapedName": "safe-buffer",
- "rawSpec": "^5.2.0",
- "saveSpec": null,
- "fetchSpec": "^5.2.0"
- },
- "_requiredBy": [
- "/libcipm/npm-registry-fetch"
- ],
- "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
- "_shasum": "b74daec49b1148f88c64b68d49b1e815c1f2f519",
- "_spec": "safe-buffer@^5.2.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/npm-registry-fetch",
- "author": {
- "name": "Feross Aboukhadijeh",
- "email": "feross@feross.org",
- "url": "http://feross.org"
- },
- "bugs": {
- "url": "https://github.com/feross/safe-buffer/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "Safer Node.js Buffer API",
- "devDependencies": {
- "standard": "*",
- "tape": "^4.0.0"
- },
- "homepage": "https://github.com/feross/safe-buffer",
- "keywords": [
- "buffer",
- "buffer allocate",
- "node security",
- "safe",
- "safe-buffer",
- "security",
- "uninitialized"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "safe-buffer",
- "repository": {
- "type": "git",
- "url": "git://github.com/feross/safe-buffer.git"
- },
- "scripts": {
- "test": "standard && tape test/*.js"
- },
- "types": "index.d.ts",
- "version": "5.2.0"
-}
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/package.json b/node_modules/libcipm/node_modules/npm-registry-fetch/package.json
deleted file mode 100644
index 2d48376b2..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/package.json
+++ /dev/null
@@ -1,96 +0,0 @@
-{
- "_from": "npm-registry-fetch@^4.0.0",
- "_id": "npm-registry-fetch@4.0.3",
- "_inBundle": false,
- "_integrity": "sha512-WGvUx0lkKFhu9MbiGFuT9nG2NpfQ+4dCJwRwwtK2HK5izJEvwDxMeUyqbuMS7N/OkpVCqDorV6rO5E4V9F8lJw==",
- "_location": "/libcipm/npm-registry-fetch",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-registry-fetch@^4.0.0",
- "name": "npm-registry-fetch",
- "escapedName": "npm-registry-fetch",
- "rawSpec": "^4.0.0",
- "saveSpec": null,
- "fetchSpec": "^4.0.0"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-4.0.3.tgz",
- "_shasum": "3c2179e39e04f9348b1c2979545951d36bee8766",
- "_spec": "npm-registry-fetch@^4.0.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/registry-fetch/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "JSONStream": "^1.3.4",
- "bluebird": "^3.5.1",
- "figgy-pudding": "^3.4.1",
- "lru-cache": "^5.1.1",
- "make-fetch-happen": "^5.0.0",
- "npm-package-arg": "^6.1.0",
- "safe-buffer": "^5.2.0"
- },
- "deprecated": false,
- "description": "Fetch-based http client for use with npm registry APIs",
- "devDependencies": {
- "cacache": "^12.0.0",
- "get-stream": "^4.0.0",
- "mkdirp": "^0.5.1",
- "nock": "^9.4.3",
- "npmlog": "^4.1.2",
- "rimraf": "^2.6.2",
- "ssri": "^6.0.0",
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tap": "^12.0.1",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/npm/registry-fetch#readme",
- "keywords": [
- "npm",
- "registry",
- "fetch"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "npm-registry-fetch",
- "publishConfig": {
- "tag": "latest-v4"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/registry-fetch.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "4.0.3"
-}
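For orientation, the manifest above describes a promise-returning HTTP client for npm registry endpoints. A rough usage sketch against the v4 API, where the module exports a fetch function with a `.json()` helper and accepts a `registry` option (treat the exact endpoint and option names as illustrative rather than authoritative):

```js
'use strict'

const npmFetch = require('npm-registry-fetch')

// Fetch a packument as parsed JSON from the configured registry.
npmFetch.json('/safe-buffer', { registry: 'https://registry.npmjs.org/' })
  .then((packument) => console.log(packument['dist-tags']))
  .catch((err) => console.error(err.code, err.message))
```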
diff --git a/node_modules/libcipm/node_modules/npm-registry-fetch/silentlog.js b/node_modules/libcipm/node_modules/npm-registry-fetch/silentlog.js
deleted file mode 100644
index 886c5d55b..000000000
--- a/node_modules/libcipm/node_modules/npm-registry-fetch/silentlog.js
+++ /dev/null
@@ -1,14 +0,0 @@
-'use strict'
-
-const noop = Function.prototype
-module.exports = {
- error: noop,
- warn: noop,
- notice: noop,
- info: noop,
- verbose: noop,
- silly: noop,
- http: noop,
- pause: noop,
- resume: noop
-}
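`silentlog.js` is simply an npmlog-shaped object whose methods do nothing, so the client can always call its logger without checking whether one was supplied; callers that want output pass their own logger (anything exposing the same method names) through the client's options instead. A tiny sketch, assuming the file resolves as a subpath of the package:

```js
'use strict'

// Assumed subpath require; the file ships at the package root in this version.
const silentLog = require('npm-registry-fetch/silentlog')

silentLog.warn('registry', 'this produces no output')

// A drop-in replacement only needs the same method names.
const noisyLog = Object.assign({}, silentLog, {
  warn: (...args) => console.warn('WARN', ...args)
})
noisyLog.warn('registry', 'this one is visible')
```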
diff --git a/node_modules/libcipm/node_modules/pacote/CHANGELOG.md b/node_modules/libcipm/node_modules/pacote/CHANGELOG.md
deleted file mode 100644
index b632c4eb4..000000000
--- a/node_modules/libcipm/node_modules/pacote/CHANGELOG.md
+++ /dev/null
@@ -1,1417 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="9.5.12"></a>
-## [9.5.12](https://github.com/npm/pacote/compare/v9.5.11...v9.5.12) (2020-01-06)
-
-
-### Bug Fixes
-
-* **git:** Do not drop uid/gid when executing in root-owned directory ([d2f4176](https://github.com/npm/pacote/commit/d2f4176))
-
-
-
-<a name="9.5.11"></a>
-## [9.5.11](https://github.com/npm/pacote/compare/v9.5.10...v9.5.11) (2019-12-09)
-
-
-### Bug Fixes
-
-* sanitize and normalize package bin field ([6f229f7](https://github.com/npm/pacote/commit/6f229f7))
-
-
-
-<a name="9.5.10"></a>
-## [9.5.10](https://github.com/npm/pacote/compare/v9.5.9...v9.5.10) (2019-12-04)
-
-
-### Bug Fixes
-
-* Do not drop perms in git when not root ([5f33040](https://github.com/npm/pacote/commit/5f33040)), closes [#23](https://github.com/npm/pacote/issues/23)
-
-
-
-<a name="9.5.9"></a>
-## [9.5.9](https://github.com/npm/pacote/compare/v9.5.8...v9.5.9) (2019-10-29)
-
-
-### Bug Fixes
-
-* include peerDependenciesMeta in manifest ([7a400d3](https://github.com/npm/pacote/commit/7a400d3)), closes [npm/cli#224 (comment)](https://github.com/npm/cli/pull/224#issuecomment-547666807)
-
-
-
-<a name="9.5.8"></a>
-## [9.5.8](https://github.com/npm/pacote/compare/v9.5.7...v9.5.8) (2019-08-20)
-
-
-
-<a name="9.5.7"></a>
-## [9.5.7](https://github.com/npm/pacote/compare/v9.5.6...v9.5.7) (2019-08-19)
-
-
-### Bug Fixes
-
-* do not try to chown if not running as root ([bbc5da3](https://github.com/npm/pacote/commit/bbc5da3))
-
-
-
-<a name="9.5.6"></a>
-## [9.5.6](https://github.com/npm/pacote/compare/v9.5.5...v9.5.6) (2019-08-15)
-
-
-### Bug Fixes
-
-* **extract:** chown properly when more than one directory is made ([5161828](https://github.com/npm/pacote/commit/5161828))
-
-
-
-<a name="9.5.5"></a>
-## [9.5.5](https://github.com/npm/pacote/compare/v9.5.4...v9.5.5) (2019-08-12)
-
-
-### Bug Fixes
-
-* don't pass uid/gid to cacache ([0a0c73c](https://github.com/npm/pacote/commit/0a0c73c))
-* Infer owner of all unpacked files ([f12e7ef](https://github.com/npm/pacote/commit/f12e7ef))
-* invalid arg detection in extract() ([b4dc363](https://github.com/npm/pacote/commit/b4dc363)), closes [#5](https://github.com/npm/pacote/issues/5) [#6](https://github.com/npm/pacote/issues/6)
-
-
-
-<a name="9.5.4"></a>
-## [9.5.4](https://github.com/npm/pacote/compare/v9.5.3...v9.5.4) (2019-07-16)
-
-
-### Bug Fixes
-
-* **git:** ensure stream failures are reported ([7f07b5d](https://github.com/npm/pacote/commit/7f07b5d)), closes [#1](https://github.com/npm/pacote/issues/1)
-
-
-
-<a name="9.5.3"></a>
-## [9.5.3](https://github.com/npm/pacote/compare/v9.5.2...v9.5.3) (2019-07-16)
-
-
-
-<a name="9.5.2"></a>
-## [9.5.2](https://github.com/npm/pacote/compare/v9.5.1...v9.5.2) (2019-07-12)
-
-
-### Bug Fixes
-
-* always pass uid/gid to cacache.put ([3d08925](https://github.com/npm/pacote/commit/3d08925))
-
-
-
-<a name="9.5.1"></a>
-## [9.5.1](https://github.com/npm/pacote/compare/v9.5.0...v9.5.1) (2019-06-17)
-
-
-### Bug Fixes
-
-* **audit:** npm audit fix ([127a28b](https://github.com/npm/pacote/commit/127a28b))
-* **errors:** Fix "TypeError: err.code.match is not a function" error ([#170](https://github.com/npm/pacote/issues/170)) ([92f5e4c](https://github.com/zkat/pacote/commit/92f5e4c))
-* **git:** limit retry times, avoid unlimited retries ([#172](https://github.com/npm/pacote/issues/172)) ([8bbd051](https://github.com/zkat/pacote/commit/8bbd051))
-
-
-
-<a name="9.5.0"></a>
-# [9.5.0](https://github.com/npm/pacote/compare/v9.4.1...v9.5.0) (2019-02-18)
-
-
-### Features
-
-* **enjoy-by:** add `before` as an alias to enjoy-by ([75d62b7](https://github.com/npm/pacote/commit/75d62b7))
-
-
-
-<a name="9.4.1"></a>
-## [9.4.1](https://github.com/npm/pacote/compare/v9.4.0...v9.4.1) (2019-01-24)
-
-
-### Bug Fixes
-
-* **directory, finalize-manifest:** strip byte order marker from JSON ([723ad63](https://github.com/npm/pacote/commit/723ad63))
-
-
-
-<a name="9.4.0"></a>
-# [9.4.0](https://github.com/npm/pacote/compare/v9.3.0...v9.4.0) (2019-01-14)
-
-
-### Features
-
-* **registry:** fall back to fullfat if something might be wrong with corgis ([0e71d6b](https://github.com/npm/pacote/commit/0e71d6b))
-
-
-
-<a name="9.3.0"></a>
-# [9.3.0](https://github.com/npm/pacote/compare/v9.2.3...v9.3.0) (2018-12-21)
-
-
-### Bug Fixes
-
-* **git, file:** properly catch otherwise unhandled errors ([89d4897](https://github.com/npm/pacote/commit/89d4897))
-* **test:** set umask opt to fix extract-stream 'accepts dmode/fmode/umask opts' ([e51de83](https://github.com/npm/pacote/commit/e51de83))
-
-
-### Features
-
-* **git:** accept git path option ([#164](https://github.com/npm/pacote/issues/164)) ([f06c8c5](https://github.com/zkat/pacote/commit/f06c8c5))
-
-
-
-<a name="9.2.3"></a>
-## [9.2.3](https://github.com/npm/pacote/compare/v9.2.2...v9.2.3) (2018-10-31)
-
-
-
-<a name="9.2.2"></a>
-## [9.2.2](https://github.com/npm/pacote/compare/v9.2.1...v9.2.2) (2018-10-31)
-
-
-
-<a name="9.2.1"></a>
-## [9.2.1](https://github.com/npm/pacote/compare/v9.2.0...v9.2.1) (2018-10-31)
-
-
-
-<a name="9.2.0"></a>
-# [9.2.0](https://github.com/npm/pacote/compare/v9.1.1...v9.2.0) (2018-10-30)
-
-
-### Features
-
-* **enjoyBy:** add opts.enjoy-by option ([7df399c](https://github.com/npm/pacote/commit/7df399c))
-
-
-
-<a name="9.1.1"></a>
-## [9.1.1](https://github.com/npm/pacote/compare/v9.1.0...v9.1.1) (2018-10-26)
-
-
-### Bug Fixes
-
-* **deps:** bump protoduck to remove CC0-1.0 license in dep ([3d9d9a6](https://github.com/npm/pacote/commit/3d9d9a6))
-* **git:** Fix temp directory permissions for git fetcher ([#159](https://github.com/npm/pacote/issues/159)) ([97c3aeb](https://github.com/zkat/pacote/commit/97c3aeb))
-* **packument:** group together all registry specs in silly log ([2333a17](https://github.com/npm/pacote/commit/2333a17))
-* **standard:** standard --fix ([0ecb188](https://github.com/npm/pacote/commit/0ecb188))
-
-
-
-<a name="9.1.0"></a>
-# [9.1.0](https://github.com/npm/pacote/compare/v9.0.0...v9.1.0) (2018-08-15)
-
-
-### Bug Fixes
-
-* **docs:** tweaking ToC ([1eeb8a0](https://github.com/npm/pacote/commit/1eeb8a0))
-* **packument:** take accept header into account when memoizing ([3c637e8](https://github.com/npm/pacote/commit/3c637e8))
-
-
-### Features
-
-* **aliases:** add support for registry alias specs ([b173f26](https://github.com/npm/pacote/commit/b173f26))
-* **packument:** add packument api ([97888d9](https://github.com/npm/pacote/commit/97888d9))
-
-
-
-<a name="9.0.0"></a>
-# [9.0.0](https://github.com/npm/pacote/compare/v8.1.6...v9.0.0) (2018-07-31)
-
-
-### Bug Fixes
-
-* **travis:** idk why travis was failing ([ab593c9](https://github.com/npm/pacote/commit/ab593c9))
-
-
-### Features
-
-* **config:** convert to use figgy-pudding ([0b5bb82](https://github.com/npm/pacote/commit/0b5bb82))
-* **log:** use process.emit-style logging by default ([29ff8b3](https://github.com/npm/pacote/commit/29ff8b3))
-* **registry:** switch to npm-registry-fetch ([c43d6b0](https://github.com/npm/pacote/commit/c43d6b0))
-
-
-### BREAKING CHANGES
-
-* **log:** pacote will start emitting events on the process object by default now, instead of doing silent logging
-* **registry:** config has changed significantly, especially
-for auth and registry-related configs. Refer to README.md
-for available options.
-
-
-
-<a name="8.1.6"></a>
-## [8.1.6](https://github.com/npm/pacote/compare/v8.1.5...v8.1.6) (2018-05-24)
-
-
-
-<a name="8.1.5"></a>
-## [8.1.5](https://github.com/npm/pacote/compare/v8.1.4...v8.1.5) (2018-05-22)
-
-
-
-<a name="8.1.4"></a>
-## [8.1.4](https://github.com/npm/pacote/compare/v8.1.3...v8.1.4) (2018-05-22)
-
-
-
-<a name="8.1.3"></a>
-## [8.1.3](https://github.com/npm/pacote/compare/v8.1.2...v8.1.3) (2018-05-20)
-
-
-### Bug Fixes
-
-* **deps:** try reverting tar ([574ecff](https://github.com/npm/pacote/commit/574ecff))
-* **extract-stream:** address "write after end" issue ([#151](https://github.com/npm/pacote/issues/151)) ([50ed408](https://github.com/zkat/pacote/commit/50ed408)), closes [#142](https://github.com/zkat/pacote/issues/142)
-
-
-
-<a name="8.1.2"></a>
-## [8.1.2](https://github.com/npm/pacote/compare/v8.1.1...v8.1.2) (2018-05-16)
-
-
-### Bug Fixes
-
-* **extract-stream:** nudge things to stop write-after-end heisenbug, hopefully ([a398715](https://github.com/npm/pacote/commit/a398715))
-
-
-
-<a name="8.1.1"></a>
-## [8.1.1](https://github.com/npm/pacote/compare/v8.1.0...v8.1.1) (2018-04-24)
-
-
-### Bug Fixes
-
-* **tarball:** Remove promise handler error ([#148](https://github.com/npm/pacote/issues/148)) ([47da3f6](https://github.com/zkat/pacote/commit/47da3f6)), closes [#145](https://github.com/zkat/pacote/issues/145)
-
-
-
-<a name="8.1.0"></a>
-# [8.1.0](https://github.com/npm/pacote/compare/v8.0.0...v8.1.0) (2018-04-18)
-
-
-### Bug Fixes
-
-* **git:** workaround for mississippi.finished intermittent failures ([#144](https://github.com/npm/pacote/issues/144)) ([788fd13](https://github.com/zkat/pacote/commit/788fd13)), closes [#143](https://github.com/zkat/pacote/issues/143)
-
-
-### Features
-
-* **tarball:** calculate shasum when missing, not just integrity ([#149](https://github.com/npm/pacote/issues/149)) ([ccc6e90](https://github.com/zkat/pacote/commit/ccc6e90))
-
-
-
-<a name="8.0.0"></a>
-# [8.0.0](https://github.com/npm/pacote/compare/v7.6.1...v8.0.0) (2018-04-12)
-
-
-### Bug Fixes
-
-* **git:** make full clones do a full mirror ([85b269b](https://github.com/npm/pacote/commit/85b269b))
-
-
-### deps
-
-* bump deps ([6737bf6](https://github.com/npm/pacote/commit/6737bf6))
-
-
-### meta
-
-* drop support for node@4 ([11478ff](https://github.com/npm/pacote/commit/11478ff))
-
-
-### BREAKING CHANGES
-
-* some dependencies were upgraded to versions that do not
-support node@4.
-* node@4 is no longer supported
-
-
-
-<a name="7.6.1"></a>
-## [7.6.1](https://github.com/npm/pacote/compare/v7.6.0...v7.6.1) (2018-03-08)
-
-
-### Bug Fixes
-
-* **standard:** update to new standard rules ([bb52d02](https://github.com/npm/pacote/commit/bb52d02))
-
-
-
-<a name="7.6.0"></a>
-# [7.6.0](https://github.com/npm/pacote/compare/v7.5.3...v7.6.0) (2018-03-08)
-
-
-### Features
-
-* **git:** added retry logic for all git operations. ([#136](https://github.com/npm/pacote/issues/136)) ([425c58d](https://github.com/zkat/pacote/commit/425c58d))
-
-
-
-<a name="7.5.3"></a>
-## [7.5.3](https://github.com/npm/pacote/compare/v7.5.2...v7.5.3) (2018-03-02)
-
-
-### Bug Fixes
-
-* **tarball:** stop dropping stream errors on the floor ([3db03c2](https://github.com/npm/pacote/commit/3db03c2))
-
-
-
-<a name="7.5.2"></a>
-## [7.5.2](https://github.com/npm/pacote/compare/v7.5.1...v7.5.2) (2018-03-02)
-
-
-### Bug Fixes
-
-* **console:** remove spurious debugging console.log :< ([5b8b509](https://github.com/npm/pacote/commit/5b8b509))
-
-
-
-<a name="7.5.1"></a>
-## [7.5.1](https://github.com/npm/pacote/compare/v7.5.0...v7.5.1) (2018-03-01)
-
-
-### Bug Fixes
-
-* **tarball:** catch errors thrown from stream handler ([bdd6628](https://github.com/npm/pacote/commit/bdd6628))
-
-
-
-<a name="7.5.0"></a>
-# [7.5.0](https://github.com/npm/pacote/compare/v7.4.2...v7.5.0) (2018-03-01)
-
-
-### Features
-
-* **logging:** let users know when file: resolved packages fail integrity check ([0fb8037](https://github.com/npm/pacote/commit/0fb8037))
-
-
-
-<a name="7.4.2"></a>
-## [7.4.2](https://github.com/npm/pacote/compare/v7.4.1...v7.4.2) (2018-02-23)
-
-
-### Bug Fixes
-
-* **deps:** move mkdirp and rimraf to dependencies ([#140](https://github.com/npm/pacote/issues/140)) ([bba20c8](https://github.com/zkat/pacote/commit/bba20c8)), closes [#128](https://github.com/zkat/pacote/issues/128)
-
-
-
-<a name="7.4.1"></a>
-## [7.4.1](https://github.com/npm/pacote/compare/v7.4.0...v7.4.1) (2018-02-23)
-
-
-### Bug Fixes
-
-* **tarball:** fix spurious errors from tarball.stream() ([0286ba5](https://github.com/npm/pacote/commit/0286ba5))
-
-
-
-<a name="7.4.0"></a>
-# [7.4.0](https://github.com/npm/pacote/compare/v7.3.3...v7.4.0) (2018-02-17)
-
-
-### Features
-
-* **tarball:** support file: opts.resolved shortcut ([a6cf279](https://github.com/npm/pacote/commit/a6cf279))
-
-
-
-<a name="7.3.3"></a>
-## [7.3.3](https://github.com/npm/pacote/compare/v7.3.2...v7.3.3) (2018-02-15)
-
-
-### Bug Fixes
-
-* **tarball:** another attempt at fixing opts.resolved ([aff3b6a](https://github.com/npm/pacote/commit/aff3b6a))
-
-
-
-<a name="7.3.2"></a>
-## [7.3.2](https://github.com/npm/pacote/compare/v7.3.1...v7.3.2) (2018-02-15)
-
-
-### Bug Fixes
-
-* **tarball:** opts.resolved impl was triggering extra registry lookups ([0a4729d](https://github.com/npm/pacote/commit/0a4729d))
-
-
-
-<a name="7.3.1"></a>
-## [7.3.1](https://github.com/npm/pacote/compare/v7.3.0...v7.3.1) (2018-02-14)
-
-
-### Bug Fixes
-
-* **tarball:** stop using mississippi.pipe() in tarball.js and extract.js ([f5c1da9](https://github.com/npm/pacote/commit/f5c1da9))
-
-
-
-<a name="7.3.0"></a>
-# [7.3.0](https://github.com/npm/pacote/compare/v7.2.0...v7.3.0) (2018-02-07)
-
-
-### Bug Fixes
-
-* **git:** fix resolution of prerelease versions ([#130](https://github.com/npm/pacote/issues/130)) ([83be46b](https://github.com/zkat/pacote/commit/83be46b)), closes [#129](https://github.com/zkat/pacote/issues/129)
-
-
-### Features
-
-* **extract:** append _resolved and _integrity automatically ([#134](https://github.com/npm/pacote/issues/134)) ([6886b65](https://github.com/zkat/pacote/commit/6886b65))
-
-
-
-<a name="7.2.0"></a>
-# [7.2.0](https://github.com/npm/pacote/compare/v7.1.1...v7.2.0) (2018-01-19)
-
-
-### Features
-
-* **resolved:** tarball shortcut when opts.resolved is provided ([46a2f58](https://github.com/npm/pacote/commit/46a2f58))
-
-
-
-<a name="7.1.1"></a>
-## [7.1.1](https://github.com/npm/pacote/compare/v7.1.0...v7.1.1) (2018-01-08)
-
-
-### Bug Fixes
-
-* **publish:** a spurious file was included in the previous release ([296741a](https://github.com/npm/pacote/commit/296741a))
-
-
-
-<a name="7.1.0"></a>
-# [7.1.0](https://github.com/npm/pacote/compare/v7.0.2...v7.1.0) (2018-01-07)
-
-
-### Bug Fixes
-
-* **security:** deep-update debug due to vulnerabilities ([ff16da7](https://github.com/npm/pacote/commit/ff16da7))
-
-
-### Features
-
-* **resolved:** add opts.resolved for cache stuff ([#131](https://github.com/npm/pacote/issues/131)) ([149a4b5](https://github.com/zkat/pacote/commit/149a4b5))
-
-
-
-<a name="7.0.2"></a>
-## [7.0.2](https://github.com/npm/pacote/compare/v7.0.1...v7.0.2) (2017-11-28)
-
-
-### Bug Fixes
-
-* **git:** only resolvedRefs can be shallow-cloned ([899720f](https://github.com/npm/pacote/commit/899720f))
-
-
-
-<a name="7.0.1"></a>
-## [7.0.1](https://github.com/npm/pacote/compare/v7.0.0...v7.0.1) (2017-11-15)
-
-
-### Bug Fixes
-
-* **git:** use resolved ref if available when doing a full clone (#125) ([46ca45a](https://github.com/npm/pacote/commit/46ca45a)), closes [#125](https://github.com/zkat/pacote/issues/125)
-* **move:** bump cacache for some cross-platform move fixes ([eebdcda](https://github.com/npm/pacote/commit/eebdcda))
-* **test:** missed a spot converting tests to promises ([c43caed](https://github.com/npm/pacote/commit/c43caed))
-
-
-
-<a name="7.0.0"></a>
-# [7.0.0](https://github.com/npm/pacote/compare/v6.1.0...v7.0.0) (2017-11-15)
-
-
-### Bug Fixes
-
-* **docs:** You totally should use pacote now (#126) ([d49a9b5](https://github.com/npm/pacote/commit/d49a9b5))
-* **git:** stop generating integrity for git ([d45363b](https://github.com/npm/pacote/commit/d45363b))
-* **integrity:** stop defaulting to sha1 hashes ([62f8cdf](https://github.com/npm/pacote/commit/62f8cdf))
-* **license:** relicense to MIT for OSI-compat ([ba6b3e0](https://github.com/npm/pacote/commit/ba6b3e0))
-
-
-### Features
-
-* **tarball:** add external pacote.tarball() api ([e30bd49](https://github.com/npm/pacote/commit/e30bd49))
-
-
-### prefetch
-
-* deprecate pacote.prefetch ([e47e521](https://github.com/npm/pacote/commit/e47e521))
-
-
-### BREAKING CHANGES
-
-* **license:** The license has changed from CC0-1.0 to MIT, which is less permissive and also OSI-approved.
-* pacote.prefetch is deprecated in favor of pacote.tarball
-
-
-
-<a name="6.1.0"></a>
-# [6.1.0](https://github.com/npm/pacote/compare/v6.0.4...v6.1.0) (2017-10-19)
-
-
-### Bug Fixes
-
-* **git:** use actual default git branch instead of assuming master (#122) ([79ce949](https://github.com/npm/pacote/commit/79ce949))
-* **npa:** ensure spec is a valid npa instance ([1757b2b](https://github.com/npm/pacote/commit/1757b2b))
-
-
-### Features
-
-* **selection:** add opts.includeDeprecated (#123) ([2001549](https://github.com/npm/pacote/commit/2001549))
-
-
-
-<a name="6.0.4"></a>
-## [6.0.4](https://github.com/npm/pacote/compare/v6.0.3...v6.0.4) (2017-10-05)
-
-
-### Bug Fixes
-
-* **file:** include integrity hash for streamed tarballs too ([030cee7](https://github.com/npm/pacote/commit/030cee7))
-
-
-
-<a name="6.0.3"></a>
-## [6.0.3](https://github.com/npm/pacote/compare/v6.0.2...v6.0.3) (2017-10-05)
-
-
-### Bug Fixes
-
-* **extract:** clean up mode/fmode/dmode tests ([f915045](https://github.com/npm/pacote/commit/f915045))
-* **file:** make sure file tarballs are written to cache and have integrity data ([dae391a](https://github.com/npm/pacote/commit/dae391a))
-* **git:** version resolution regression from #115 (#119) ([9a68205](https://github.com/npm/pacote/commit/9a68205))
-
-
-
-<a name="6.0.2"></a>
-## [6.0.2](https://github.com/npm/pacote/compare/v6.0.1...v6.0.2) (2017-09-06)
-
-
-### Bug Fixes
-
-* **extract:** preserve executable perms on extracted files ([19b3dfd](https://github.com/npm/pacote/commit/19b3dfd))
-
-
-### Performance Improvements
-
-* replace some calls to .match() with .starts/endsWith() (#115) ([192a02f](https://github.com/npm/pacote/commit/192a02f))
-
-
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/npm/pacote/compare/v6.0.0...v6.0.1) (2017-08-22)
-
-
-### Bug Fixes
-
-* **finalize:** insist on getting a package.json ([f72ee91](https://github.com/npm/pacote/commit/f72ee91))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/npm/pacote/compare/v5.0.1...v6.0.0) (2017-08-19)
-
-
-### Bug Fixes
-
-* **tar:** bring back the .gitignore -> .npmignore logic (#113) ([0dd518e](https://github.com/npm/pacote/commit/0dd518e))
-
-
-### BREAKING CHANGES
-
-* **tar:** this reverts a previous change to disable this feature.
-
-
-
-<a name="5.0.1"></a>
-## [5.0.1](https://github.com/npm/pacote/compare/v5.0.0...v5.0.1) (2017-08-17)
-
-
-### Bug Fixes
-
-* **tar:** chown directories on extract as well ([2fa4598](https://github.com/npm/pacote/commit/2fa4598))
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/npm/pacote/compare/v4.0.0...v5.0.0) (2017-08-16)
-
-
-### Bug Fixes
-
-* **registry:** Pass maxSockets options down (#110) ([3f05b79](https://github.com/npm/pacote/commit/3f05b79))
-
-
-### Features
-
-* **deps:** replace tar-fs/tar-stream with tar@3 ([28c80a9](https://github.com/npm/pacote/commit/28c80a9))
-* **tar:** switch to tarv3 ([53899c7](https://github.com/npm/pacote/commit/53899c7))
-
-
-### BREAKING CHANGES
-
-* **tar:** this changes the underlying tar library, and thus may introduce some subtle low-level incompatibility. Also:
-
-* The tarball packer built into pacote works much closer to how the one npm injects does.
-* Special characters on Windows will now be escaped the way tar(1) usually does: by replacing them with the `0xf000` masked character on the way out.
-* Directories won't be chowned.
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/npm/pacote/compare/v3.0.0...v4.0.0) (2017-06-29)
-
-
-### Bug Fixes
-
-* **extract:** revert uid/gid change ([41852e0](https://github.com/npm/pacote/commit/41852e0))
-
-
-### BREAKING CHANGES
-
-* **extract:** behavior for setting uid/gid on extracted contents was restored to what it was in pacote@2
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/npm/pacote/compare/v2.7.38...v3.0.0) (2017-06-29)
-
-
-### Bug Fixes
-
-* **extract:** always extract as current user gid/uid ([6fc01a5](https://github.com/npm/pacote/commit/6fc01a5))
-
-
-### BREAKING CHANGES
-
-* **extract:** pacote will no longer set ownership of extracted
-contents -- uid/gid will *only* be used for the cache and other internal
-details.
-
-
-
-<a name="2.7.38"></a>
-## [2.7.38](https://github.com/npm/pacote/compare/v2.7.37...v2.7.38) (2017-06-29)
-
-
-### Bug Fixes
-
-* **manifest:** bump npm-pick-manifest for loose semver fix ([b3d45ef](https://github.com/npm/pacote/commit/b3d45ef))
-
-
-
-<a name="2.7.37"></a>
-## [2.7.37](https://github.com/npm/pacote/compare/v2.7.36...v2.7.37) (2017-06-29)
-
-
-### Bug Fixes
-
-* **deps:** bump deps for fixes ([f156655](https://github.com/npm/pacote/commit/f156655))
-
-
-
-<a name="2.7.36"></a>
-## [2.7.36](https://github.com/npm/pacote/compare/v2.7.35...v2.7.36) (2017-06-10)
-
-
-### Bug Fixes
-
-* **deps:** update tar-fs with the special characters patch (#102) ([ed43aa3](https://github.com/npm/pacote/commit/ed43aa3))
-
-
-
-<a name="2.7.35"></a>
-## [2.7.35](https://github.com/npm/pacote/compare/v2.7.34...v2.7.35) (2017-06-09)
-
-
-### Bug Fixes
-
-* **registry:** only print one 199 warning (#100) ([b395138](https://github.com/npm/pacote/commit/b395138))
-
-
-
-<a name="2.7.34"></a>
-## [2.7.34](https://github.com/npm/pacote/compare/v2.7.33...v2.7.34) (2017-06-09)
-
-
-### Bug Fixes
-
-* **git:** whitelist specific shallow-cloneable hosts ([b210cc8](https://github.com/npm/pacote/commit/b210cc8))
-
-
-
-<a name="2.7.33"></a>
-## [2.7.33](https://github.com/npm/pacote/compare/v2.7.32...v2.7.33) (2017-06-08)
-
-
-### Bug Fixes
-
-* **git:** better error reporting when ls-remote fails ([10aae8f](https://github.com/npm/pacote/commit/10aae8f))
-
-
-
-<a name="2.7.32"></a>
-## [2.7.32](https://github.com/npm/pacote/compare/v2.7.31...v2.7.32) (2017-06-07)
-
-
-### Bug Fixes
-
-* **registry:** print both 111 and 199 warnings ([2f8c201](https://github.com/npm/pacote/commit/2f8c201))
-
-
-
-<a name="2.7.31"></a>
-## [2.7.31](https://github.com/npm/pacote/compare/v2.7.30...v2.7.31) (2017-06-06)
-
-
-### Bug Fixes
-
-* **extract:** always return a bluebird promise ([06ca91d](https://github.com/npm/pacote/commit/06ca91d))
-* **registry:** bump make-fetch-happen for local cache header issue fix ([868615c](https://github.com/npm/pacote/commit/868615c))
-
-
-
-<a name="2.7.30"></a>
-## [2.7.30](https://github.com/npm/pacote/compare/v2.7.29...v2.7.30) (2017-06-05)
-
-
-### Bug Fixes
-
-* **ssri:** bump ssri for bugfix ([70a859c](https://github.com/npm/pacote/commit/70a859c))
-
-
-
-<a name="2.7.29"></a>
-## [2.7.29](https://github.com/npm/pacote/compare/v2.7.28...v2.7.29) (2017-06-05)
-
-
-### Bug Fixes
-
-* **registry:** use cert instead of certfile opt ([a45880d](https://github.com/npm/pacote/commit/a45880d))
-
-
-
-<a name="2.7.28"></a>
-## [2.7.28](https://github.com/npm/pacote/compare/v2.7.27...v2.7.28) (2017-06-05)
-
-
-### Bug Fixes
-
-* **git:** limit ls-remote output to heads/tags (#97) ([c1e3dcd](https://github.com/npm/pacote/commit/c1e3dcd))
-* **proxy:** send certificate authority, key and other options (#95) ([c4b6128](https://github.com/npm/pacote/commit/c4b6128))
-* **registry:** add support for global auth and _auth token (#96) ([7919fb7](https://github.com/npm/pacote/commit/7919fb7))
-* **registry:** emit npm-session header (#98) ([9816b18](https://github.com/npm/pacote/commit/9816b18))
-
-
-
-<a name="2.7.27"></a>
-## [2.7.27](https://github.com/npm/pacote/compare/v2.7.26...v2.7.27) (2017-06-01)
-
-
-### Bug Fixes
-
-* **git:** fix semver range detection. oops ([76d9233](https://github.com/npm/pacote/commit/76d9233))
-
-
-
-<a name="2.7.26"></a>
-## [2.7.26](https://github.com/npm/pacote/compare/v2.7.25...v2.7.26) (2017-06-01)
-
-
-### Bug Fixes
-
-* **git:** hash was not being replaced/appended correctly ([6fcbed5](https://github.com/npm/pacote/commit/6fcbed5))
-
-
-
-<a name="2.7.25"></a>
-## [2.7.25](https://github.com/npm/pacote/compare/v2.7.24...v2.7.25) (2017-05-31)
-
-
-### Bug Fixes
-
-* **git:** git deps were getting _resolved without shasums ([96f0675](https://github.com/npm/pacote/commit/96f0675))
-
-
-
-<a name="2.7.24"></a>
-## [2.7.24](https://github.com/npm/pacote/compare/v2.7.23...v2.7.24) (2017-05-31)
-
-
-### Bug Fixes
-
-* **deps:** update dep versions with new patches ([dc2e4ff](https://github.com/npm/pacote/commit/dc2e4ff))
-
-
-
-<a name="2.7.23"></a>
-## [2.7.23](https://github.com/npm/pacote/compare/v2.7.22...v2.7.23) (2017-05-31)
-
-
-### Bug Fixes
-
-* **git:** fix ls-remote command and throw away ^{} junk ([62ba84d](https://github.com/npm/pacote/commit/62ba84d))
-* **git:** use the parsed git committish from npa ([77a676a](https://github.com/npm/pacote/commit/77a676a))
-
-
-
-<a name="2.7.22"></a>
-## [2.7.22](https://github.com/npm/pacote/compare/v2.7.21...v2.7.22) (2017-05-31)
-
-
-### Bug Fixes
-
-* **git:** accept shortened git hashes (#91) ([4466388](https://github.com/npm/pacote/commit/4466388))
-
-
-
-<a name="2.7.21"></a>
-## [2.7.21](https://github.com/npm/pacote/compare/v2.7.20...v2.7.21) (2017-05-25)
-
-
-### Bug Fixes
-
-* **registry:** stop URIEncoding username/password ([011c9a2](https://github.com/npm/pacote/commit/011c9a2))
-
-
-
-<a name="2.7.20"></a>
-## [2.7.20](https://github.com/npm/pacote/compare/v2.7.19...v2.7.20) (2017-05-25)
-
-
-### Bug Fixes
-
-* **registry:** encode username and password for auth ([c48b651](https://github.com/npm/pacote/commit/c48b651))
-
-
-
-<a name="2.7.19"></a>
-## [2.7.19](https://github.com/npm/pacote/compare/v2.7.18...v2.7.19) (2017-05-25)
-
-
-### Bug Fixes
-
-* **registry:** respect alwaysAuth ([150788a](https://github.com/npm/pacote/commit/150788a))
-
-
-
-<a name="2.7.18"></a>
-## [2.7.18](https://github.com/npm/pacote/compare/v2.7.17...v2.7.18) (2017-05-25)
-
-
-### Bug Fixes
-
-* **cache:** pass uid/gid settings through to mfh ([d8845df](https://github.com/npm/pacote/commit/d8845df))
-* **deps:** update m-f-h for cache opts fix ([faab6cd](https://github.com/npm/pacote/commit/faab6cd))
-
-
-
-<a name="2.7.17"></a>
-## [2.7.17](https://github.com/npm/pacote/compare/v2.7.16...v2.7.17) (2017-05-25)
-
-
-### Bug Fixes
-
-* **deps:** bump cacache ([34bd656](https://github.com/npm/pacote/commit/34bd656))
-
-
-
-<a name="2.7.16"></a>
-## [2.7.16](https://github.com/npm/pacote/compare/v2.7.15...v2.7.16) (2017-05-24)
-
-
-### Bug Fixes
-
-* **deps:** pull in various fixes from deps ([4354703](https://github.com/npm/pacote/commit/4354703))
-
-
-
-<a name="2.7.15"></a>
-## [2.7.15](https://github.com/npm/pacote/compare/v2.7.14...v2.7.15) (2017-05-24)
-
-
-### Bug Fixes
-
-* **proxy:** bump m-f-h with more patches ([26d4170](https://github.com/npm/pacote/commit/26d4170))
-
-
-
-<a name="2.7.14"></a>
-## [2.7.14](https://github.com/npm/pacote/compare/v2.7.13...v2.7.14) (2017-05-24)
-
-
-### Bug Fixes
-
-* **proxy:** pull in new m-f-h with fixed http proxies ([d6a14e0](https://github.com/npm/pacote/commit/d6a14e0))
-
-
-
-<a name="2.7.13"></a>
-## [2.7.13](https://github.com/npm/pacote/compare/v2.7.12...v2.7.13) (2017-05-23)
-
-
-### Bug Fixes
-
-* **deps:** bump dep versions to fix http redirect issues ([b23a9fa](https://github.com/npm/pacote/commit/b23a9fa))
-
-
-
-<a name="2.7.12"></a>
-## [2.7.12](https://github.com/npm/pacote/compare/v2.7.11...v2.7.12) (2017-05-16)
-
-
-### Bug Fixes
-
-* **fetch:** fix default userAgent ([4b9d344](https://github.com/npm/pacote/commit/4b9d344))
-* **registry:** log failed requests too ([0f23f06](https://github.com/npm/pacote/commit/0f23f06))
-* **remote:** send a useful pkg id header for remote tarballs ([ac13356](https://github.com/npm/pacote/commit/ac13356))
-
-
-
-<a name="2.7.11"></a>
-## [2.7.11](https://github.com/npm/pacote/compare/v2.7.10...v2.7.11) (2017-05-12)
-
-
-### Bug Fixes
-
-* **fetch:** make it play nicer with bundlers ([67cd713](https://github.com/npm/pacote/commit/67cd713))
-
-
-
-<a name="2.7.10"></a>
-## [2.7.10](https://github.com/npm/pacote/compare/v2.7.9...v2.7.10) (2017-05-12)
-
-
-### Bug Fixes
-
-* **logging:** shhhhhhh ([e7ea56e](https://github.com/npm/pacote/commit/e7ea56e))
-* **manifest:** _resolved is the only main field we do not overwrite ([4c12421](https://github.com/npm/pacote/commit/4c12421))
-
-
-
-<a name="2.7.9"></a>
-## [2.7.9](https://github.com/npm/pacote/compare/v2.7.8...v2.7.9) (2017-05-09)
-
-
-### Bug Fixes
-
-* **git:** Resolve to ref git specs w/o committishes (#88) ([cb885f5](https://github.com/npm/pacote/commit/cb885f5)), closes [#88](https://github.com/zkat/pacote/issues/88)
-
-
-
-<a name="2.7.8"></a>
-## [2.7.8](https://github.com/npm/pacote/compare/v2.7.7...v2.7.8) (2017-05-07)
-
-
-### Bug Fixes
-
-* **git:** integrity hash was not always emitted ([97ed9e1](https://github.com/npm/pacote/commit/97ed9e1))
-
-
-
-<a name="2.7.7"></a>
-## [2.7.7](https://github.com/npm/pacote/compare/v2.7.6...v2.7.7) (2017-05-06)
-
-
-### Bug Fixes
-
-* **auth:** redirects no longer send auth to different host ([82e78c5](https://github.com/npm/pacote/commit/82e78c5))
-
-
-
-<a name="2.7.6"></a>
-## [2.7.6](https://github.com/npm/pacote/compare/v2.7.5...v2.7.6) (2017-05-05)
-
-
-### Bug Fixes
-
-* **git:** only use longpaths on win32 because old gits ([32846fc](https://github.com/npm/pacote/commit/32846fc))
-
-
-
-<a name="2.7.5"></a>
-## [2.7.5](https://github.com/npm/pacote/compare/v2.7.4...v2.7.5) (2017-05-04)
-
-
-### Bug Fixes
-
-* **registry-key:** Use pathname instead of path in registryKey (#85) ([5339831](https://github.com/npm/pacote/commit/5339831))
-
-
-
-<a name="2.7.4"></a>
-## [2.7.4](https://github.com/npm/pacote/compare/v2.7.3...v2.7.4) (2017-05-04)
-
-
-### Bug Fixes
-
-* **pick-manifest:** fix =1.2.3 semver range requests ([dd6911c](https://github.com/npm/pacote/commit/dd6911c))
-
-
-
-<a name="2.7.3"></a>
-## [2.7.3](https://github.com/npm/pacote/compare/v2.7.2...v2.7.3) (2017-05-04)
-
-
-### Bug Fixes
-
-* **pick-manifest:** spaces in requested version are now trimmed out ([6422b28](https://github.com/npm/pacote/commit/6422b28))
-
-
-
-<a name="2.7.2"></a>
-## [2.7.2](https://github.com/npm/pacote/compare/v2.7.1...v2.7.2) (2017-05-04)
-
-
-### Bug Fixes
-
-* **extract:** missing or corrupted content properly re-fetched again ([46f60c2](https://github.com/npm/pacote/commit/46f60c2))
-
-
-
-<a name="2.7.1"></a>
-## [2.7.1](https://github.com/npm/pacote/compare/v2.7.0...v2.7.1) (2017-05-01)
-
-
-### Bug Fixes
-
-* **logging:** log specs correctly on extract ([4b5bab0](https://github.com/npm/pacote/commit/4b5bab0))
-* **manifest:** obey opts.preferOnline when fetching from memoized ([26928a7](https://github.com/npm/pacote/commit/26928a7))
-
-
-
-<a name="2.7.0"></a>
-# [2.7.0](https://github.com/npm/pacote/compare/v2.6.0...v2.7.0) (2017-04-29)
-
-
-### Bug Fixes
-
-* **registry:** stop using integrity hashes for metadata. again. ([4595ab2](https://github.com/npm/pacote/commit/4595ab2))
-
-
-### Features
-
-* **manifest:** include _shasum for legacy compat ([b3a7eed](https://github.com/npm/pacote/commit/b3a7eed))
-
-
-
-<a name="2.6.0"></a>
-# [2.6.0](https://github.com/npm/pacote/compare/v2.5.0...v2.6.0) (2017-04-29)
-
-
-### Features
-
-* **manifest:** annotate manifests with _from ([e45e968](https://github.com/npm/pacote/commit/e45e968))
-
-
-
-<a name="2.5.0"></a>
-# [2.5.0](https://github.com/npm/pacote/compare/v2.4.0...v2.5.0) (2017-04-28)
-
-
-### Bug Fixes
-
-* **registry:** JSON text is not a valid header value ([78951ea](https://github.com/npm/pacote/commit/78951ea))
-
-
-### Features
-
-* **memoization:** allow injection and control of memoizers ([d8a2be7](https://github.com/npm/pacote/commit/d8a2be7))
-
-
-
-<a name="2.4.0"></a>
-# [2.4.0](https://github.com/npm/pacote/compare/v2.3.2...v2.4.0) (2017-04-27)
-
-
-### Bug Fixes
-
-* **tests:** nicer error message on registry 404 ([e8e71c8](https://github.com/npm/pacote/commit/e8e71c8))
-
-
-### Features
-
-* **auth:** added basic auth and always-auth support ([548aeb5](https://github.com/npm/pacote/commit/548aeb5))
-* **proxy:** proxy support for registry and remote deps ([3766bbb](https://github.com/npm/pacote/commit/3766bbb))
-
-
-
-<a name="2.3.2"></a>
-## [2.3.2](https://github.com/npm/pacote/compare/v2.3.1...v2.3.2) (2017-04-26)
-
-
-### Bug Fixes
-
-* **deps:** reduce deps size with m-f-h upgrade ([ba75461](https://github.com/npm/pacote/commit/ba75461))
-
-
-
-<a name="2.3.1"></a>
-## [2.3.1](https://github.com/npm/pacote/compare/v2.3.0...v2.3.1) (2017-04-26)
-
-
-### Bug Fixes
-
-* **git:** another attempt at fixing EPERM b.s. ([e445bef](https://github.com/npm/pacote/commit/e445bef))
-
-
-
-<a name="2.3.0"></a>
-# [2.3.0](https://github.com/npm/pacote/compare/v2.2.2...v2.3.0) (2017-04-26)
-
-
-### Bug Fixes
-
-* **git:** had ENOTSUP error on windows ([ee17c35](https://github.com/npm/pacote/commit/ee17c35))
-* **memoization:** actually memoize package metadata ([e2078c0](https://github.com/npm/pacote/commit/e2078c0))
-
-
-### Features
-
-* **memoization:** better packument memoization + pacote.clearMemoized() ([eb1bd4f](https://github.com/npm/pacote/commit/eb1bd4f))
-
-
-
-<a name="2.2.2"></a>
-## [2.2.2](https://github.com/npm/pacote/compare/v2.2.1...v2.2.2) (2017-04-24)
-
-
-### Bug Fixes
-
-* **prefetch:** pull in new cacache + fix prefetch hasContent call ([9f476b8](https://github.com/npm/pacote/commit/9f476b8))
-
-
-
-<a name="2.2.1"></a>
-## [2.2.1](https://github.com/npm/pacote/compare/v2.2.0...v2.2.1) (2017-04-23)
-
-
-### Bug Fixes
-
-* **finalize:** pass on engines/cpu/os ([0a73c78](https://github.com/npm/pacote/commit/0a73c78))
-
-
-
-<a name="2.2.0"></a>
-# [2.2.0](https://github.com/npm/pacote/compare/v2.1.2...v2.2.0) (2017-04-22)
-
-
-### Bug Fixes
-
-* **git:** fix shortcut fallback order again ([5759d40](https://github.com/npm/pacote/commit/5759d40))
-* **registry:** fix infinite manifetch loop ([6c6a62b](https://github.com/npm/pacote/commit/6c6a62b))
-
-
-### Features
-
-* **manifest:** opts.fullMetadata to get unfiltered manifests ([ff2945b](https://github.com/npm/pacote/commit/ff2945b))
-
-
-
-<a name="2.1.2"></a>
-## [2.1.2](https://github.com/npm/pacote/compare/v2.1.1...v2.1.2) (2017-04-20)
-
-
-
-<a name="2.1.1"></a>
-## [2.1.1](https://github.com/npm/pacote/compare/v2.1.0...v2.1.1) (2017-04-19)
-
-
-### Bug Fixes
-
-* **git:** use sshurl instead of ssh for ssh clones ([ff20803](https://github.com/npm/pacote/commit/ff20803))
-* **notice:** only log npm-notice if the packument came from network ([eeeb411](https://github.com/npm/pacote/commit/eeeb411))
-* **registry:** improve 404 error messages ([6a5cbdb](https://github.com/npm/pacote/commit/6a5cbdb))
-
-
-
-<a name="2.1.0"></a>
-# [2.1.0](https://github.com/npm/pacote/compare/v2.0.5...v2.1.0) (2017-04-18)
-
-
-### Bug Fixes
-
-* **cache:** bump deps for cache fixes ([9596434](https://github.com/npm/pacote/commit/9596434))
-
-
-### Features
-
-* **warn:** http warning headers now logged ([f22ce1d](https://github.com/npm/pacote/commit/f22ce1d))
-
-
-
-<a name="2.0.5"></a>
-## [2.0.5](https://github.com/npm/pacote/compare/v2.0.4...v2.0.5) (2017-04-18)
-
-
-### Bug Fixes
-
-* **file:** oops, the type for these is file ([e7a3d35](https://github.com/npm/pacote/commit/e7a3d35))
-
-
-
-<a name="2.0.4"></a>
-## [2.0.4](https://github.com/npm/pacote/compare/v2.0.3...v2.0.4) (2017-04-18)
-
-
-### Bug Fixes
-
-* **deps:** remove normalize-git-url ([12d464a](https://github.com/npm/pacote/commit/12d464a))
-* **git:** Correctly read in the HEAD ref after cloning ([dbe1b15](https://github.com/npm/pacote/commit/dbe1b15))
-* **git:** The full clone path doesn't have _resolved set ([ddce561](https://github.com/npm/pacote/commit/ddce561))
-* **manifest:** no _from ever ([15087c4](https://github.com/npm/pacote/commit/15087c4))
-
-
-
-<a name="2.0.3"></a>
-## [2.0.3](https://github.com/npm/pacote/compare/v2.0.2...v2.0.3) (2017-04-15)
-
-
-### Bug Fixes
-
-* **manifest:** meh just shove _from in there ([4396f09](https://github.com/npm/pacote/commit/4396f09))
-* **registry:** include CI header ([86ad911](https://github.com/npm/pacote/commit/86ad911))
-* **registry:** include npm-scope header ([574cd93](https://github.com/npm/pacote/commit/574cd93))
-* **registry:** make sure to send referer header ([2d3aaac](https://github.com/npm/pacote/commit/2d3aaac))
-
-
-
-<a name="2.0.2"></a>
-## [2.0.2](https://github.com/npm/pacote/compare/v2.0.1...v2.0.2) (2017-04-15)
-
-
-### Bug Fixes
-
-* **directory:** fix default pack-dir and write a test for it ([9d9266f](https://github.com/npm/pacote/commit/9d9266f))
-* **extract:** brainfart with extractByManifest fixed. lol. ([a1367fb](https://github.com/npm/pacote/commit/a1367fb))
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/npm/pacote/compare/v2.0.0...v2.0.1) (2017-04-15)
-
-
-### Bug Fixes
-
-* **tarball:** missed the local->tarball rename ([ac42dc4](https://github.com/npm/pacote/commit/ac42dc4))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/npm/pacote/compare/v1.0.0...v2.0.0) (2017-04-15)
-
-
-### Bug Fixes
-
-* **api:** use npa@5 for spec parsing (#78) ([3f56298](https://github.com/npm/pacote/commit/3f56298))
-* **deprecated:** remove underscore from manifest._deprecated ([9f4af93](https://github.com/npm/pacote/commit/9f4af93))
-* **directory:** add _resolved to directory manifests ([1d305db](https://github.com/npm/pacote/commit/1d305db))
-* **directory:** return null instead of throwing ([d35630d](https://github.com/npm/pacote/commit/d35630d))
-* **finalize:** don't try to cache manifests we can't get a good key for ([8ab1758](https://github.com/npm/pacote/commit/8ab1758))
-* **finalize:** refactored finalize-manifest code + add _integrity=false sentinel ([657b7fa](https://github.com/npm/pacote/commit/657b7fa))
-* **git:** cleaner handling of git tarball streams when caching ([11acd0a](https://github.com/npm/pacote/commit/11acd0a))
-* **git:** emit manifests from git tarball handler ([b139d4b](https://github.com/npm/pacote/commit/b139d4b))
-* **git:** fix .git exclusion, set mtime = 0 to make tarballs idempotent ([9a9fa1b](https://github.com/npm/pacote/commit/9a9fa1b))
-* **git:** fix fallback order and only fall back on hosted shortcuts ([551cb33](https://github.com/npm/pacote/commit/551cb33))
-* **git:** fix filling-out of git manifests ([95e807c](https://github.com/npm/pacote/commit/95e807c))
-* **git:** got dir packer option working with git ([7669b3e](https://github.com/npm/pacote/commit/7669b3e))
-* **headers:** nudge around some headers to make things behave ([db1e0a1](https://github.com/npm/pacote/commit/db1e0a1))
-* **manifest:** get rid of resolved-with-non-error warning ([d4d4917](https://github.com/npm/pacote/commit/d4d4917))
-* **manifest:** stop using digest for manifests ([4ddd2f5](https://github.com/npm/pacote/commit/4ddd2f5))
-* **opts:** bring opt-check up to date ([564419e](https://github.com/npm/pacote/commit/564419e))
-* **opts:** rename refreshCache to preferOnline cause much clearer ([94171d6](https://github.com/npm/pacote/commit/94171d6))
-* **prefetch:** fall back to the _integrity in the manifest if none calculated ([083ac79](https://github.com/npm/pacote/commit/083ac79))
-* **prefetch:** if there's no stream, just skip (for directory) ([714de91](https://github.com/npm/pacote/commit/714de91))
-* **registry:** fix error handling for registry tarballs ([e69539f](https://github.com/npm/pacote/commit/e69539f))
-* **registry:** nudging logging stuff around a bit ([61d62cc](https://github.com/npm/pacote/commit/61d62cc))
-* **registry:** only send auth info if tarball is hosted on the same registry ([1de5a2b](https://github.com/npm/pacote/commit/1de5a2b))
-* **registry:** redirect tarball urls to provided registry port+protocol if same host ([f50167e](https://github.com/npm/pacote/commit/f50167e))
-* **registry:** support memoizing packuments ([e7fff31](https://github.com/npm/pacote/commit/e7fff31))
-* **registry:** treat registry cache as "private" -- bumps m-f-h ([6fa1503](https://github.com/npm/pacote/commit/6fa1503))
-
-
-### Features
-
-* **directory:** implement local dir packing ([017d989](https://github.com/npm/pacote/commit/017d989))
-* **fetch:** bump make-fetch-happen for new restarts ([cf90716](https://github.com/npm/pacote/commit/cf90716))
-* **git:** support pulling in git submodules ([5825d33](https://github.com/npm/pacote/commit/5825d33))
-* **integrity:** replace http client (#72) ([189cdd2](https://github.com/npm/pacote/commit/189cdd2))
-* **prefetch:** return cache-related info on prefetch ([623b7f3](https://github.com/npm/pacote/commit/623b7f3))
-* **registry:** allow injection of request agents ([805e5ae](https://github.com/npm/pacote/commit/805e5ae))
-* **registry:** fast request pooling ([321f84b](https://github.com/npm/pacote/commit/321f84b))
-* **registry:** registry requests now follow cache spec more closely, respect Age, etc ([9e47098](https://github.com/npm/pacote/commit/9e47098))
-
-
-### BREAKING CHANGES
-
-* **api:** spec objects can no longer be realize-package-specifier objects. Pass a string or generate npa@>=5 spec objects to pass in.
-* **integrity:** This PR replaces a pretty fundamental chunk of pacote.
-
-* Caching now follows standard-ish cache rules for http-related requests.
-
-* manifest() no longer includes the `_shasum` field. It's been replaced by `_integrity`, which is a Subresource Integrity hash string containing equivalent data. These strings can be parsed and managed using https://npm.im/ssri.
-
-* Any functions that accepted `opts.digest` and/or `opts.hashAlgorithm` now expect `opts.integrity` instead.
-
-* Packuments and finalized manifests are now cached using sha512. Tarballs can start using that hash (or any other more secure hash) once registries start supporting them: `packument.dist.integrity` will be prioritized over `packument.shasum`.
-
-* If opts.offline is used, a `ENOCACHE` error will be returned.
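Since the `_integrity` value described in this entry is a standard Subresource Integrity string, it can be inspected with the ssri package the entry links to; a small sketch, with a placeholder digest rather than a real one:

```js
const ssri = require('ssri')

// manifest._integrity values are SRI strings like 'sha512-<base64 digest>'.
const integrity = ssri.parse('sha512-c29tZSBmYWtlIGRpZ2VzdA==')   // placeholder digest

console.log(integrity.toString())    // round-trips the SRI string
console.log(Object.keys(integrity))  // [ 'sha512' ]

// checkData returns the matching hash entry, or false when the data does not match.
console.log(ssri.checkData(Buffer.from('not the real tarball'), integrity))
```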
-
-
-
-<a name="1.0.0"></a>
-# [1.0.0](https://github.com/npm/pacote/compare/v0.1.1...v1.0.0) (2017-03-17)
-
-
-### Bug Fixes
-
-* **extract-stream:** adapt to tar-fs api ([aa21308](https://github.com/npm/pacote/commit/aa21308))
-* add 'use strict' to all .js files (#26) ([021bd59](https://github.com/npm/pacote/commit/021bd59))
-* **cache:** this is really a user error, so just throw ([5c9c0fa](https://github.com/npm/pacote/commit/5c9c0fa))
-* **deps:** cacache@5.0.3 ([37cddc5](https://github.com/npm/pacote/commit/37cddc5))
-* **deps:** tar-fs@1.15.1 ([e0d853a](https://github.com/npm/pacote/commit/e0d853a))
-* **docs:** correct fixtures table (#57) ([23d2eb4](https://github.com/npm/pacote/commit/23d2eb4))
-* **extract:** correctly detect digest cache misses ([ec6672b](https://github.com/npm/pacote/commit/ec6672b))
-* **extract:** fixed race condition ([14fd2a8](https://github.com/npm/pacote/commit/14fd2a8))
-* **finalize-manifest:** use digest to uniquify cached manifests ([931a9cb](https://github.com/npm/pacote/commit/931a9cb))
-* **http:** Fixed cache-related race condition ([b70a4b1](https://github.com/npm/pacote/commit/b70a4b1))
-* **manifest:** dir manifests should throw ENOPACKAGEJSON ([b06882d](https://github.com/npm/pacote/commit/b06882d))
-* **manifest:** ETARGET when no packages match ([ea2127d](https://github.com/npm/pacote/commit/ea2127d))
-* **manifest:** local manifest fn should return a promise ([c700622](https://github.com/npm/pacote/commit/c700622))
-* **manifest:** retry registry manifests once on ETARGET (#66) ([3b99adc](https://github.com/npm/pacote/commit/3b99adc))
-* **prefetch:** hashAlgorithm is required for hasContent ([f03d51c](https://github.com/npm/pacote/commit/f03d51c))
-* **request:** report cache write errors on end ([c102b86](https://github.com/npm/pacote/commit/c102b86))
-
-
-### Features
-
-* **api:** support pre-realized specifiers as specs (#62) ([1d5bf39](https://github.com/npm/pacote/commit/1d5bf39))
-* **cache:** grabbing info and hasContent ([a559711](https://github.com/npm/pacote/commit/a559711))
-* **deps:** minimatch@3.0.3 ([2bb8cd5](https://github.com/npm/pacote/commit/2bb8cd5))
-* **deps:** normalize-package-data@2.3.5 ([4250e0d](https://github.com/npm/pacote/commit/4250e0d))
-* **directory:** directory dep support (#68) ([6d5307a](https://github.com/npm/pacote/commit/6d5307a))
-* **git:** baseline git support (#69) ([6d7eaf5](https://github.com/npm/pacote/commit/6d7eaf5))
-* **handlers:** added remote tarball support (#64) ([add1808](https://github.com/npm/pacote/commit/add1808))
-* **local:** local tarball support (#67) ([e50d625](https://github.com/npm/pacote/commit/e50d625))
-* **manifest:** handle deprecation notice (#60) ([db82dae](https://github.com/npm/pacote/commit/db82dae))
-* **manifest:** standardize manifest format ([3dd9a72](https://github.com/npm/pacote/commit/3dd9a72))
-* **manifest:** switch to cacache for caching ([8ba7249](https://github.com/npm/pacote/commit/8ba7249))
-* **prefetch:** added tarball prefetch support ([26c34ce](https://github.com/npm/pacote/commit/26c34ce))
-* **request:** accept maxSockets opt ([3987807](https://github.com/npm/pacote/commit/3987807))
-* **scopes:** new scopeTargets option (#59) ([b5db7ae](https://github.com/npm/pacote/commit/b5db7ae))
-
-
-### Performance Improvements
-
-* **finalize-manifest:** cache finalized manifests ([fa3c430](https://github.com/npm/pacote/commit/fa3c430))
-
-
-### BREAKING CHANGES
-
-* **manifest:** Toplevel APIs now return Promises instead of using callbacks.
diff --git a/node_modules/libcipm/node_modules/pacote/LICENSE b/node_modules/libcipm/node_modules/pacote/LICENSE
deleted file mode 100644
index 841ef53a2..000000000
--- a/node_modules/libcipm/node_modules/pacote/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-Copyright (c) Kat Marchán, npm, Inc., and Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
-OR OTHER DEALINGS IN THE SOFTWARE.
-
diff --git a/node_modules/libcipm/node_modules/pacote/README.md b/node_modules/libcipm/node_modules/pacote/README.md
deleted file mode 100644
index f29d330d8..000000000
--- a/node_modules/libcipm/node_modules/pacote/README.md
+++ /dev/null
@@ -1,288 +0,0 @@
-# pacote [![npm version](https://img.shields.io/npm/v/pacote.svg)](https://npm.im/pacote) [![license](https://img.shields.io/npm/l/pacote.svg)](https://npm.im/pacote) [![Travis](https://img.shields.io/travis/npm/pacote.svg)](https://travis-ci.org/npm/pacote) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/pacote?svg=true)](https://ci.appveyor.com/project/npm/pacote) [![Coverage Status](https://coveralls.io/repos/github/npm/pacote/badge.svg?branch=latest)](https://coveralls.io/github/npm/pacote?branch=latest)
-
-[`pacote`](https://github.com/npm/pacote) is a Node.js library for downloading
-[npm](https://npmjs.org)-compatible packages. It supports all package specifier
-syntax that `npm install` and its ilk support. It transparently caches anything
-needed to reduce excess operations, using [`cacache`](https://npm.im/cacache).
-
-## Install
-
-`$ npm install --save pacote`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [`manifest`](#manifest)
- * [`packument`](#packument)
- * [`extract`](#extract)
- * [`tarball`](#tarball)
- * [`tarball.stream`](#tarball-stream)
- * [`tarball.toFile`](#tarball-to-file)
- * ~~[`prefetch`](#prefetch)~~ (deprecated)
- * [`clearMemoized`](#clearMemoized)
- * [`options`](#options)
-
-### Example
-
-```javascript
-const pacote = require('pacote')
-
-pacote.manifest('pacote@^1').then(pkg => {
- console.log('package manifest for registry pkg:', pkg)
- // { "name": "pacote", "version": "1.0.0", ... }
-})
-
-pacote.extract('http://hi.com/pkg.tgz', './here').then(() => {
- console.log('remote tarball contents extracted to ./here')
-})
-```
-
-### Features
-
-* Handles all package types [npm](https://npm.im/npm) does
-* [high-performance, reliable, verified local cache](https://npm.im/cacache)
-* offline mode
-* authentication support (private git, private npm registries, etc)
-* github, gitlab, and bitbucket-aware
-* semver range support for git dependencies
-
-### Contributing
-
-The pacote team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="manifest"></a> `> pacote.manifest(spec, [opts])`
-
-Fetches the *manifest* for a package. Manifest objects are similar to, and
-based on, the `package.json` for that package, but with pre-processed and
-limited fields. The object has the following shape:
-
-```javascript
-{
- "name": PkgName,
- "version": SemverString,
- "dependencies": { PkgName: SemverString },
- "optionalDependencies": { PkgName: SemverString },
- "devDependencies": { PkgName: SemverString },
- "peerDependencies": { PkgName: SemverString },
- "bundleDependencies": false || [PkgName],
- "bin": { BinName: Path },
- "_resolved": TarballSource, // different for each package type
- "_integrity": SubresourceIntegrityHash,
- "_shrinkwrap": null || ShrinkwrapJsonObj
-}
-```
-
-Note that depending on the spec type, some additional fields might be present.
-For example, packages from `registry.npmjs.org` have additional metadata
-appended by the registry.
-
-##### Example
-
-```javascript
-pacote.manifest('pacote@1.0.0').then(pkgJson => {
- // fetched `package.json` data from the registry
-})
-```
-
-#### <a name="packument"></a> `> pacote.packument(spec, [opts])`
-
-Fetches the *packument* for a package. Packument objects are general metadata
-about a project corresponding to registry metadata, and include version and
-`dist-tag` information about a package's available versions, rather than a
-specific version. It may include additional metadata not usually available
-through the individual package metadata objects.
-
-It generally looks something like this:
-
-```javascript
-{
- "name": PkgName,
- "dist-tags": {
- 'latest': VersionString,
- [TagName]: VersionString,
- ...
- },
- "versions": {
- [VersionString]: Manifest,
- ...
- }
-}
-```
-
-Note that depending on the spec type, some additional fields might be present.
-For example, packages from `registry.npmjs.org` have additional metadata
-appended by the registry.
-
-##### Example
-
-```javascript
-pacote.packument('pacote').then(pkgJson => {
- // fetched package versions metadata from the registry
-})
-```
-
-#### <a name="extract"></a> `> pacote.extract(spec, destination, [opts])`
-
-Extracts package data identified by `<spec>` into a directory named
-`<destination>`, which will be created if it does not already exist.
-
-If `opts.digest` is provided and the data it identifies is present in the cache,
-`extract` will bypass most of its operations and go straight to extracting the
-tarball.
-
-##### Example
-
-```javascript
-pacote.extract('pacote@1.0.0', './woot', {
- digest: 'deadbeef'
-}).then(() => {
- // Succeeds as long as `pacote@1.0.0` still exists somewhere. Network and
- // other operations are bypassed entirely if `digest` is present in the cache.
-})
-```
-
-#### <a name="tarball"></a> `> pacote.tarball(spec, [opts])`
-
-Fetches package data identified by `<spec>` and returns the data as a buffer.
-
-This API has two variants:
-
-* `pacote.tarball.stream(spec, [opts])` - Same as `pacote.tarball`, except it returns a stream instead of a Promise.
-* `pacote.tarball.toFile(spec, dest, [opts])` - Instead of returning data directly, writes the data to `dest`, creating any required directories along the way.
-
-##### Example
-
-```javascript
-pacote.tarball('pacote@1.0.0', { cache: './my-cache' }).then(data => {
- // data is the tarball data for pacote@1.0.0
-})
-```
-
-#### <a name="tarball-stream"></a> `> pacote.tarball.stream(spec, [opts])`
-
-Same as `pacote.tarball`, except it returns a stream instead of a Promise.
-
-##### Example
-
-```javascript
-const fs = require('fs')
-
-pacote.tarball.stream('pacote@1.0.0')
-.pipe(fs.createWriteStream('./pacote-1.0.0.tgz'))
-```
-
-#### <a name="tarball-to-file"></a> `> pacote.tarball.toFile(spec, dest, [opts])`
-
-Like `pacote.tarball`, but instead of returning data directly, it writes the
-data to `dest`, creating any required directories along the way.
-
-##### Example
-
-```javascript
-pacote.tarball.toFile('pacote@1.0.0', './pacote-1.0.0.tgz')
-.then(() => { /* pacote tarball written directly to ./pacote-1.0.0.tgz */ })
-```
-
-#### <a name="prefetch"></a> `> pacote.prefetch(spec, [opts])`
-
-##### THIS API IS DEPRECATED. USE `pacote.tarball()` INSTEAD
-
-Fetches package data identified by `<spec>`, usually for the purpose of warming
-up the local package cache (with `opts.cache`). It does not return anything.
-
-##### Example
-
-```javascript
-pacote.prefetch('pacote@1.0.0', { cache: './my-cache' }).then(() => {
- // ./my-cache now has both the manifest and tarball for `pacote@1.0.0`.
-})
-```
-
-#### <a name="clearMemoized"></a> `> pacote.clearMemoized()`
-
-This utility function can be used to force pacote to release its references
-to any memoized data in its various internal caches. It might help free
-some memory.
-
-```javascript
-pacote.manifest(...).then(() => pacote.clearMemoized())
-```
-
-#### <a name="options"></a> `> options`
-
-`pacote` accepts [the options for
-`npm-registry-fetch`](https://npm.im/npm-registry-fetch#fetch-options) as-is,
-with several additional pacote-specific ones:
-
-##### <a name="dirPacker"></a> `opts.dirPacker`
-
-* Type: Function
-* Default: Uses [`npm-packlist`](https://npm.im/npm-packlist) and [`tar`](https://npm.im/tar) to make a tarball.
-
-Expects a function that takes a single argument, `dir`, and returns a
-`ReadableStream` that outputs packaged tarball data. Used when creating tarballs
-for package specs that are not already packaged, such as git and directory
-dependencies. The default `opts.dirPacker` does not execute `prepare` scripts,
-even though npm itself does.
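-
-For illustration, here is a minimal custom packer built from the same
-[`npm-packlist`](https://npm.im/npm-packlist) and [`tar`](https://npm.im/tar)
-modules the default uses. This is only a sketch: it assumes `npm-packlist`'s
-sync API and, like the default, it does not run `prepare` scripts.
-
-```javascript
-const packlist = require('npm-packlist')
-const tar = require('tar')
-
-// A custom opts.dirPacker: collect the publishable file list for `dir` and
-// return a stream of gzipped tarball data with the usual "package/" prefix.
-function dirPacker (dir) {
-  const files = packlist.sync({ path: dir })
-  return tar.c({ cwd: dir, gzip: true, portable: true, prefix: 'package/' }, files)
-}
-
-// e.g. pacote.extract('./some-local-dir', './unpacked', { dirPacker })
-```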
-
-##### <a name="opts-enjoy-by"></a> `opts.enjoy-by`
-
-* Alias: `opts.enjoyBy`, `opts.before`
-* Type: Date-able
-* Default: undefined
-
-If passed in, this value is used while resolving to filter the versions of
-**registry dependencies** such that versions published **after** `opts.enjoy-by`
-are not considered -- as if they'd never been published.
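-
-For example, a sketch of resolving as if nothing newer than a given date had
-been published (package name, range, and date are illustrative):
-
-```javascript
-const pacote = require('pacote')
-
-pacote.manifest('some-pkg@^1.0.0', {
-  enjoyBy: new Date('2018-01-01')
-}).then(manifest => {
-  // manifest.version is the newest matching version published on or
-  // before 2018-01-01
-})
-```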
-
-##### <a name="opts-include-deprecated"></a> `opts.include-deprecated`
-
-* Alias: `opts.includeDeprecated`
-* Type: Boolean
-* Default: false
-
-If false, deprecated versions will be skipped when selecting from registry range
-specifiers. If true, deprecations do not affect version selection.
-
-##### <a name="opts-full-metadata"></a> `opts.full-metadata`
-
-* Type: Boolean
-* Default: false
-
-If `true`, the full packument will be fetched when doing metadata requests. By
-default, `pacote` only fetches the summarized packuments, also called "corgis".
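-
-A quick sketch of requesting the full packument (package name is illustrative):
-
-```javascript
-const pacote = require('pacote')
-
-// Without the option, only the summarized "corgi" packument is fetched.
-pacote.packument('some-pkg', { 'full-metadata': true }).then(packument => {
-  // the full packument includes fields the corgi format omits
-})
-```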
-
-##### <a name="opts-tag"></a> `opts.tag`
-
-* Alias: `opts.defaultTag`
-* Type: String
-* Default: `'latest'`
-
-Package version resolution tag. When processing registry spec ranges, this
-option is used to determine what dist-tag to treat as "latest". For more details
-about how `pacote` selects versions and how `tag` is involved, see [the
-documentation for `npm-pick-manifest`](https://npm.im/npm-pick-manifest).
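-
-For example, a sketch of treating the `next` dist-tag as "latest" while
-resolving a range (package name and range are illustrative):
-
-```javascript
-const pacote = require('pacote')
-
-pacote.manifest('some-pkg@^2.0.0', { tag: 'next' }).then(manifest => {
-  // if the version `next` points at satisfies ^2.0.0, it is preferred
-  console.log(manifest.version)
-})
-```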
-
-##### <a name="opts-resolved"></a> `opts.resolved`
-
-* Type: String
-* Default: null
-
-When fetching tarballs, this option can be passed in to skip the registry
-metadata lookup and download the tarball directly. If the string is a `file:`
-URL, pacote will try to read the referenced local file before attempting any
-further lookups. This option does not bypass integrity checks when
-`opts.integrity` is passed in.
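-
-A sketch of skipping the metadata lookup for a tarball fetch (URL and
-integrity value are illustrative placeholders):
-
-```javascript
-const pacote = require('pacote')
-
-pacote.tarball('some-pkg@1.2.3', {
-  resolved: 'https://registry.npmjs.org/some-pkg/-/some-pkg-1.2.3.tgz',
-  integrity: 'sha512-deadbeef...'
-}).then(data => {
-  // data was fetched straight from `resolved` and verified against `integrity`
-})
-```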
-
-##### <a name="opts-where"></a> `opts.where`
-
-* Type: String
-* Default: null
-
-Passed as an argument to [`npm-package-arg`](https://npm.im/npm-package-arg)
-when resolving `spec` arguments. Used to determine the path that local path
-specs are resolved relative to.
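-
-A sketch of resolving a relative path spec against a chosen project directory
-(paths are illustrative):
-
-```javascript
-const pacote = require('pacote')
-
-pacote.manifest('./packages/foo', {
-  where: '/path/to/project'
-}).then(manifest => {
-  // `./packages/foo` was resolved relative to /path/to/project
-})
-```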
diff --git a/node_modules/libcipm/node_modules/pacote/extract.js b/node_modules/libcipm/node_modules/pacote/extract.js
deleted file mode 100644
index 6ed0b18aa..000000000
--- a/node_modules/libcipm/node_modules/pacote/extract.js
+++ /dev/null
@@ -1,99 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const extractStream = require('./lib/extract-stream.js')
-const fs = require('fs')
-const mkdirp = BB.promisify(require('mkdirp'))
-const npa = require('npm-package-arg')
-const optCheck = require('./lib/util/opt-check.js')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const withTarballStream = require('./lib/with-tarball-stream.js')
-const inferOwner = require('infer-owner')
-const chown = BB.promisify(require('chownr'))
-
-const truncateAsync = BB.promisify(fs.truncate)
-const readFileAsync = BB.promisify(fs.readFile)
-const appendFileAsync = BB.promisify(fs.appendFile)
-
-// you used to call me on my...
-const selfOwner = process.getuid ? {
- uid: process.getuid(),
- gid: process.getgid()
-} : {
- uid: undefined,
- gid: undefined
-}
-
-module.exports = extract
-function extract (spec, dest, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
- if (spec.type === 'git' && !opts.cache) {
- throw new TypeError('Extracting git packages requires a cache folder')
- }
- if (typeof dest !== 'string') {
- throw new TypeError('Extract requires a destination')
- }
- const startTime = Date.now()
- return inferOwner(dest).then(({ uid, gid }) => {
- opts = opts.concat({ uid, gid })
- return withTarballStream(spec, opts, stream => {
- return tryExtract(spec, stream, dest, opts)
- })
- .then(() => {
- if (!opts.resolved) {
- const pjson = path.join(dest, 'package.json')
- return readFileAsync(pjson, 'utf8')
- .then(str => truncateAsync(pjson)
- .then(() => appendFileAsync(pjson, str.replace(
- /}\s*$/,
- `\n,"_resolved": ${
- JSON.stringify(opts.resolved || '')
- }\n,"_integrity": ${
- JSON.stringify(opts.integrity || '')
- }\n,"_from": ${
- JSON.stringify(spec.toString())
- }\n}`
- ))))
- }
- })
- .then(() => opts.log.silly(
- 'extract',
- `${spec} extracted to ${dest} (${Date.now() - startTime}ms)`
- ))
- })
-}
-
-function tryExtract (spec, tarStream, dest, opts) {
- return new BB((resolve, reject) => {
- tarStream.on('error', reject)
-
- rimraf(dest)
- .then(() => mkdirp(dest))
- .then((made) => {
- // respect the current ownership of unpack targets
- // but don't try to chown if we're not root.
- if (selfOwner.uid === 0 &&
- typeof selfOwner.gid === 'number' &&
- selfOwner.uid !== opts.uid && selfOwner.gid !== opts.gid) {
- return chown(made || dest, opts.uid, opts.gid)
- }
- })
- .then(() => {
- const xtractor = extractStream(spec, dest, opts)
- xtractor.on('error', reject)
- xtractor.on('close', resolve)
- tarStream.pipe(xtractor)
- })
- .catch(reject)
- })
- .catch(err => {
- if (err.code === 'EINTEGRITY') {
- err.message = `Verification failed while extracting ${spec}:\n${err.message}`
- }
-
- throw err
- })
-}
diff --git a/node_modules/libcipm/node_modules/pacote/index.js b/node_modules/libcipm/node_modules/pacote/index.js
deleted file mode 100644
index a0ed98759..000000000
--- a/node_modules/libcipm/node_modules/pacote/index.js
+++ /dev/null
@@ -1,10 +0,0 @@
-'use strict'
-
-module.exports = {
- extract: require('./extract'),
- manifest: require('./manifest'),
- packument: require('./packument'),
- prefetch: require('./prefetch'),
- tarball: require('./tarball'),
- clearMemoized: require('./lib/fetch').clearMemoized
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/extract-stream.js b/node_modules/libcipm/node_modules/pacote/lib/extract-stream.js
deleted file mode 100644
index d967b9f89..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/extract-stream.js
+++ /dev/null
@@ -1,89 +0,0 @@
-'use strict'
-
-const Minipass = require('minipass')
-const path = require('path')
-const tar = require('tar')
-
-module.exports = extractStream
-module.exports._computeMode = computeMode
-
-class Transformer extends Minipass {
- constructor (spec, opts) {
- super()
- this.spec = spec
- this.opts = opts
- this.str = ''
- }
- write (data) {
- this.str += data
- return true
- }
- end () {
- const replaced = this.str.replace(
- /}\s*$/,
- `\n,"_resolved": ${
- JSON.stringify(this.opts.resolved || '')
- }\n,"_integrity": ${
- JSON.stringify(this.opts.integrity || '')
- }\n,"_from": ${
- JSON.stringify(this.spec.toString())
- }\n}`
- )
- super.write(replaced)
- return super.end()
- }
-}
-
-function computeMode (fileMode, optMode, umask) {
- return (fileMode | optMode) & ~(umask || 0)
-}
-
-function pkgJsonTransform (spec, opts) {
- return entry => {
- if (entry.path === 'package.json') {
- const transformed = new Transformer(spec, opts)
- return transformed
- }
- }
-}
-
-function extractStream (spec, dest, opts) {
- opts = opts || {}
- const sawIgnores = new Set()
- return tar.x({
- cwd: dest,
- filter: (name, entry) => !entry.header.type.match(/^.*link$/i),
- strip: 1,
- onwarn: msg => opts.log && opts.log.warn('tar', msg),
- uid: opts.uid,
- gid: opts.gid,
- umask: opts.umask,
- transform: opts.resolved && pkgJsonTransform(spec, opts),
- onentry (entry) {
- if (entry.type.toLowerCase() === 'file') {
- entry.mode = computeMode(entry.mode, opts.fmode, opts.umask)
- } else if (entry.type.toLowerCase() === 'directory') {
- entry.mode = computeMode(entry.mode, opts.dmode, opts.umask)
- } else {
- entry.mode = computeMode(entry.mode, 0, opts.umask)
- }
-
- // Note: This mirrors logic in the fs read operations that are
- // employed during tarball creation, in the fstream-npm module.
- // It is duplicated here to handle tarballs that are created
- // using other means, such as system tar or git archive.
- if (entry.type.toLowerCase() === 'file') {
- const base = path.basename(entry.path)
- if (base === '.npmignore') {
- sawIgnores.add(entry.path)
- } else if (base === '.gitignore') {
- const npmignore = entry.path.replace(/\.gitignore$/, '.npmignore')
- if (!sawIgnores.has(npmignore)) {
- // Rename, may be clobbered later.
- entry.path = npmignore
- }
- }
- }
- }
- })
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetch.js b/node_modules/libcipm/node_modules/pacote/lib/fetch.js
deleted file mode 100644
index 36fb6b6d3..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetch.js
+++ /dev/null
@@ -1,82 +0,0 @@
-'use strict'
-
-const duck = require('protoduck')
-
-const Fetcher = duck.define(['spec', 'opts', 'manifest'], {
- packument: ['spec', 'opts'],
- manifest: ['spec', 'opts'],
- tarball: ['spec', 'opts'],
- fromManifest: ['manifest', 'spec', 'opts'],
- clearMemoized () {}
-}, { name: 'Fetcher' })
-module.exports = Fetcher
-
-module.exports.packument = packument
-function packument (spec, opts) {
- const fetcher = getFetcher(spec.type)
- return fetcher.packument(spec, opts)
-}
-
-module.exports.manifest = manifest
-function manifest (spec, opts) {
- const fetcher = getFetcher(spec.type)
- return fetcher.manifest(spec, opts)
-}
-
-module.exports.tarball = tarball
-function tarball (spec, opts) {
- return getFetcher(spec.type).tarball(spec, opts)
-}
-
-module.exports.fromManifest = fromManifest
-function fromManifest (manifest, spec, opts) {
- return getFetcher(spec.type).fromManifest(manifest, spec, opts)
-}
-
-const fetchers = {}
-
-module.exports.clearMemoized = clearMemoized
-function clearMemoized () {
- Object.keys(fetchers).forEach(k => {
- fetchers[k].clearMemoized()
- })
-}
-
-function getFetcher (type) {
- if (!fetchers[type]) {
- // This is spelled out both to prevent sketchy stuff and to make life
- // easier for bundlers/preprocessors.
- switch (type) {
- case 'alias':
- fetchers[type] = require('./fetchers/alias')
- break
- case 'directory':
- fetchers[type] = require('./fetchers/directory')
- break
- case 'file':
- fetchers[type] = require('./fetchers/file')
- break
- case 'git':
- fetchers[type] = require('./fetchers/git')
- break
- case 'hosted':
- fetchers[type] = require('./fetchers/hosted')
- break
- case 'range':
- fetchers[type] = require('./fetchers/range')
- break
- case 'remote':
- fetchers[type] = require('./fetchers/remote')
- break
- case 'tag':
- fetchers[type] = require('./fetchers/tag')
- break
- case 'version':
- fetchers[type] = require('./fetchers/version')
- break
- default:
- throw new Error(`Invalid dependency type requested: ${type}`)
- }
- }
- return fetchers[type]
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/alias.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/alias.js
deleted file mode 100644
index f22cbb1d7..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/alias.js
+++ /dev/null
@@ -1,24 +0,0 @@
-'use strict'
-
-const Fetcher = require('../fetch')
-const fetchRegistry = require('./registry')
-
-const fetchRemote = module.exports = Object.create(null)
-
-Fetcher.impl(fetchRemote, {
- packument (spec, opts) {
- return fetchRegistry.packument(spec.subSpec, opts)
- },
-
- manifest (spec, opts) {
- return fetchRegistry.manifest(spec.subSpec, opts)
- },
-
- tarball (spec, opts) {
- return fetchRegistry.tarball(spec.subSpec, opts)
- },
-
- fromManifest (manifest, spec, opts) {
- return fetchRegistry.fromManifest(manifest, spec.subSpec, opts)
- }
-})
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/directory.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/directory.js
deleted file mode 100644
index fc9c46cd3..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/directory.js
+++ /dev/null
@@ -1,89 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const Fetcher = require('../fetch')
-const glob = BB.promisify(require('glob'))
-const packDir = require('../util/pack-dir')
-const readJson = require('../util/read-json')
-const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
-const through = require('mississippi').through
-const normalizePackageBin = require('npm-normalize-package-bin')
-
-const readFileAsync = BB.promisify(require('fs').readFile)
-
-const fetchDirectory = module.exports = Object.create(null)
-
-Fetcher.impl(fetchDirectory, {
- packument (spec, opts) {
- return this.manifest(spec, opts).then(manifest => {
- return Object.assign({}, manifest, {
- 'dist-tags': {
- 'latest': manifest.version
- },
- time: {
- [manifest.version]: (new Date()).toISOString()
- },
- versions: {
- [manifest.version]: manifest
- }
- })
- })
- },
- // `directory` manifests come from the actual manifest/lockfile data.
- manifest (spec, opts) {
- const pkgPath = path.join(spec.fetchSpec, 'package.json')
- const srPath = path.join(spec.fetchSpec, 'npm-shrinkwrap.json')
- return BB.join(
- readFileAsync(pkgPath).then(readJson).catch({ code: 'ENOENT' }, err => {
- err.code = 'ENOPACKAGEJSON'
- throw err
- }),
- readFileAsync(srPath).then(readJson).catch({ code: 'ENOENT' }, () => null),
- (pkg, sr) => {
- pkg._shrinkwrap = sr
- pkg._hasShrinkwrap = !!sr
- pkg._resolved = spec.fetchSpec
- pkg._integrity = false // Don't auto-calculate integrity
- pkg._shasum = false // Don't auto-calculate shasum either
- return pkg
- }
- ).then(pkg => {
- if (!pkg.bin && pkg.directories && pkg.directories.bin) {
- const dirBin = pkg.directories.bin
- return glob(path.join(spec.fetchSpec, dirBin, '/**'), { nodir: true }).then(matches => {
- matches.forEach(filePath => {
- const relative = path.relative(spec.fetchSpec, filePath)
- if (relative && relative[0] !== '.') {
- if (!pkg.bin) { pkg.bin = {} }
- pkg.bin[path.basename(relative)] = relative
- }
- })
- }).then(() => pkg)
- } else {
- return pkg
- }
- }).then(pkg => normalizePackageBin(pkg))
- },
-
- // As of npm@5, the npm installer doesn't pack + install directories: it just
- // creates symlinks. This code is here because `npm pack` still needs the
- // ability to create a tarball from a local directory.
- tarball (spec, opts) {
- const stream = through()
- this.manifest(spec, opts).then(mani => {
- return pipe(this.fromManifest(mani, spec, opts), stream)
- }).catch(err => stream.emit('error', err))
- return stream
- },
-
- // `directory` tarballs are generated in a very similar way to git tarballs.
- fromManifest (manifest, spec, opts) {
- const stream = through()
- packDir(manifest, manifest._resolved, manifest._resolved, stream, opts).catch(err => {
- stream.emit('error', err)
- })
- return stream
- }
-})
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/file.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/file.js
deleted file mode 100644
index a58e32913..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/file.js
+++ /dev/null
@@ -1,78 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const Fetcher = require('../fetch')
-const fs = require('fs')
-const pipe = BB.promisify(require('mississippi').pipe)
-const through = require('mississippi').through
-
-const readFileAsync = BB.promisify(fs.readFile)
-const statAsync = BB.promisify(fs.stat)
-
-const MAX_BULK_SIZE = 2 * 1024 * 1024 // 2MB
-
-// `file` packages refer to local tarball files.
-const fetchFile = module.exports = Object.create(null)
-
-Fetcher.impl(fetchFile, {
- packument (spec, opts) {
- return BB.reject(new Error('Not implemented yet'))
- },
-
- manifest (spec, opts) {
- // We can't do much here. `finalizeManifest` will take care of
- // calling `tarball` to fill out all the necessary details.
- return BB.resolve(null)
- },
-
- // All the heavy lifting for `file` packages is done here.
- // They're never cached. We just read straight out of the file.
- // TODO - maybe they *should* be cached?
- tarball (spec, opts) {
- const src = spec._resolved || spec.fetchSpec
- const stream = through()
- statAsync(src).then(stat => {
- if (spec._resolved) { stream.emit('manifest', spec) }
- if (stat.size <= MAX_BULK_SIZE) {
- // YAY LET'S DO THING IN BULK
- return readFileAsync(src).then(data => {
- if (opts.cache) {
- return cacache.put(
- opts.cache, `pacote:tarball:file:${src}`, data, {
- integrity: opts.integrity
- }
- ).then(integrity => ({ data, integrity }))
- } else {
- return { data }
- }
- }).then(info => {
- if (info.integrity) { stream.emit('integrity', info.integrity) }
- stream.write(info.data, () => {
- stream.end()
- })
- })
- } else {
- let integrity
- const cacheWriter = !opts.cache
- ? BB.resolve(null)
- : (pipe(
- fs.createReadStream(src),
- cacache.put.stream(opts.cache, `pacote:tarball:${src}`, {
- integrity: opts.integrity
- }).on('integrity', d => { integrity = d })
- ))
- return cacheWriter.then(() => {
- if (integrity) { stream.emit('integrity', integrity) }
- return pipe(fs.createReadStream(src), stream)
- })
- }
- }).catch(err => stream.emit('error', err))
- return stream
- },
-
- fromManifest (manifest, spec, opts) {
- return this.tarball(manifest || spec, opts)
- }
-})
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/git.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/git.js
deleted file mode 100644
index a1579d1f9..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/git.js
+++ /dev/null
@@ -1,178 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const cacheKey = require('../util/cache-key')
-const Fetcher = require('../fetch')
-const git = require('../util/git')
-const mkdirp = BB.promisify(require('mkdirp'))
-const pickManifest = require('npm-pick-manifest')
-const optCheck = require('../util/opt-check')
-const osenv = require('osenv')
-const packDir = require('../util/pack-dir')
-const PassThrough = require('stream').PassThrough
-const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
-const rimraf = BB.promisify(require('rimraf'))
-const uniqueFilename = require('unique-filename')
-
-// `git` dependencies are fetched from git repositories and packed up.
-const fetchGit = module.exports = Object.create(null)
-
-Fetcher.impl(fetchGit, {
- packument (spec, opts) {
- return BB.reject(new Error('Not implemented yet.'))
- },
-
- manifest (spec, opts) {
- opts = optCheck(opts)
- if (spec.hosted && spec.hosted.getDefaultRepresentation() === 'shortcut') {
- return hostedManifest(spec, opts)
- } else {
- // If it's not a shortcut, don't do fallbacks.
- return plainManifest(spec.fetchSpec, spec, opts)
- }
- },
-
- tarball (spec, opts) {
- opts = optCheck(opts)
- const stream = new PassThrough()
- this.manifest(spec, opts).then(manifest => {
- stream.emit('manifest', manifest)
- return pipe(
- this.fromManifest(
- manifest, spec, opts
- ).on('integrity', i => stream.emit('integrity', i)), stream
- )
- }).catch(err => stream.emit('error', err))
- return stream
- },
-
- fromManifest (manifest, spec, opts) {
- opts = optCheck(opts)
- let streamError
- const stream = new PassThrough().on('error', e => { streamError = e })
- const cacheName = manifest._uniqueResolved || manifest._resolved || ''
- const cacheStream = (
- opts.cache &&
- cacache.get.stream(
- opts.cache, cacheKey('packed-dir', cacheName), opts
- ).on('integrity', i => stream.emit('integrity', i))
- )
- cacheStream.pipe(stream)
- cacheStream.on('error', err => {
- if (err.code !== 'ENOENT') {
- return stream.emit('error', err)
- } else {
- stream.emit('reset')
- return withTmp(opts, tmp => {
- if (streamError) { throw streamError }
- return cloneRepo(
- spec, manifest._repo, manifest._ref, manifest._rawRef, tmp, opts
- ).then(HEAD => {
- if (streamError) { throw streamError }
- manifest._resolved = spec.saveSpec.replace(/(:?#.*)?$/, `#${HEAD}`)
- manifest._uniqueResolved = manifest._resolved
- return packDir(manifest, manifest._uniqueResolved, tmp, stream, opts)
- })
- }).catch(err => stream.emit('error', err))
- }
- })
- return stream
- }
-})
-
-function hostedManifest (spec, opts) {
- return BB.resolve(null).then(() => {
- if (!spec.hosted.git()) {
- throw new Error(`No git url for ${spec}`)
- }
- return plainManifest(spec.hosted.git(), spec, opts)
- }).catch(err => {
- if (!spec.hosted.https()) {
- throw err
- }
- return plainManifest(spec.hosted.https(), spec, opts)
- }).catch(err => {
- if (!spec.hosted.sshurl()) {
- throw err
- }
- return plainManifest(spec.hosted.sshurl(), spec, opts)
- })
-}
-
-function plainManifest (repo, spec, opts) {
- const rawRef = spec.gitCommittish || spec.gitRange
- return resolve(
- repo, spec, spec.name, opts
- ).then(ref => {
- if (ref) {
- const resolved = spec.saveSpec.replace(/(?:#.*)?$/, `#${ref.sha}`)
- return {
- _repo: repo,
- _resolved: resolved,
- _spec: spec,
- _ref: ref,
- _rawRef: spec.gitCommittish || spec.gitRange,
- _uniqueResolved: resolved,
- _integrity: false,
- _shasum: false
- }
- } else {
- // We're SOL and need a full clone :(
- //
- // If we're confident enough that `rawRef` is a commit SHA,
- // then we can at least get `finalize-manifest` to cache its result.
- const resolved = spec.saveSpec.replace(/(?:#.*)?$/, rawRef ? `#${rawRef}` : '')
- return {
- _repo: repo,
- _rawRef: rawRef,
- _resolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
- _uniqueResolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
- _integrity: false,
- _shasum: false
- }
- }
- })
-}
-
-function resolve (url, spec, name, opts) {
- const isSemver = !!spec.gitRange
- return git.revs(url, opts).then(remoteRefs => {
- return isSemver
- ? pickManifest({
- versions: remoteRefs.versions,
- 'dist-tags': remoteRefs['dist-tags'],
- name: name
- }, spec.gitRange, opts)
- : remoteRefs
- ? BB.resolve(
- remoteRefs.refs[spec.gitCommittish] || remoteRefs.refs[remoteRefs.shas[spec.gitCommittish]]
- )
- : null
- })
-}
-
-function withTmp (opts, cb) {
- if (opts.cache) {
- // cacache has a special facility for working in a tmp dir
- return cacache.tmp.withTmp(opts.cache, { tmpPrefix: 'git-clone' }, cb)
- } else {
- const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
- const tmpName = uniqueFilename(tmpDir, 'git-clone')
- const tmp = mkdirp(tmpName).then(() => tmpName).disposer(rimraf)
- return BB.using(tmp, cb)
- }
-}
-
-// Only certain whitelisted hosted gits support shadow cloning
-const SHALLOW_HOSTS = new Set(['github', 'gist', 'gitlab', 'bitbucket'])
-function cloneRepo (spec, repo, resolvedRef, rawRef, tmp, opts) {
- const ref = resolvedRef ? resolvedRef.ref : rawRef
- if (resolvedRef && spec.hosted && SHALLOW_HOSTS.has(spec.hosted.type)) {
- return git.shallow(repo, ref, tmp, opts)
- } else {
- return git.clone(repo, ref, tmp, opts)
- }
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/hosted.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/hosted.js
deleted file mode 100644
index d41793c81..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/hosted.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./git')
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/range.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/range.js
deleted file mode 100644
index 9f172e986..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/range.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./registry')
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/index.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/index.js
deleted file mode 100644
index 2cca7040b..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/index.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict'
-
-const cacache = require('cacache')
-const Fetcher = require('../../fetch')
-const regManifest = require('./manifest')
-const regPackument = require('./packument')
-const regTarball = require('./tarball')
-
-const fetchRegistry = module.exports = Object.create(null)
-
-Fetcher.impl(fetchRegistry, {
- packument (spec, opts) {
- return regPackument(spec, opts)
- },
-
- manifest (spec, opts) {
- return regManifest(spec, opts)
- },
-
- tarball (spec, opts) {
- return regTarball(spec, opts)
- },
-
- fromManifest (manifest, spec, opts) {
- return regTarball.fromManifest(manifest, spec, opts)
- },
-
- clearMemoized () {
- cacache.clearMemoized()
- regPackument.clearMemoized()
- }
-})
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/manifest.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/manifest.js
deleted file mode 100644
index 00deb13af..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/manifest.js
+++ /dev/null
@@ -1,81 +0,0 @@
-'use strict'
-
-const fetch = require('npm-registry-fetch')
-const fetchPackument = require('./packument')
-const optCheck = require('../../util/opt-check')
-const pickManifest = require('npm-pick-manifest')
-const ssri = require('ssri')
-
-module.exports = manifest
-function manifest (spec, opts) {
- opts = optCheck(opts)
-
- return getManifest(spec, opts).then(manifest => {
- return annotateManifest(spec, manifest, opts)
- })
-}
-
-function getManifest (spec, opts) {
- opts = opts.concat({
- fullMetadata: opts.enjoyBy ? true : opts.fullMetadata
- })
- return fetchPackument(spec, opts).then(packument => {
- try {
- return pickManifest(packument, spec.fetchSpec, {
- defaultTag: opts.defaultTag,
- enjoyBy: opts.enjoyBy,
- includeDeprecated: opts.includeDeprecated
- })
- } catch (err) {
- if ((err.code === 'ETARGET' || err.code === 'E403') && packument._cached && !opts.offline) {
- opts.log.silly(
- 'registry:manifest',
- `no matching version for ${spec.name}@${spec.fetchSpec} in the cache. Forcing revalidation.`
- )
- opts = opts.concat({
- preferOffline: false,
- preferOnline: true
- })
- return fetchPackument(spec, opts.concat({
- // Fetch full metadata in case ETARGET was due to corgi delay
- fullMetadata: true
- })).then(packument => {
- return pickManifest(packument, spec.fetchSpec, {
- defaultTag: opts.defaultTag,
- enjoyBy: opts.enjoyBy
- })
- })
- } else {
- throw err
- }
- }
- })
-}
-
-function annotateManifest (spec, manifest, opts) {
- const shasum = manifest.dist && manifest.dist.shasum
- manifest._integrity = manifest.dist && manifest.dist.integrity
- manifest._shasum = shasum
- if (!manifest._integrity && shasum) {
- // Use legacy dist.shasum field if available.
- manifest._integrity = ssri.fromHex(shasum, 'sha1').toString()
- }
- manifest._resolved = (
- manifest.dist && manifest.dist.tarball
- )
- if (!manifest._resolved) {
- const registry = fetch.pickRegistry(spec, opts)
- const uri = registry.replace(/\/?$/, '/') + spec.escapedName
-
- const err = new Error(
- `Manifest for ${manifest.name}@${manifest.version} from ${uri} is missing a tarball url (pkg.dist.tarball). Guessing a default.`
- )
- err.code = 'ENOTARBALL'
- err.manifest = manifest
- if (!manifest._warnings) { manifest._warnings = [] }
- manifest._warnings.push(err.message)
- manifest._resolved =
- `${registry}/${manifest.name}/-/${manifest.name}-${manifest.version}.tgz`
- }
- return manifest
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/packument.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/packument.js
deleted file mode 100644
index f5286c803..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/packument.js
+++ /dev/null
@@ -1,92 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const fetch = require('npm-registry-fetch')
-const LRU = require('lru-cache')
-const optCheck = require('../../util/opt-check')
-
-// Corgis are cute. 🐕🐶
-const CORGI_DOC = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
-const JSON_DOC = 'application/json'
-
-module.exports = packument
-function packument (spec, opts) {
- opts = optCheck(opts)
-
- const registry = fetch.pickRegistry(spec, opts)
- const uri = registry.replace(/\/?$/, '/') + spec.escapedName
-
- return fetchPackument(uri, registry, spec, opts)
-}
-
-const MEMO = new LRU({
- length: m => m._contentLength,
- max: 200 * 1024 * 1024, // 200MB
- maxAge: 30 * 1000 // 30s
-})
-
-module.exports.clearMemoized = clearMemoized
-function clearMemoized () {
- MEMO.reset()
-}
-
-function fetchPackument (uri, registry, spec, opts) {
- const mem = pickMem(opts)
- const accept = opts.fullMetadata ? JSON_DOC : CORGI_DOC
- const memoKey = `${uri}~(${accept})`
- if (mem && !opts.preferOnline && mem.has(memoKey)) {
- return BB.resolve(mem.get(memoKey))
- }
-
- return fetch(uri, opts.concat({
- headers: {
- 'pacote-req-type': 'packument',
- 'pacote-pkg-id': `registry:${spec.name}`,
- accept
- },
- spec
- }, opts, {
- // Force integrity to null: we never check integrity hashes for manifests
- integrity: null
- })).then(res => res.json().then(packument => {
- packument._cached = res.headers.has('x-local-cache')
- packument._contentLength = +res.headers.get('content-length')
- // NOTE - we need to call pickMem again because proxy
- // objects get reused!
- const mem = pickMem(opts)
- if (mem) {
- mem.set(memoKey, packument)
- }
- return packument
- })).catch(err => {
- if (err.code === 'E404' && !opts.fullMetadata) {
- return fetchPackument(uri, registry, spec, opts.concat({
- fullMetadata: true
- }))
- } else {
- throw err
- }
- })
-}
-
-class ObjProxy {
- get (key) { return this.obj[key] }
- set (key, val) { this.obj[key] = val }
-}
-
-// This object is used synchronously and immediately, so
-// we can safely reuse it instead of consing up new ones
-const PROX = new ObjProxy()
-function pickMem (opts) {
- if (!opts || !opts.memoize) {
- return MEMO
- } else if (opts.memoize.get && opts.memoize.set) {
- return opts.memoize
- } else if (typeof opts.memoize === 'object') {
- PROX.obj = opts.memoize
- return PROX
- } else {
- return null
- }
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/tarball.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/tarball.js
deleted file mode 100644
index 134153280..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/registry/tarball.js
+++ /dev/null
@@ -1,102 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const fetch = require('npm-registry-fetch')
-const manifest = require('./manifest')
-const optCheck = require('../../util/opt-check')
-const PassThrough = require('stream').PassThrough
-const ssri = require('ssri')
-const url = require('url')
-
-module.exports = tarball
-function tarball (spec, opts) {
- opts = optCheck(opts)
- const registry = fetch.pickRegistry(spec, opts)
- const stream = new PassThrough()
- let mani
- if (
- opts.resolved &&
- // spec.type === 'version' &&
- opts.resolved.indexOf(registry) === 0
- ) {
- // fakeChild is a shortcut to avoid looking up a manifest!
- mani = BB.resolve({
- name: spec.name,
- version: spec.fetchSpec,
- _integrity: opts.integrity,
- _resolved: opts.resolved,
- _fakeChild: true
- })
- } else {
- // We can't trust opts.resolved if it's going to a separate host.
- mani = manifest(spec, opts)
- }
-
- mani.then(mani => {
- !mani._fakeChild && stream.emit('manifest', mani)
- const fetchStream = fromManifest(mani, spec, opts).on(
- 'integrity', i => stream.emit('integrity', i)
- )
- fetchStream.on('error', err => stream.emit('error', err))
- fetchStream.pipe(stream)
- return null
- }).catch(err => stream.emit('error', err))
- return stream
-}
-
-module.exports.fromManifest = fromManifest
-function fromManifest (manifest, spec, opts) {
- opts = optCheck(opts)
- if (spec.scope) { opts = opts.concat({ scope: spec.scope }) }
- const stream = new PassThrough()
- const registry = fetch.pickRegistry(spec, opts)
- const uri = getTarballUrl(spec, registry, manifest, opts)
- fetch(uri, opts.concat({
- headers: {
- 'pacote-req-type': 'tarball',
- 'pacote-pkg-id': `registry:${manifest.name}@${uri}`
- },
- integrity: manifest._integrity,
- algorithms: [
- manifest._integrity
- ? ssri.parse(manifest._integrity).pickAlgorithm()
- : 'sha1'
- ],
- spec
- }, opts))
- .then(res => {
- const hash = res.headers.get('x-local-cache-hash')
- if (hash) {
- stream.emit('integrity', decodeURIComponent(hash))
- }
- res.body.on('error', err => stream.emit('error', err))
- res.body.pipe(stream)
- return null
- })
- .catch(err => stream.emit('error', err))
- return stream
-}
-
-function getTarballUrl (spec, registry, mani, opts) {
- const reg = url.parse(registry)
- const tarball = url.parse(mani._resolved)
- // https://github.com/npm/npm/pull/9471
- //
- // TL;DR: Some alternative registries host tarballs on http and packuments
- // on https, and vice-versa. There's also a case where people who can't use
- // SSL to access the npm registry, for example, might use
- // `--registry=http://registry.npmjs.org/`. In this case, we need to
- // rewrite `tarball` to match the protocol.
- //
- if (reg.hostname === tarball.hostname && reg.protocol !== tarball.protocol) {
- tarball.protocol = reg.protocol
- // Ports might be same host different protocol!
- if (reg.port !== tarball.port) {
- delete tarball.host
- tarball.port = reg.port
- }
- delete tarball.href
- }
- return url.format(tarball)
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/remote.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/remote.js
deleted file mode 100644
index 8941f9938..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/remote.js
+++ /dev/null
@@ -1,34 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const Fetcher = require('../fetch')
-const fetchRegistry = require('./registry')
-
-const fetchRemote = module.exports = Object.create(null)
-
-Fetcher.impl(fetchRemote, {
- packument (spec, opts) {
- return BB.reject(new Error('Not implemented yet'))
- },
-
- manifest (spec, opts) {
- // We can't get the manifest for a remote tarball until
- // we extract the tarball itself.
- // `finalize-manifest` takes care of this process of extracting
- // a manifest based on ./tarball.js
- return BB.resolve(null)
- },
-
- tarball (spec, opts) {
- const uri = spec._resolved || spec.fetchSpec
- return fetchRegistry.fromManifest({
- _resolved: uri,
- _integrity: opts.integrity
- }, spec, opts)
- },
-
- fromManifest (manifest, spec, opts) {
- return this.tarball(manifest || spec, opts)
- }
-})
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/tag.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/tag.js
deleted file mode 100644
index 9f172e986..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/tag.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./registry')
diff --git a/node_modules/libcipm/node_modules/pacote/lib/fetchers/version.js b/node_modules/libcipm/node_modules/pacote/lib/fetchers/version.js
deleted file mode 100644
index 9f172e986..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/fetchers/version.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./registry')
diff --git a/node_modules/libcipm/node_modules/pacote/lib/finalize-manifest.js b/node_modules/libcipm/node_modules/pacote/lib/finalize-manifest.js
deleted file mode 100644
index 80b9cda73..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/finalize-manifest.js
+++ /dev/null
@@ -1,248 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const cacheKey = require('./util/cache-key')
-const fetchFromManifest = require('./fetch').fromManifest
-const finished = require('./util/finished')
-const minimatch = require('minimatch')
-const normalize = require('normalize-package-data')
-const optCheck = require('./util/opt-check')
-const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
-const ssri = require('ssri')
-const tar = require('tar')
-const readJson = require('./util/read-json')
-const normalizePackageBin = require('npm-normalize-package-bin')
-
-// `finalizeManifest` takes as input the various kinds of manifests that
-// manifest handlers ('lib/fetchers/*.js#manifest()') return, and makes sure
-// they are:
-//
-// * filled out with any required data that the handler couldn't fill in
-// * formatted consistently
-// * cached so we don't have to repeat this work more than necessary
-//
-// The biggest thing this package might do is do a full tarball extraction in
-// order to find missing bits of metadata required by the npm installer. For
-// example, it will fill in `_shrinkwrap`, `_integrity`, and other details that
-// the plain manifest handlers would require a tarball to fill out. If a
-// handler returns everything necessary, this process is skipped.
-//
-// If we get to the tarball phase, the corresponding tarball handler for the
-// requested type will be invoked and the entire tarball will be read from the
-// stream.
-//
-module.exports = finalizeManifest
-function finalizeManifest (pkg, spec, opts) {
- const key = finalKey(pkg, spec)
- opts = optCheck(opts)
-
- const cachedManifest = (opts.cache && key && !opts.preferOnline && !opts.fullMetadata && !opts.enjoyBy)
- ? cacache.get.info(opts.cache, key, opts)
- : BB.resolve(null)
-
- return cachedManifest.then(cached => {
- if (cached && cached.metadata && cached.metadata.manifest) {
- return new Manifest(cached.metadata.manifest)
- } else {
- return tarballedProps(pkg, spec, opts).then(props => {
- return pkg && pkg.name
- ? new Manifest(pkg, props, opts.fullMetadata)
- : new Manifest(props, null, opts.fullMetadata)
- }).then(manifest => {
- const cacheKey = key || finalKey(manifest, spec)
- if (!opts.cache || !cacheKey) {
- return manifest
- } else {
- return cacache.put(
- opts.cache, cacheKey, '.', {
- metadata: {
- id: manifest._id,
- manifest,
- type: 'finalized-manifest'
- }
- }
- ).then(() => manifest)
- }
- })
- }
- })
-}
-
-module.exports.Manifest = Manifest
-function Manifest (pkg, fromTarball, fullMetadata) {
- fromTarball = fromTarball || {}
- if (fullMetadata) {
- Object.assign(this, pkg)
- }
- this.name = pkg.name
- this.version = pkg.version
- this.engines = pkg.engines || fromTarball.engines
- this.cpu = pkg.cpu || fromTarball.cpu
- this.os = pkg.os || fromTarball.os
- this.dependencies = pkg.dependencies || {}
- this.optionalDependencies = pkg.optionalDependencies || {}
- this.peerDependenciesMeta = pkg.peerDependenciesMeta || {}
- this.devDependencies = pkg.devDependencies || {}
- const bundled = (
- pkg.bundledDependencies ||
- pkg.bundleDependencies ||
- false
- )
- this.bundleDependencies = bundled
- this.peerDependencies = pkg.peerDependencies || {}
- this.deprecated = pkg.deprecated || false
-
- // These depend entirely on each handler
- this._resolved = pkg._resolved
-
- // Not all handlers (or registries) provide these out of the box,
- // and if they don't, we need to extract and read the tarball ourselves.
- // These are details required by the installer.
- this._integrity = pkg._integrity || fromTarball._integrity || null
- this._shasum = pkg._shasum || fromTarball._shasum || null
- this._shrinkwrap = pkg._shrinkwrap || fromTarball._shrinkwrap || null
- this.bin = pkg.bin || fromTarball.bin || null
-
- // turn arrays and strings into a legit object, strip out bad stuff
- normalizePackageBin(this)
-
- this._id = null
-
- // TODO - freezing and inextensibility pending npm changes. See test suite.
- // Object.preventExtensions(this)
- normalize(this)
-
- // I don't want this why did you give it to me. Go away. 🔥🔥🔥🔥
- delete this.readme
-
- // Object.freeze(this)
-}
-
-// Some things aren't filled in by standard manifest fetching.
-// If this function needs to do its work, it will grab the
-// package tarball, extract it, and take whatever it needs
-// from the stream.
-function tarballedProps (pkg, spec, opts) {
- const needsShrinkwrap = (!pkg || (
- pkg._hasShrinkwrap !== false &&
- !pkg._shrinkwrap
- ))
- const needsBin = !!(!pkg || (
- !pkg.bin &&
- pkg.directories &&
- pkg.directories.bin
- ))
- const needsIntegrity = !pkg || (!pkg._integrity && pkg._integrity !== false)
- const needsShasum = !pkg || (!pkg._shasum && pkg._shasum !== false)
- const needsHash = needsIntegrity || needsShasum
- const needsManifest = !pkg || !pkg.name
- const needsExtract = needsShrinkwrap || needsBin || needsManifest
- if (!needsShrinkwrap && !needsBin && !needsHash && !needsManifest) {
- return BB.resolve({})
- } else {
- opts = optCheck(opts)
- const tarStream = fetchFromManifest(pkg, spec, opts)
- const extracted = needsExtract && new tar.Parse()
- return BB.join(
- needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
- needsManifest && jsonFromStream('package.json', extracted),
- needsBin && getPaths(extracted),
- needsHash && ssri.fromStream(tarStream, { algorithms: ['sha1', 'sha512'] }),
- needsExtract && pipe(tarStream, extracted),
- (sr, mani, paths, hash) => {
- if (needsManifest && !mani) {
- const err = new Error(`Non-registry package missing package.json: ${spec}.`)
- err.code = 'ENOPACKAGEJSON'
- throw err
- }
- const extraProps = mani || {}
- delete extraProps._resolved
- // drain out the rest of the tarball
- tarStream.resume()
- // if we have directories.bin, we need to collect any matching files
- // to add to bin
- if (paths && paths.length) {
- const dirBin = mani
- ? (mani && mani.directories && mani.directories.bin)
- : (pkg && pkg.directories && pkg.directories.bin)
- if (dirBin) {
- extraProps.bin = {}
- paths.forEach(filePath => {
- if (minimatch(filePath, dirBin + '/**')) {
- const relative = path.relative(dirBin, filePath)
- if (relative && relative[0] !== '.') {
- extraProps.bin[path.basename(relative)] = path.join(dirBin, relative)
- }
- }
- })
- }
- }
- return Object.assign(extraProps, {
- _shrinkwrap: sr,
- _resolved: (mani && mani._resolved) ||
- (pkg && pkg._resolved) ||
- spec.fetchSpec,
- _integrity: needsIntegrity && hash && hash.sha512 && hash.sha512[0].toString(),
- _shasum: needsShasum && hash && hash.sha1 && hash.sha1[0].hexDigest()
- })
- }
- )
- }
-}
-
-function jsonFromStream (filename, dataStream) {
- return BB.fromNode(cb => {
- dataStream.on('error', cb)
- dataStream.on('close', cb)
- dataStream.on('entry', entry => {
- const filePath = entry.header.path.replace(/[^/]+\//, '')
- if (filePath !== filename) {
- entry.resume()
- } else {
- let data = ''
- entry.on('error', cb)
- finished(entry).then(() => {
- try {
- cb(null, readJson(data))
- } catch (err) {
- cb(err)
- }
- }, err => {
- cb(err)
- })
- entry.on('data', d => { data += d })
- }
- })
- })
-}
-
-function getPaths (dataStream) {
- return BB.fromNode(cb => {
- let paths = []
- dataStream.on('error', cb)
- dataStream.on('close', () => cb(null, paths))
- dataStream.on('entry', function handler (entry) {
- const filePath = entry.header.path.replace(/[^/]+\//, '')
- entry.resume()
- paths.push(filePath)
- })
- })
-}
-
-function finalKey (pkg, spec) {
- if (pkg && pkg._uniqueResolved) {
- // git packages have a unique, identifiable id, but no tar sha
- return cacheKey(`${spec.type}-manifest`, pkg._uniqueResolved)
- } else {
- return (
- pkg && pkg._integrity &&
- cacheKey(
- `${spec.type}-manifest`,
- `${pkg._resolved}:${ssri.stringify(pkg._integrity)}`
- )
- )
- }
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/cache-key.js b/node_modules/libcipm/node_modules/pacote/lib/util/cache-key.js
deleted file mode 100644
index 157e60b0d..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/cache-key.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-
-module.exports = cacheKey
-function cacheKey (type, identifier) {
- return ['pacote', type, identifier].join(':')
-}
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/finished.js b/node_modules/libcipm/node_modules/pacote/lib/util/finished.js
deleted file mode 100644
index 6dadc8b5b..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/finished.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-module.exports = function (child, hasExitCode = false) {
- return BB.fromNode(function (cb) {
- child.on('error', cb)
- child.on(hasExitCode ? 'close' : 'end', function (exitCode) {
- if (exitCode === undefined || exitCode === 0) {
- cb()
- } else {
- let err = new Error('exited with error code: ' + exitCode)
- cb(err)
- }
- })
- })
-}
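The helper above resolves once an emitter signals completion, optionally treating the `'close'` payload as an exit code. A hedged sketch of wrapping a child process with it (the `git --version` invocation is only an example):

```js
const BB = require('bluebird')
const cp = require('child_process')

// same shape as the removed util/finished.js
const finished = (child, hasExitCode = false) =>
  BB.fromNode(cb => {
    child.on('error', cb)
    child.on(hasExitCode ? 'close' : 'end', exitCode => {
      if (exitCode === undefined || exitCode === 0) cb()
      else cb(new Error('exited with error code: ' + exitCode))
    })
  })

// the exit code is checked because 'close' is used instead of 'end'
finished(cp.spawn('git', ['--version']), true)
  .then(() => console.log('git exited cleanly'))
```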
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/git.js b/node_modules/libcipm/node_modules/pacote/lib/util/git.js
deleted file mode 100644
index 7642eb2c8..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/git.js
+++ /dev/null
@@ -1,292 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cp = require('child_process')
-const execFileAsync = BB.promisify(cp.execFile, {
- multiArgs: true
-})
-const finished = require('./finished')
-const LRU = require('lru-cache')
-const optCheck = require('./opt-check')
-const osenv = require('osenv')
-const path = require('path')
-const pinflight = require('promise-inflight')
-const promiseRetry = require('promise-retry')
-const uniqueFilename = require('unique-filename')
-const which = BB.promisify(require('which'))
-const semver = require('semver')
-const inferOwner = require('infer-owner')
-
-const GOOD_ENV_VARS = new Set([
- 'GIT_ASKPASS',
- 'GIT_EXEC_PATH',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSH',
- 'GIT_SSH_COMMAND',
- 'GIT_SSL_CAINFO',
- 'GIT_SSL_NO_VERIFY'
-])
-
-const GIT_TRANSIENT_ERRORS = [
- 'remote error: Internal Server Error',
- 'The remote end hung up unexpectedly',
- 'Connection timed out',
- 'Operation timed out',
- 'Failed to connect to .* Timed out',
- 'Connection reset by peer',
- 'SSL_ERROR_SYSCALL',
- 'The requested URL returned error: 503'
-].join('|')
-
-const GIT_TRANSIENT_ERROR_RE = new RegExp(GIT_TRANSIENT_ERRORS)
-
-const GIT_TRANSIENT_ERROR_MAX_RETRY_NUMBER = 3
-
-function shouldRetry (error, number) {
- return GIT_TRANSIENT_ERROR_RE.test(error) && (number < GIT_TRANSIENT_ERROR_MAX_RETRY_NUMBER)
-}
-
-const GIT_ = 'GIT_'
-let GITENV
-function gitEnv () {
- if (GITENV) { return GITENV }
- const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-template-tmp')
- const tmpName = uniqueFilename(tmpDir, 'git-clone')
- GITENV = {
- GIT_ASKPASS: 'echo',
- GIT_TEMPLATE_DIR: tmpName
- }
- Object.keys(process.env).forEach(k => {
- if (GOOD_ENV_VARS.has(k) || !k.startsWith(GIT_)) {
- GITENV[k] = process.env[k]
- }
- })
- return GITENV
-}
-
-let GITPATH
-try {
- GITPATH = which.sync('git')
-} catch (e) {}
-
-module.exports.clone = fullClone
-function fullClone (repo, committish, target, opts) {
- opts = optCheck(opts)
- const gitArgs = ['clone', '--mirror', '-q', repo, path.join(target, '.git')]
- if (process.platform === 'win32') {
- gitArgs.push('--config', 'core.longpaths=true')
- }
- return execGit(gitArgs, { cwd: target }, opts).then(() => {
- return execGit(['init'], { cwd: target }, opts)
- }).then(() => {
- return execGit(['checkout', committish || 'HEAD'], { cwd: target }, opts)
- }).then(() => {
- return updateSubmodules(target, opts)
- }).then(() => headSha(target, opts))
-}
-
-module.exports.shallow = shallowClone
-function shallowClone (repo, branch, target, opts) {
- opts = optCheck(opts)
- const gitArgs = ['clone', '--depth=1', '-q']
- if (branch) {
- gitArgs.push('-b', branch)
- }
- gitArgs.push(repo, target)
- if (process.platform === 'win32') {
- gitArgs.push('--config', 'core.longpaths=true')
- }
- return execGit(gitArgs, {
- cwd: target
- }, opts).then(() => {
- return updateSubmodules(target, opts)
- }).then(() => headSha(target, opts))
-}
-
-function updateSubmodules (localRepo, opts) {
- const gitArgs = ['submodule', 'update', '-q', '--init', '--recursive']
- return execGit(gitArgs, {
- cwd: localRepo
- }, opts)
-}
-
-function headSha (repo, opts) {
- opts = optCheck(opts)
- return execGit(['rev-parse', '--revs-only', 'HEAD'], { cwd: repo }, opts).spread(stdout => {
- return stdout.trim()
- })
-}
-
-const CARET_BRACES = '^{}'
-const REVS = new LRU({
- max: 100,
- maxAge: 5 * 60 * 1000
-})
-module.exports.revs = revs
-function revs (repo, opts) {
- opts = optCheck(opts)
- const cached = REVS.get(repo)
- if (cached) {
- return BB.resolve(cached)
- }
- return pinflight(`ls-remote:${repo}`, () => {
- return spawnGit(['ls-remote', '-h', '-t', repo], {
- env: gitEnv()
- }, opts).then((stdout) => {
- return stdout.split('\n').reduce((revs, line) => {
- const split = line.split(/\s+/, 2)
- if (split.length < 2) { return revs }
- const sha = split[0].trim()
- const ref = split[1].trim().match(/(?:refs\/[^/]+\/)?(.*)/)[1]
- if (!ref) { return revs } // ???
- if (ref.endsWith(CARET_BRACES)) { return revs } // refs/tags/x^{} crap
- const type = refType(line)
- const doc = { sha, ref, type }
-
- revs.refs[ref] = doc
- // We can check out shallow clones on specific SHAs if we have a ref
- if (revs.shas[sha]) {
- revs.shas[sha].push(ref)
- } else {
- revs.shas[sha] = [ref]
- }
-
- if (type === 'tag') {
- const match = ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
- if (match && semver.valid(match[1], true)) {
- revs.versions[semver.clean(match[1], true)] = doc
- }
- }
-
- return revs
- }, { versions: {}, 'dist-tags': {}, refs: {}, shas: {} })
- }, err => {
- err.message = `Error while executing:\n${GITPATH} ls-remote -h -t ${repo}\n\n${err.stderr}\n${err.message}`
- throw err
- }).then(revs => {
- if (revs.refs.HEAD) {
- const HEAD = revs.refs.HEAD
- Object.keys(revs.versions).forEach(v => {
- if (v.sha === HEAD.sha) {
- revs['dist-tags'].HEAD = v
- if (!revs.refs.latest) {
- revs['dist-tags'].latest = revs.refs.HEAD
- }
- }
- })
- }
- REVS.set(repo, revs)
- return revs
- })
- })
-}
-
-// infer the owner from the cwd git is operating in, if not the
-// process cwd, but only if we're root.
-// See: https://github.com/npm/cli/issues/624
-module.exports._cwdOwner = cwdOwner
-function cwdOwner (gitOpts, opts) {
- const isRoot = process.getuid && process.getuid() === 0
- if (!isRoot || !gitOpts.cwd) { return Promise.resolve() }
-
- return BB.resolve(inferOwner(gitOpts.cwd).then(owner => {
- gitOpts.uid = owner.uid
- gitOpts.gid = owner.gid
- }))
-}
-
-module.exports._exec = execGit
-function execGit (gitArgs, gitOpts, opts) {
- opts = optCheck(opts)
- return BB.resolve(cwdOwner(gitOpts, opts).then(() => checkGit(opts).then(gitPath => {
- return promiseRetry((retry, number) => {
- if (number !== 1) {
- opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + number)
- }
- return execFileAsync(gitPath, gitArgs, mkOpts(gitOpts, opts)).catch((err) => {
- if (shouldRetry(err, number)) {
- retry(err)
- } else {
- throw err
- }
- })
- }, opts.retry != null ? opts.retry : {
- retries: opts['fetch-retries'],
- factor: opts['fetch-retry-factor'],
- maxTimeout: opts['fetch-retry-maxtimeout'],
- minTimeout: opts['fetch-retry-mintimeout']
- })
- })))
-}
-
-module.exports._spawn = spawnGit
-function spawnGit (gitArgs, gitOpts, opts) {
- opts = optCheck(opts)
- return BB.resolve(cwdOwner(gitOpts, opts).then(() => checkGit(opts).then(gitPath => {
- return promiseRetry((retry, number) => {
- if (number !== 1) {
- opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + number)
- }
- const child = cp.spawn(gitPath, gitArgs, mkOpts(gitOpts, opts))
-
- let stdout = ''
- let stderr = ''
- child.stdout.on('data', d => { stdout += d })
- child.stderr.on('data', d => { stderr += d })
-
- return finished(child, true).catch(err => {
- if (shouldRetry(stderr, number)) {
- retry(err)
- } else {
- err.stderr = stderr
- throw err
- }
- }).then(() => {
- return stdout
- })
- }, opts.retry)
- })))
-}
-
-module.exports._mkOpts = mkOpts
-function mkOpts (_gitOpts, opts) {
- const gitOpts = {
- env: gitEnv()
- }
- const isRoot = process.getuid && process.getuid() === 0
- // don't change child process uid/gid if not root
- if (+opts.uid && !isNaN(opts.uid) && isRoot) {
- gitOpts.uid = +opts.uid
- }
- if (+opts.gid && !isNaN(opts.gid) && isRoot) {
- gitOpts.gid = +opts.gid
- }
- Object.assign(gitOpts, _gitOpts)
- return gitOpts
-}
-
-function checkGit (opts) {
- if (opts.git) {
- return BB.resolve(opts.git)
- } else if (!GITPATH) {
- const err = new Error('No git binary found in $PATH')
- err.code = 'ENOGIT'
- return BB.reject(err)
- } else {
- return BB.resolve(GITPATH)
- }
-}
-
-const REFS_TAGS = 'refs/tags/'
-const REFS_HEADS = 'refs/heads/'
-const HEAD = 'HEAD'
-function refType (ref) {
- return ref.indexOf(REFS_TAGS) !== -1
- ? 'tag'
- : ref.indexOf(REFS_HEADS) !== -1
- ? 'branch'
- : ref.endsWith(HEAD)
- ? 'head'
- : 'other'
-}
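The execGit/spawnGit wrappers above retry only errors matching GIT_TRANSIENT_ERROR_RE, up to three attempts, using the fetch-retry-* timings. A simplified sketch of that pattern, assuming only `promise-retry` and Node's child_process (the uid/gid inference and git environment scrubbing are omitted, and the error patterns shown are an illustrative subset):

```js
const promiseRetry = require('promise-retry')
const { promisify } = require('util')
const execFile = promisify(require('child_process').execFile)

// illustrative subset of the transient-error patterns listed above
const TRANSIENT = /Connection timed out|Connection reset by peer|The remote end hung up unexpectedly/

function execGit (args, cwd) {
  return promiseRetry((retry, number) => {
    return execFile('git', args, { cwd }).catch(err => {
      // retry only failures that look like transient network problems
      if (TRANSIENT.test(String(err.message)) && number < 3) retry(err)
      else throw err
    })
  }, { retries: 2, factor: 10, minTimeout: 10000, maxTimeout: 60000 })
}

// e.g. execGit(['ls-remote', '-h', '-t', 'https://github.com/npm/node-semver.git'], process.cwd())
```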
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/opt-check.js b/node_modules/libcipm/node_modules/pacote/lib/util/opt-check.js
deleted file mode 100644
index 8b6b472f8..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/opt-check.js
+++ /dev/null
@@ -1,48 +0,0 @@
-'use strict'
-
-const figgyPudding = require('figgy-pudding')
-const logger = require('./proclog.js')
-
-const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
-const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
-module.exports = figgyPudding({
- annotate: {},
- cache: {},
- defaultTag: 'tag',
- dirPacker: {},
- dmode: {},
- 'enjoy-by': 'enjoyBy',
- enjoyBy: {},
- before: 'enjoyBy',
- fmode: {},
- 'fetch-retries': { default: 2 },
- 'fetch-retry-factor': { default: 10 },
- 'fetch-retry-maxtimeout': { default: 60000 },
- 'fetch-retry-mintimeout': { default: 10000 },
- fullMetadata: 'full-metadata',
- 'full-metadata': { default: false },
- gid: {},
- git: {},
- includeDeprecated: { default: true },
- 'include-deprecated': 'includeDeprecated',
- integrity: {},
- log: { default: logger },
- memoize: {},
- offline: {},
- preferOffline: 'prefer-offline',
- 'prefer-offline': {},
- preferOnline: 'prefer-online',
- 'prefer-online': {},
- registry: { default: 'https://registry.npmjs.org/' },
- resolved: {},
- retry: {},
- scope: {},
- tag: { default: 'latest' },
- uid: {},
- umask: {},
- where: {}
-}, {
- other (key) {
- return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
- }
-})
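The pudding above supplies defaults and camelCase/kebab-case aliases for pacote's options. A small sketch of how figgy-pudding behaves here, assuming its documented alias and `.concat()` semantics (the keys and registry URL are illustrative, not the full set used above):

```js
const figgyPudding = require('figgy-pudding')

const TestOpts = figgyPudding({
  'fetch-retries': { default: 2 },
  retries: 'fetch-retries',      // alias onto the kebab-case key
  registry: { default: 'https://registry.npmjs.org/' }
})

const opts = TestOpts({ registry: 'https://example.test/registry/' })
opts['fetch-retries']  // 2, from the default
opts.retries           // 2, resolved through the alias
opts.registry          // 'https://example.test/registry/'

// concat() layers overrides on top without mutating the original
const noRetries = opts.concat({ 'fetch-retries': 0 })
noRetries['fetch-retries']  // 0
```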
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/pack-dir.js b/node_modules/libcipm/node_modules/pacote/lib/util/pack-dir.js
deleted file mode 100644
index 157a9a82f..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/pack-dir.js
+++ /dev/null
@@ -1,44 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const cacheKey = require('./cache-key')
-const optCheck = require('./opt-check')
-const packlist = require('npm-packlist')
-const pipe = BB.promisify(require('mississippi').pipe)
-const tar = require('tar')
-
-module.exports = packDir
-function packDir (manifest, label, dir, target, opts) {
- opts = optCheck(opts)
-
- const packer = opts.dirPacker
- ? BB.resolve(opts.dirPacker(manifest, dir))
- : mkPacker(dir)
-
- if (!opts.cache) {
- return packer.then(packer => pipe(packer, target))
- } else {
- const cacher = cacache.put.stream(
- opts.cache, cacheKey('packed-dir', label), opts
- ).on('integrity', i => {
- target.emit('integrity', i)
- })
- return packer.then(packer => BB.all([
- pipe(packer, cacher),
- pipe(packer, target)
- ]))
- }
-}
-
-function mkPacker (dir) {
- return packlist({ path: dir }).then(files => {
- return tar.c({
- cwd: dir,
- gzip: true,
- portable: true,
- prefix: 'package/'
- }, files)
- })
-}
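Without a cache, packDir above reduces to `packlist` feeding `tar.c` and piping the result to the target stream. A sketch of that non-caching path writing straight to a file (the paths are illustrative):

```js
const packlist = require('npm-packlist')
const tar = require('tar')
const fs = require('fs')

function packToFile (dir, tarball) {
  return packlist({ path: dir }).then(files =>
    new Promise((resolve, reject) => {
      // mirrors mkPacker(): gzipped, portable, everything under package/
      tar.c({ cwd: dir, gzip: true, portable: true, prefix: 'package/' }, files)
        .pipe(fs.createWriteStream(tarball))
        .on('error', reject)
        .on('close', resolve)
    })
  )
}

// e.g. packToFile('/path/to/pkg', '/tmp/pkg.tgz')
```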
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/proclog.js b/node_modules/libcipm/node_modules/pacote/lib/util/proclog.js
deleted file mode 100644
index e4a2bf8ac..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/proclog.js
+++ /dev/null
@@ -1,23 +0,0 @@
-'use strict'
-
-const LEVELS = [
- 'notice',
- 'error',
- 'warn',
- 'info',
- 'verbose',
- 'http',
- 'silly',
- 'pause',
- 'resume'
-]
-
-const logger = {}
-for (const level of LEVELS) {
- logger[level] = log(level)
-}
-module.exports = logger
-
-function log (level) {
- return (category, ...args) => process.emit('log', level, category, ...args)
-}
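The logger above writes nothing itself; it only emits `'log'` events on `process`, so whatever wires up npm's real logger decides what gets printed. A minimal consumer sketch (the message is illustrative):

```js
// attach a listener the same way the CLI attaches its real logger
process.on('log', (level, category, ...args) => {
  console.error(`[${level}] ${category}:`, ...args)
})

// anything using the proclog-style logger now surfaces through that listener
process.emit('log', 'silly', 'pacote', 'range manifest fetched in 12ms')
```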
diff --git a/node_modules/libcipm/node_modules/pacote/lib/util/read-json.js b/node_modules/libcipm/node_modules/pacote/lib/util/read-json.js
deleted file mode 100644
index 32fffbc53..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/util/read-json.js
+++ /dev/null
@@ -1,15 +0,0 @@
-'use strict'
-
-module.exports = function (content) {
- // Code also yanked from read-package-json.
- function stripBOM (content) {
- content = content.toString()
- // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
- // because the buffer-to-string conversion in `fs.readFileSync()`
- // translates it to FEFF, the UTF-16 BOM.
- if (content.charCodeAt(0) === 0xFEFF) return content.slice(1)
- return content
- }
-
- return JSON.parse(stripBOM(content))
-}
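The BOM handling matters because `Buffer#toString()` turns a UTF-8 BOM (EF BB BF) into U+FEFF, which `JSON.parse` rejects. A quick check, assuming the module above is saved as `read-json.js`:

```js
const readJson = require('./read-json.js')

const withBom = Buffer.concat([
  Buffer.from([0xEF, 0xBB, 0xBF]),                      // UTF-8 byte order mark
  Buffer.from('{"name":"example","version":"1.0.0"}')
])

readJson(withBom)  // => { name: 'example', version: '1.0.0' }
```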
diff --git a/node_modules/libcipm/node_modules/pacote/lib/with-tarball-stream.js b/node_modules/libcipm/node_modules/pacote/lib/with-tarball-stream.js
deleted file mode 100644
index 0d84696d6..000000000
--- a/node_modules/libcipm/node_modules/pacote/lib/with-tarball-stream.js
+++ /dev/null
@@ -1,135 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const fetch = require('./fetch.js')
-const fs = require('fs')
-const npa = require('npm-package-arg')
-const optCheck = require('./util/opt-check.js')
-const path = require('path')
-const ssri = require('ssri')
-const retry = require('promise-retry')
-
-const statAsync = BB.promisify(fs.stat)
-
-const RETRIABLE_ERRORS = new Set(['ENOENT', 'EINTEGRITY', 'Z_DATA_ERROR'])
-
-module.exports = withTarballStream
-function withTarballStream (spec, opts, streamHandler) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
-
- // First, we check for a file: resolved shortcut
- const tryFile = (
- !opts.preferOnline &&
- opts.integrity &&
- opts.resolved &&
- opts.resolved.startsWith('file:')
- )
- ? BB.try(() => {
- // NOTE - this is a special shortcut! Packages installed as files do not
- // have a `resolved` field -- this specific case only occurs when you have,
- // say, a git dependency or a registry dependency that you've packaged into
- // a local file, and put that file: spec in the `resolved` field.
- opts.log.silly('pacote', `trying ${spec} by local file: ${opts.resolved}`)
- const file = path.resolve(opts.where || '.', opts.resolved.substr(5))
- return statAsync(file)
- .then(() => {
- const verifier = ssri.integrityStream({ integrity: opts.integrity })
- const stream = fs.createReadStream(file)
- .on('error', err => verifier.emit('error', err))
- .pipe(verifier)
- return streamHandler(stream)
- })
- .catch(err => {
- if (err.code === 'EINTEGRITY') {
-            opts.log.warn('pacote', `EINTEGRITY while extracting ${spec} from ${file}. You will have to recreate the file.`)
- opts.log.verbose('pacote', `EINTEGRITY for ${spec}: ${err.message}`)
- }
- throw err
- })
- })
- : BB.reject(Object.assign(new Error('no file!'), { code: 'ENOENT' }))
-
- const tryDigest = tryFile
- .catch(err => {
- if (
- opts.preferOnline ||
- !opts.cache ||
- !opts.integrity ||
- !RETRIABLE_ERRORS.has(err.code)
- ) {
- throw err
- } else {
- opts.log.silly('tarball', `trying ${spec} by hash: ${opts.integrity}`)
- const stream = cacache.get.stream.byDigest(
- opts.cache, opts.integrity, opts
- )
- stream.once('error', err => stream.on('newListener', (ev, l) => {
- if (ev === 'error') { l(err) }
- }))
- return streamHandler(stream)
- .catch(err => {
- if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
- opts.log.warn('tarball', `cached data for ${spec} (${opts.integrity}) seems to be corrupted. Refreshing cache.`)
- return cleanUpCached(opts.cache, opts.integrity, opts)
- .then(() => { throw err })
- } else {
- throw err
- }
- })
- }
- })
-
- const trySpec = tryDigest
- .catch(err => {
- if (!RETRIABLE_ERRORS.has(err.code)) {
- // If it's not one of our retriable errors, bail out and give up.
- throw err
- } else {
- opts.log.silly(
- 'tarball',
- `no local data for ${spec}. Extracting by manifest.`
- )
- return BB.resolve(retry((tryAgain, attemptNum) => {
- const tardata = fetch.tarball(spec, opts)
- if (!opts.resolved) {
- tardata.on('manifest', m => {
- opts = opts.concat({ resolved: m._resolved })
- })
- tardata.on('integrity', i => {
- opts = opts.concat({ integrity: i })
- })
- }
- return BB.try(() => streamHandler(tardata))
- .catch(err => {
- // Retry once if we have a cache, to clear up any weird conditions.
- // Don't retry network errors, though -- make-fetch-happen has already
- // taken care of making sure we're all set on that front.
- if (opts.cache && err.code && !String(err.code).match(/^E\d{3}$/)) {
- if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
- opts.log.warn('tarball', `tarball data for ${spec} (${opts.integrity}) seems to be corrupted. Trying one more time.`)
- }
- return cleanUpCached(opts.cache, err.sri, opts)
- .then(() => tryAgain(err))
- } else {
- throw err
- }
- })
- }, { retries: 1 }))
- }
- })
-
- return trySpec
- .catch(err => {
- if (err.code === 'EINTEGRITY') {
- err.message = `Verification failed while extracting ${spec}:\n${err.message}`
- }
- throw err
- })
-}
-
-function cleanUpCached (cachePath, integrity, opts) {
- return cacache.rm.content(cachePath, integrity, opts)
-}
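The flow above is: try a `file:` resolved shortcut, then the cache by digest, then the network, purging corrupt cache entries (EINTEGRITY / Z_DATA_ERROR) so a retry can refetch. A sketch of just the cache-by-digest step, using cacache's `get.stream.byDigest` and `rm.content` (the cachePath and integrity arguments are placeholders):

```js
const cacache = require('cacache')

function fromCache (cachePath, integrity, streamHandler) {
  const stream = cacache.get.stream.byDigest(cachePath, integrity)
  return Promise.resolve(streamHandler(stream)).catch(err => {
    if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
      // cached data is corrupt: drop it so the next attempt goes to the network
      return cacache.rm.content(cachePath, integrity).then(() => { throw err })
    }
    throw err
  })
}
```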
diff --git a/node_modules/libcipm/node_modules/pacote/manifest.js b/node_modules/libcipm/node_modules/pacote/manifest.js
deleted file mode 100644
index 6a89ff76b..000000000
--- a/node_modules/libcipm/node_modules/pacote/manifest.js
+++ /dev/null
@@ -1,38 +0,0 @@
-'use strict'
-
-const fetchManifest = require('./lib/fetch').manifest
-const finalizeManifest = require('./lib/finalize-manifest')
-const optCheck = require('./lib/util/opt-check')
-const pinflight = require('promise-inflight')
-const npa = require('npm-package-arg')
-
-module.exports = manifest
-function manifest (spec, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
-
- const label = [
- spec.name,
- spec.saveSpec || spec.fetchSpec,
- spec.type,
- opts.cache,
- opts.registry,
- opts.scope
- ].join(':')
- return pinflight(label, () => {
- const startTime = Date.now()
- return fetchManifest(spec, opts).then(rawManifest => {
- return finalizeManifest(rawManifest, spec, opts)
- }).then(manifest => {
- if (opts.annotate) {
- manifest._from = spec.saveSpec || spec.raw
- manifest._requested = spec
- manifest._spec = spec.raw
- manifest._where = opts.where
- }
- const elapsedTime = Date.now() - startTime
- opts.log.silly('pacote', `${spec.type} manifest for ${spec.name}@${spec.saveSpec || spec.fetchSpec} fetched in ${elapsedTime}ms`)
- return manifest
- })
- })
-}
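From the caller's side this is the pacote@9-style `manifest()` entry point. A hedged usage sketch (the spec and cache path are illustrative; `_from`, `_requested`, `_spec` and `_where` only appear because `annotate` is set):

```js
const pacote = require('pacote')

pacote.manifest('npm-packlist@^1.1.12', {
  cache: '/path/to/_cacache',
  annotate: true
}).then(manifest => {
  console.log(manifest.name, manifest.version, manifest._from)
})
```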
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/.bin/semver b/node_modules/libcipm/node_modules/pacote/node_modules/.bin/semver
deleted file mode 120000
index 317eb293d..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/.bin/semver
+++ /dev/null
@@ -1 +0,0 @@
-../semver/bin/semver \ No newline at end of file
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/LICENSE b/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/README.md b/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/README.md
deleted file mode 100644
index ead5821e8..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/README.md
+++ /dev/null
@@ -1,68 +0,0 @@
-# npm-packlist
-
-[![Build Status](https://travis-ci.com/npm/npm-packlist.svg?token=hHeDp9pQmz9kvsgRNVHy&branch=master)](https://travis-ci.com/npm/npm-packlist)
-
-Get a list of the files to add from a folder into an npm package
-
-These can be handed to [tar](http://npm.im/tar) like so to make an npm
-package tarball:
-
-```js
-const packlist = require('npm-packlist')
-const tar = require('tar')
-const packageDir = '/path/to/package'
-const packageTarball = '/path/to/package.tgz'
-
-packlist({ path: packageDir })
- .then(files => tar.create({
- prefix: 'package/',
- cwd: packageDir,
- file: packageTarball,
- gzip: true
- }, files))
- .then(_ => {
- // tarball has been created, continue with your day
- })
-```
-
-This uses the following rules:
-
-1. If a `package.json` file is found, and it has a `files` list,
- then ignore everything that isn't in `files`. Always include the
- readme, license, notice, changes, changelog, and history files, if
- they exist, and the package.json file itself.
-2. If there's no `package.json` file (or it has no `files` list), and
- there is a `.npmignore` file, then ignore all the files in the
- `.npmignore` file.
-3. If there's no `package.json` with a `files` list, and there's no
- `.npmignore` file, but there is a `.gitignore` file, then ignore
- all the files in the `.gitignore` file.
-4. Everything in the root `node_modules` is ignored, unless it's a
- bundled dependency. If it IS a bundled dependency, and it's a
- symbolic link, then the target of the link is included, not the
- symlink itself.
-5. Unless they're explicitly included (by being in a `files` list, or
- a `!negated` rule in a relevant `.npmignore` or `.gitignore`),
- always ignore certain common cruft files:
-
- 1. .npmignore and .gitignore files (their effect is in the package
- already, there's no need to include them in the package)
- 2. editor junk like `.*.swp`, `._*` and `.*.orig` files
- 3. `.npmrc` files (these may contain private configs)
- 4. The `node_modules/.bin` folder
- 5. Waf and gyp cruft like `/build/config.gypi` and `.lock-wscript`
- 6. Darwin's `.DS_Store` files because wtf are those even
- 7. `npm-debug.log` files at the root of a project
-
- You can explicitly re-include any of these with a `files` list in
- `package.json` or a negated ignore file rule.
-
-## API
-
-Same API as [ignore-walk](http://npm.im/ignore-walk), just hard-coded
-file list and rule sets.
-
-The `Walker` and `WalkerSync` classes take a `bundled` argument, which
-is a list of package names to include from node_modules. When calling
-the top-level `packlist()` and `packlist.sync()` functions, this
-module calls into `npm-bundled` directly.
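For completeness, the package also ships a synchronous variant; a small sketch (the path is illustrative):

```js
const packlist = require('npm-packlist')

const files = packlist.sync({ path: '/path/to/package' })
console.log(files)  // e.g. [ 'package.json', 'index.js', 'README.md', ... ]
```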
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/index.js b/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/index.js
deleted file mode 100644
index eaf14b866..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/index.js
+++ /dev/null
@@ -1,289 +0,0 @@
-'use strict'
-
-// Do a two-pass walk, first to get the list of packages that need to be
-// bundled, then again to get the actual files and folders.
-// Keep a cache of node_modules content and package.json data, so that the
-// second walk doesn't have to re-do all the same work.
-
-const bundleWalk = require('npm-bundled')
-const BundleWalker = bundleWalk.BundleWalker
-const BundleWalkerSync = bundleWalk.BundleWalkerSync
-
-const ignoreWalk = require('ignore-walk')
-const IgnoreWalker = ignoreWalk.Walker
-const IgnoreWalkerSync = ignoreWalk.WalkerSync
-
-const rootBuiltinRules = Symbol('root-builtin-rules')
-const packageNecessaryRules = Symbol('package-necessary-rules')
-const path = require('path')
-
-const normalizePackageBin = require('npm-normalize-package-bin')
-
-const defaultRules = [
- '.npmignore',
- '.gitignore',
- '**/.git',
- '**/.svn',
- '**/.hg',
- '**/CVS',
- '**/.git/**',
- '**/.svn/**',
- '**/.hg/**',
- '**/CVS/**',
- '/.lock-wscript',
- '/.wafpickle-*',
- '/build/config.gypi',
- 'npm-debug.log',
- '**/.npmrc',
- '.*.swp',
- '.DS_Store',
- '**/.DS_Store/**',
- '._*',
- '**/._*/**',
- '*.orig',
- '/package-lock.json',
- '/yarn.lock',
- 'archived-packages/**',
- 'core',
- '!core/',
- '!**/core/',
- '*.core',
- '*.vgcore',
- 'vgcore.*',
- 'core.+([0-9])',
-]
-
-// There may be others, but :?|<> are handled by node-tar
-const nameIsBadForWindows = file => /\*/.test(file)
-
-// a decorator that applies our custom rules to an ignore walker
-const npmWalker = Class => class Walker extends Class {
- constructor (opt) {
- opt = opt || {}
-
- // the order in which rules are applied.
- opt.ignoreFiles = [
- rootBuiltinRules,
- 'package.json',
- '.npmignore',
- '.gitignore',
- packageNecessaryRules
- ]
-
- opt.includeEmpty = false
- opt.path = opt.path || process.cwd()
- const dirName = path.basename(opt.path)
- const parentName = path.basename(path.dirname(opt.path))
- opt.follow =
- dirName === 'node_modules' ||
- (parentName === 'node_modules' && /^@/.test(dirName))
- super(opt)
-
- // ignore a bunch of things by default at the root level.
- // also ignore anything in node_modules, except bundled dependencies
- if (!this.parent) {
- this.bundled = opt.bundled || []
- this.bundledScopes = Array.from(new Set(
- this.bundled.filter(f => /^@/.test(f))
- .map(f => f.split('/')[0])))
- const rules = defaultRules.join('\n') + '\n'
- this.packageJsonCache = opt.packageJsonCache || new Map()
- super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_)
- } else {
- this.bundled = []
- this.bundledScopes = []
- this.packageJsonCache = this.parent.packageJsonCache
- }
- }
-
- onReaddir (entries) {
- if (!this.parent) {
- entries = entries.filter(e =>
- e !== '.git' &&
- !(e === 'node_modules' && this.bundled.length === 0)
- )
- }
- return super.onReaddir(entries)
- }
-
- filterEntry (entry, partial) {
- // get the partial path from the root of the walk
- const p = this.path.substr(this.root.length + 1)
- const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/
- const isRoot = !this.parent
- const pkg = isRoot && pkgre.test(entry) ?
- entry.replace(pkgre, '$1') : null
- const rootNM = isRoot && entry === 'node_modules'
- const rootPJ = isRoot && entry === 'package.json'
-
- return (
- // if we're in a bundled package, check with the parent.
- /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry(
- this.basename + '/' + entry, partial)
-
- // if package is bundled, all files included
- // also include @scope dirs for bundled scoped deps
- // they'll be ignored if no files end up in them.
- // However, this only matters if we're in the root.
- // node_modules folders elsewhere, like lib/node_modules,
- // should be included normally unless ignored.
- : pkg ? -1 !== this.bundled.indexOf(pkg) ||
- -1 !== this.bundledScopes.indexOf(pkg)
-
- // only walk top node_modules if we want to bundle something
- : rootNM ? !!this.bundled.length
-
- // always include package.json at the root.
- : rootPJ ? true
-
- // otherwise, follow ignore-walk's logic
- : super.filterEntry(entry, partial)
- )
- }
-
- filterEntries () {
- if (this.ignoreRules['package.json'])
- this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null
- else if (this.ignoreRules['.npmignore'])
- this.ignoreRules['.gitignore'] = null
- this.filterEntries = super.filterEntries
- super.filterEntries()
- }
-
- addIgnoreFile (file, then) {
- const ig = path.resolve(this.path, file)
- if (this.packageJsonCache.has(ig))
- this.onPackageJson(ig, this.packageJsonCache.get(ig), then)
- else
- super.addIgnoreFile(file, then)
- }
-
- onPackageJson (ig, pkg, then) {
- this.packageJsonCache.set(ig, pkg)
-
- // if there's a bin, browser or main, make sure we don't ignore it
- // also, don't ignore the package.json itself!
- //
- // Weird side-effect of this: a readme (etc) file will be included
- // if it exists anywhere within a folder with a package.json file.
- // The original intent was only to include these files in the root,
- // but now users in the wild are dependent on that behavior for
- // localized documentation and other use cases. Adding a `/` to
- // these rules, while tempting and arguably more "correct", is a
- // breaking change.
- const rules = [
- pkg.browser ? '!' + pkg.browser : '',
- pkg.main ? '!' + pkg.main : '',
- '!package.json',
- '!npm-shrinkwrap.json',
- '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}'
- ]
- if (pkg.bin) {
- // always an object, because normalized already
- for (const key in pkg.bin)
- rules.push('!' + pkg.bin[key])
- }
-
- const data = rules.filter(f => f).join('\n') + '\n'
- super.onReadIgnoreFile(packageNecessaryRules, data, _=>_)
-
- if (Array.isArray(pkg.files))
- super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map(
- f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**'
- ).join('\n') + '\n', then)
- else
- then()
- }
-
- // override parent stat function to completely skip any filenames
- // that will break windows entirely.
- // XXX(isaacs) Next major version should make this an error instead.
- stat (entry, file, dir, then) {
- if (nameIsBadForWindows(entry))
- then()
- else
- super.stat(entry, file, dir, then)
- }
-
- // override parent onstat function to nix all symlinks
- onstat (st, entry, file, dir, then) {
- if (st.isSymbolicLink())
- then()
- else
- super.onstat(st, entry, file, dir, then)
- }
-
- onReadIgnoreFile (file, data, then) {
- if (file === 'package.json')
- try {
- const ig = path.resolve(this.path, file)
- this.onPackageJson(ig, normalizePackageBin(JSON.parse(data)), then)
- } catch (er) {
- // ignore package.json files that are not json
- then()
- }
- else
- super.onReadIgnoreFile(file, data, then)
- }
-
- sort (a, b) {
- return sort(a, b)
- }
-}
-
-class Walker extends npmWalker(IgnoreWalker) {
- walker (entry, then) {
- new Walker(this.walkerOpt(entry)).on('done', then).start()
- }
-}
-
-class WalkerSync extends npmWalker(IgnoreWalkerSync) {
- walker (entry, then) {
- new WalkerSync(this.walkerOpt(entry)).start()
- then()
- }
-}
-
-const walk = (options, callback) => {
- options = options || {}
- const p = new Promise((resolve, reject) => {
- const bw = new BundleWalker(options)
- bw.on('done', bundled => {
- options.bundled = bundled
- options.packageJsonCache = bw.packageJsonCache
- new Walker(options).on('done', resolve).on('error', reject).start()
- })
- bw.start()
- })
- return callback ? p.then(res => callback(null, res), callback) : p
-}
-
-const walkSync = options => {
- options = options || {}
- const bw = new BundleWalkerSync(options).start()
- options.bundled = bw.result
- options.packageJsonCache = bw.packageJsonCache
- const walker = new WalkerSync(options)
- walker.start()
- return walker.result
-}
-
-// optimize for compressibility
-// extname, then basename, then locale alphabetically
-// https://twitter.com/isntitvacant/status/1131094910923231232
-const sort = (a, b) => {
- const exta = path.extname(a).toLowerCase()
- const extb = path.extname(b).toLowerCase()
- const basea = path.basename(a).toLowerCase()
- const baseb = path.basename(b).toLowerCase()
-
- return exta.localeCompare(extb) ||
- basea.localeCompare(baseb) ||
- a.localeCompare(b)
-}
-
-
-module.exports = walk
-walk.sync = walkSync
-walk.Walker = Walker
-walk.WalkerSync = WalkerSync
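The `sort()` near the end orders entries by extension, then basename, then full path, which groups similar files together and tends to compress better. A self-contained check of that ordering:

```js
const path = require('path')

const sort = (a, b) => {
  const exta = path.extname(a).toLowerCase()
  const extb = path.extname(b).toLowerCase()
  const basea = path.basename(a).toLowerCase()
  const baseb = path.basename(b).toLowerCase()
  return exta.localeCompare(extb) ||
    basea.localeCompare(baseb) ||
    a.localeCompare(b)
}

const files = ['package.json', 'index.js', 'lib/b.js', 'README.md', 'lib/a.js']
files.sort(sort)
// => [ 'lib/a.js', 'lib/b.js', 'index.js', 'package.json', 'README.md' ]
```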
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/package.json b/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/package.json
deleted file mode 100644
index 0a732a758..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/npm-packlist/package.json
+++ /dev/null
@@ -1,74 +0,0 @@
-{
- "_from": "npm-packlist@^1.1.12",
- "_id": "npm-packlist@1.4.8",
- "_inBundle": false,
- "_integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==",
- "_location": "/libcipm/pacote/npm-packlist",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-packlist@^1.1.12",
- "name": "npm-packlist",
- "escapedName": "npm-packlist",
- "rawSpec": "^1.1.12",
- "saveSpec": null,
- "fetchSpec": "^1.1.12"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz",
- "_shasum": "56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e",
- "_spec": "npm-packlist@^1.1.12",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/npm-packlist/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "ignore-walk": "^3.0.1",
- "npm-bundled": "^1.0.1",
- "npm-normalize-package-bin": "^1.0.1"
- },
- "deprecated": false,
- "description": "Get a list of the files to add from a folder into an npm package",
- "devDependencies": {
- "mkdirp": "^0.5.1",
- "rimraf": "^2.6.1",
- "tap": "^14.6.9"
- },
- "directories": {
- "test": "test"
- },
- "files": [
- "index.js"
- ],
- "homepage": "https://www.npmjs.com/package/npm-packlist",
- "license": "ISC",
- "main": "index.js",
- "name": "npm-packlist",
- "publishConfig": {
- "tag": "legacy-v1"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/npm-packlist.git"
- },
- "scripts": {
- "postpublish": "git push origin --follow-tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "snap": "tap",
- "test": "tap"
- },
- "tap": {
- "jobs": 1
- },
- "version": "1.4.8"
-}
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/CHANGELOG.md b/node_modules/libcipm/node_modules/pacote/node_modules/semver/CHANGELOG.md
deleted file mode 100644
index 66304fdd2..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/CHANGELOG.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# changes log
-
-## 5.7
-
-* Add `minVersion` method
-
-## 5.6
-
-* Move boolean `loose` param to an options object, with
- backwards-compatibility protection.
-* Add ability to opt out of special prerelease version handling with
- the `includePrerelease` option flag.
-
-## 5.5
-
-* Add version coercion capabilities
-
-## 5.4
-
-* Add intersection checking
-
-## 5.3
-
-* Add `minSatisfying` method
-
-## 5.2
-
-* Add `prerelease(v)` that returns prerelease components
-
-## 5.1
-
-* Add Backus-Naur for ranges
-* Remove excessively cute inspection methods
-
-## 5.0
-
-* Remove AMD/Browserified build artifacts
-* Fix ltr and gtr when using the `*` range
-* Fix for range `*` with a prerelease identifier
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/LICENSE b/node_modules/libcipm/node_modules/pacote/node_modules/semver/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/README.md b/node_modules/libcipm/node_modules/pacote/node_modules/semver/README.md
deleted file mode 100644
index f8dfa5a0d..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/README.md
+++ /dev/null
@@ -1,412 +0,0 @@
-semver(1) -- The semantic versioner for npm
-===========================================
-
-## Install
-
-```bash
-npm install --save semver
-```
-
-## Usage
-
-As a node module:
-
-```js
-const semver = require('semver')
-
-semver.valid('1.2.3') // '1.2.3'
-semver.valid('a.b.c') // null
-semver.clean(' =v1.2.3 ') // '1.2.3'
-semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
-semver.gt('1.2.3', '9.8.7') // false
-semver.lt('1.2.3', '9.8.7') // true
-semver.minVersion('>=1.0.0') // '1.0.0'
-semver.valid(semver.coerce('v2')) // '2.0.0'
-semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7'
-```
-
-As a command-line utility:
-
-```
-$ semver -h
-
-A JavaScript implementation of the https://semver.org/ specification
-Copyright Isaac Z. Schlueter
-
-Usage: semver [options] <version> [<version> [...]]
-Prints valid versions sorted by SemVer precedence
-
-Options:
--r --range <range>
- Print versions that match the specified range.
-
--i --increment [<level>]
- Increment a version by the specified level. Level can
- be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
- Only one version may be specified.
-
---preid <identifier>
- Identifier to be used to prefix premajor, preminor,
- prepatch or prerelease version increments.
-
--l --loose
- Interpret versions and ranges loosely
-
--p --include-prerelease
- Always include prerelease versions in range matching
-
--c --coerce
- Coerce a string into SemVer if possible
- (does not imply --loose)
-
-Program exits successfully if any valid version satisfies
-all supplied ranges, and prints all satisfying versions.
-
-If no satisfying versions are found, then exits failure.
-
-Versions are printed in ascending order, so supplying
-multiple versions to the utility will just sort them.
-```
-
-## Versions
-
-A "version" is described by the `v2.0.0` specification found at
-<https://semver.org/>.
-
-A leading `"="` or `"v"` character is stripped off and ignored.
-
-## Ranges
-
-A `version range` is a set of `comparators` which specify versions
-that satisfy the range.
-
-A `comparator` is composed of an `operator` and a `version`. The set
-of primitive `operators` is:
-
-* `<` Less than
-* `<=` Less than or equal to
-* `>` Greater than
-* `>=` Greater than or equal to
-* `=` Equal. If no operator is specified, then equality is assumed,
- so this operator is optional, but MAY be included.
-
-For example, the comparator `>=1.2.7` would match the versions
-`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
-or `1.1.0`.
-
-Comparators can be joined by whitespace to form a `comparator set`,
-which is satisfied by the **intersection** of all of the comparators
-it includes.
-
-A range is composed of one or more comparator sets, joined by `||`. A
-version matches a range if and only if every comparator in at least
-one of the `||`-separated comparator sets is satisfied by the version.
-
-For example, the range `>=1.2.7 <1.3.0` would match the versions
-`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
-or `1.1.0`.
-
-The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
-`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
-
-### Prerelease Tags
-
-If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
-it will only be allowed to satisfy comparator sets if at least one
-comparator with the same `[major, minor, patch]` tuple also has a
-prerelease tag.
-
-For example, the range `>1.2.3-alpha.3` would be allowed to match the
-version `1.2.3-alpha.7`, but it would *not* be satisfied by
-`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
-than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
-range only accepts prerelease tags on the `1.2.3` version. The
-version `3.4.5` *would* satisfy the range, because it does not have a
-prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
-
-The purpose for this behavior is twofold. First, prerelease versions
-frequently are updated very quickly, and contain many breaking changes
-that are (by the author's design) not yet fit for public consumption.
-Therefore, by default, they are excluded from range matching
-semantics.
-
-Second, a user who has opted into using a prerelease version has
-clearly indicated the intent to use *that specific* set of
-alpha/beta/rc versions. By including a prerelease tag in the range,
-the user is indicating that they are aware of the risk. However, it
-is still not appropriate to assume that they have opted into taking a
-similar risk on the *next* set of prerelease versions.
-
-Note that this behavior can be suppressed (treating all prerelease
-versions as if they were normal versions, for the purpose of range
-matching) by setting the `includePrerelease` flag on the options
-object to any
-[functions](https://github.com/npm/node-semver#functions) that do
-range matching.
-
-#### Prerelease Identifiers
-
-The method `.inc` takes an additional `identifier` string argument that
-will append the value of the string as a prerelease identifier:
-
-```javascript
-semver.inc('1.2.3', 'prerelease', 'beta')
-// '1.2.4-beta.0'
-```
-
-command-line example:
-
-```bash
-$ semver 1.2.3 -i prerelease --preid beta
-1.2.4-beta.0
-```
-
-Which then can be used to increment further:
-
-```bash
-$ semver 1.2.4-beta.0 -i prerelease
-1.2.4-beta.1
-```
-
-### Advanced Range Syntax
-
-Advanced range syntax desugars to primitive comparators in
-deterministic ways.
-
-Advanced ranges may be combined in the same way as primitive
-comparators using white space or `||`.
-
-#### Hyphen Ranges `X.Y.Z - A.B.C`
-
-Specifies an inclusive set.
-
-* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
-
-If a partial version is provided as the first version in the inclusive
-range, then the missing pieces are replaced with zeroes.
-
-* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
-
-If a partial version is provided as the second version in the
-inclusive range, then all versions that start with the supplied parts
-of the tuple are accepted, but nothing that would be greater than the
-provided tuple parts.
-
-* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
-* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
-
-#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
-
-Any of `X`, `x`, or `*` may be used to "stand in" for one of the
-numeric values in the `[major, minor, patch]` tuple.
-
-* `*` := `>=0.0.0` (Any version satisfies)
-* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
-* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
-
-A partial version range is treated as an X-Range, so the special
-character is in fact optional.
-
-* `""` (empty string) := `*` := `>=0.0.0`
-* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
-* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
-
-#### Tilde Ranges `~1.2.3` `~1.2` `~1`
-
-Allows patch-level changes if a minor version is specified on the
-comparator. Allows minor-level changes if not.
-
-* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
-* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
-* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
-* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
-* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
-* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
-* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-
-#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
-
-Allows changes that do not modify the left-most non-zero digit in the
-`[major, minor, patch]` tuple. In other words, this allows patch and
-minor updates for versions `1.0.0` and above, patch updates for
-versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
-
-Many authors treat a `0.x` version as if the `x` were the major
-"breaking-change" indicator.
-
-Caret ranges are ideal when an author may make breaking changes
-between `0.2.4` and `0.3.0` releases, which is a common practice.
-However, it presumes that there will *not* be breaking changes between
-`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
-additive (but non-breaking), according to commonly observed practices.
-
-* `^1.2.3` := `>=1.2.3 <2.0.0`
-* `^0.2.3` := `>=0.2.3 <0.3.0`
-* `^0.0.3` := `>=0.0.3 <0.0.4`
-* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
- `0.0.3` version *only* will be allowed, if they are greater than or
- equal to `beta`. So, `0.0.3-pr.2` would be allowed.
-
-When parsing caret ranges, a missing `patch` value desugars to the
-number `0`, but will allow flexibility within that value, even if the
-major and minor versions are both `0`.
-
-* `^1.2.x` := `>=1.2.0 <2.0.0`
-* `^0.0.x` := `>=0.0.0 <0.1.0`
-* `^0.0` := `>=0.0.0 <0.1.0`
-
-Missing `minor` and `patch` values will desugar to zero, but also
-allow flexibility within those values, even if the major version is
-zero.
-
-* `^1.x` := `>=1.0.0 <2.0.0`
-* `^0.x` := `>=0.0.0 <1.0.0`
-
-### Range Grammar
-
-Putting all this together, here is a Backus-Naur grammar for ranges,
-for the benefit of parser authors:
-
-```bnf
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
-```
-
-## Functions
-
-All methods and classes take a final `options` object argument. All
-options in this object are `false` by default. The options supported
-are:
-
-- `loose` Be more forgiving about not-quite-valid semver strings.
- (Any resulting output will always be 100% strict compliant, of
- course.) For backwards compatibility reasons, if the `options`
- argument is a boolean value instead of an object, it is interpreted
- to be the `loose` param.
-- `includePrerelease` Set to suppress the [default
- behavior](https://github.com/npm/node-semver#prerelease-tags) of
- excluding prerelease tagged versions from ranges unless they are
- explicitly opted into.
-
-Strict-mode Comparators and Ranges will be strict about the SemVer
-strings that they parse.
-
-* `valid(v)`: Return the parsed version, or null if it's not valid.
-* `inc(v, release)`: Return the version incremented by the release
- type (`major`, `premajor`, `minor`, `preminor`, `patch`,
- `prepatch`, or `prerelease`), or null if it's not valid
- * `premajor` in one call will bump the version up to the next major
- version and down to a prerelease of that major version.
- `preminor`, and `prepatch` work the same way.
- * If called from a non-prerelease version, the `prerelease` will work the
- same as `prepatch`. It increments the patch version, then makes a
- prerelease. If the input version is already a prerelease it simply
- increments it.
-* `prerelease(v)`: Returns an array of prerelease components, or null
- if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
-* `major(v)`: Return the major version number.
-* `minor(v)`: Return the minor version number.
-* `patch(v)`: Return the patch version number.
-* `intersects(r1, r2, loose)`: Return true if the two supplied ranges
- or comparators intersect.
-* `parse(v)`: Attempt to parse a string as a semantic version, returning either
- a `SemVer` object or `null`.
-
-### Comparison
-
-* `gt(v1, v2)`: `v1 > v2`
-* `gte(v1, v2)`: `v1 >= v2`
-* `lt(v1, v2)`: `v1 < v2`
-* `lte(v1, v2)`: `v1 <= v2`
-* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
- even if they're not the exact same string. You already know how to
- compare strings.
-* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
-* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
- the corresponding function above. `"==="` and `"!=="` do simple
- string comparison, but are included for completeness. Throws if an
- invalid comparison string is provided.
-* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
- `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
-* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
- in descending order when passed to `Array.sort()`.
-* `diff(v1, v2)`: Returns difference between two versions by the release type
- (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
- or null if the versions are the same.
-
-### Comparators
-
-* `intersects(comparator)`: Return true if the comparators intersect
-
-### Ranges
-
-* `validRange(range)`: Return the valid range or null if it's not valid
-* `satisfies(version, range)`: Return true if the version satisfies the
- range.
-* `maxSatisfying(versions, range)`: Return the highest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minSatisfying(versions, range)`: Return the lowest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minVersion(range)`: Return the lowest version that can possibly match
- the given range.
-* `gtr(version, range)`: Return `true` if version is greater than all the
- versions possible in the range.
-* `ltr(version, range)`: Return `true` if version is less than all the
- versions possible in the range.
-* `outside(version, range, hilo)`: Return true if the version is outside
- the bounds of the range in either the high or low direction. The
- `hilo` argument must be either the string `'>'` or `'<'`. (This is
- the function called by `gtr` and `ltr`.)
-* `intersects(range)`: Return true if any of the ranges comparators intersect
-
-Note that, since ranges may be non-contiguous, a version might not be
-greater than a range, less than a range, *or* satisfy a range! For
-example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
-until `2.0.0`, so the version `1.2.10` would not be greater than the
-range (because `2.0.1` satisfies, which is higher), nor less than the
-range (since `1.2.8` satisfies, which is lower), and it also does not
-satisfy the range.
-
-If you want to know if a version satisfies or does not satisfy a
-range, use the `satisfies(version, range)` function.
-
-### Coercion
-
-* `coerce(version)`: Coerces a string to semver if possible
-
-This aims to provide a very forgiving translation of a non-semver string to
-semver. It looks for the first digit in a string, and consumes all
-remaining characters which satisfy at least a partial semver (e.g., `1`,
-`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer
-versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All
-surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes
-`3.4.0`). Only text which lacks digits will fail coercion (`version one`
-is not valid). The maximum length for any semver component considered for
-coercion is 16 characters; longer components will be ignored
-(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any
-semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value
-components are invalid (`9999999999999999.4.7.4` is likely invalid).
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/bin/semver b/node_modules/libcipm/node_modules/pacote/node_modules/semver/bin/semver
deleted file mode 100755
index 801e77f13..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/bin/semver
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env node
-// Standalone semver comparison program.
-// Exits successfully and prints matching version(s) if
-// any supplied version is valid and passes all tests.
-
-var argv = process.argv.slice(2)
-
-var versions = []
-
-var range = []
-
-var inc = null
-
-var version = require('../package.json').version
-
-var loose = false
-
-var includePrerelease = false
-
-var coerce = false
-
-var identifier
-
-var semver = require('../semver')
-
-var reverse = false
-
-var options = {}
-
-main()
-
-function main () {
- if (!argv.length) return help()
- while (argv.length) {
- var a = argv.shift()
- var indexOfEqualSign = a.indexOf('=')
- if (indexOfEqualSign !== -1) {
- a = a.slice(0, indexOfEqualSign)
- argv.unshift(a.slice(indexOfEqualSign + 1))
- }
- switch (a) {
- case '-rv': case '-rev': case '--rev': case '--reverse':
- reverse = true
- break
- case '-l': case '--loose':
- loose = true
- break
- case '-p': case '--include-prerelease':
- includePrerelease = true
- break
- case '-v': case '--version':
- versions.push(argv.shift())
- break
- case '-i': case '--inc': case '--increment':
- switch (argv[0]) {
- case 'major': case 'minor': case 'patch': case 'prerelease':
- case 'premajor': case 'preminor': case 'prepatch':
- inc = argv.shift()
- break
- default:
- inc = 'patch'
- break
- }
- break
- case '--preid':
- identifier = argv.shift()
- break
- case '-r': case '--range':
- range.push(argv.shift())
- break
- case '-c': case '--coerce':
- coerce = true
- break
- case '-h': case '--help': case '-?':
- return help()
- default:
- versions.push(a)
- break
- }
- }
-
- var options = { loose: loose, includePrerelease: includePrerelease }
-
- versions = versions.map(function (v) {
- return coerce ? (semver.coerce(v) || { version: v }).version : v
- }).filter(function (v) {
- return semver.valid(v)
- })
- if (!versions.length) return fail()
- if (inc && (versions.length !== 1 || range.length)) { return failInc() }
-
- for (var i = 0, l = range.length; i < l; i++) {
- versions = versions.filter(function (v) {
- return semver.satisfies(v, range[i], options)
- })
- if (!versions.length) return fail()
- }
- return success(versions)
-}
-
-function failInc () {
- console.error('--inc can only be used on a single version with no range')
- fail()
-}
-
-function fail () { process.exit(1) }
-
-function success () {
- var compare = reverse ? 'rcompare' : 'compare'
- versions.sort(function (a, b) {
- return semver[compare](a, b, options)
- }).map(function (v) {
- return semver.clean(v, options)
- }).map(function (v) {
- return inc ? semver.inc(v, inc, options, identifier) : v
- }).forEach(function (v, i, _) { console.log(v) })
-}
-
-function help () {
- console.log(['SemVer ' + version,
- '',
- 'A JavaScript implementation of the https://semver.org/ specification',
- 'Copyright Isaac Z. Schlueter',
- '',
- 'Usage: semver [options] <version> [<version> [...]]',
- 'Prints valid versions sorted by SemVer precedence',
- '',
- 'Options:',
- '-r --range <range>',
- ' Print versions that match the specified range.',
- '',
- '-i --increment [<level>]',
- ' Increment a version by the specified level. Level can',
- ' be one of: major, minor, patch, premajor, preminor,',
- " prepatch, or prerelease. Default level is 'patch'.",
- ' Only one version may be specified.',
- '',
- '--preid <identifier>',
- ' Identifier to be used to prefix premajor, preminor,',
- ' prepatch or prerelease version increments.',
- '',
- '-l --loose',
- ' Interpret versions and ranges loosely',
- '',
- '-p --include-prerelease',
- ' Always include prerelease versions in range matching',
- '',
- '-c --coerce',
- ' Coerce a string into SemVer if possible',
- ' (does not imply --loose)',
- '',
- 'Program exits successfully if any valid version satisfies',
- 'all supplied ranges, and prints all satisfying versions.',
- '',
- 'If no satisfying versions are found, then exits failure.',
- '',
- 'Versions are printed in ascending order, so supplying',
- 'multiple versions to the utility will just sort them.'
- ].join('\n'))
-}
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/package.json b/node_modules/libcipm/node_modules/pacote/node_modules/semver/package.json
deleted file mode 100644
index f56db5538..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "_from": "semver@^5.6.0",
- "_id": "semver@5.7.1",
- "_inBundle": false,
- "_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
- "_location": "/libcipm/pacote/semver",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "semver@^5.6.0",
- "name": "semver",
- "escapedName": "semver",
- "rawSpec": "^5.6.0",
- "saveSpec": null,
- "fetchSpec": "^5.6.0"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "_shasum": "a954f931aeba508d307bbf069eff0c01c96116f7",
- "_spec": "semver@^5.6.0",
- "_where": "/Users/mperrotte/npminc/cli/node_modules/libcipm/node_modules/pacote",
- "bin": {
- "semver": "bin/semver"
- },
- "bugs": {
- "url": "https://github.com/npm/node-semver/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "The semantic version parser used by npm.",
- "devDependencies": {
- "tap": "^13.0.0-rc.18"
- },
- "files": [
- "bin",
- "range.bnf",
- "semver.js"
- ],
- "homepage": "https://github.com/npm/node-semver#readme",
- "license": "ISC",
- "main": "semver.js",
- "name": "semver",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-semver.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "check-coverage": true
- },
- "version": "5.7.1"
-}
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/range.bnf b/node_modules/libcipm/node_modules/pacote/node_modules/semver/range.bnf
deleted file mode 100644
index d4c6ae0d7..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/range.bnf
+++ /dev/null
@@ -1,16 +0,0 @@
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | [1-9] ( [0-9] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
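The range.bnf grammar removed above is what `Range#parseRange` in the bundled semver.js accepts: hyphen ranges, x-ranges, tildes, carets, primitives, and `||`-joined alternatives. A rough sketch checking a few made-up strings against it via `semver.validRange`:

```javascript
// Rough sketch: validRange() normalizes strings accepted by the BNF above
// and returns null for anything it rejects. Sample ranges are illustrative.
const semver = require('semver')

const samples = ['1.2.3 - 2.3.4', '1.2.x', '>=1.2.9 <2.0.0 || >=2.5.0', 'not a range']
for (const r of samples) {
  console.log(JSON.stringify(r), '=>', semver.validRange(r))
}
// '1.2.3 - 2.3.4'             => '>=1.2.3 <=2.3.4'
// '1.2.x'                     => '>=1.2.0 <1.3.0'
// '>=1.2.9 <2.0.0 || >=2.5.0' => '>=1.2.9 <2.0.0||>=2.5.0'
// 'not a range'               => null
```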
diff --git a/node_modules/libcipm/node_modules/pacote/node_modules/semver/semver.js b/node_modules/libcipm/node_modules/pacote/node_modules/semver/semver.js
deleted file mode 100644
index d315d5d68..000000000
--- a/node_modules/libcipm/node_modules/pacote/node_modules/semver/semver.js
+++ /dev/null
@@ -1,1483 +0,0 @@
-exports = module.exports = SemVer
-
-var debug
-/* istanbul ignore next */
-if (typeof process === 'object' &&
- process.env &&
- process.env.NODE_DEBUG &&
- /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
- debug = function () {
- var args = Array.prototype.slice.call(arguments, 0)
- args.unshift('SEMVER')
- console.log.apply(console, args)
- }
-} else {
- debug = function () {}
-}
-
-// Note: this is the semver.org version of the spec that it implements
-// Not necessarily the package version of this code.
-exports.SEMVER_SPEC_VERSION = '2.0.0'
-
-var MAX_LENGTH = 256
-var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
- /* istanbul ignore next */ 9007199254740991
-
-// Max safe segment length for coercion.
-var MAX_SAFE_COMPONENT_LENGTH = 16
-
-// The actual regexps go on exports.re
-var re = exports.re = []
-var src = exports.src = []
-var R = 0
-
-// The following Regular Expressions can be used for tokenizing,
-// validating, and parsing SemVer version strings.
-
-// ## Numeric Identifier
-// A single `0`, or a non-zero digit followed by zero or more digits.
-
-var NUMERICIDENTIFIER = R++
-src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
-var NUMERICIDENTIFIERLOOSE = R++
-src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
-
-// ## Non-numeric Identifier
-// Zero or more digits, followed by a letter or hyphen, and then zero or
-// more letters, digits, or hyphens.
-
-var NONNUMERICIDENTIFIER = R++
-src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
-
-// ## Main Version
-// Three dot-separated numeric identifiers.
-
-var MAINVERSION = R++
-src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')'
-
-var MAINVERSIONLOOSE = R++
-src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
-
-// ## Pre-release Version Identifier
-// A numeric identifier, or a non-numeric identifier.
-
-var PRERELEASEIDENTIFIER = R++
-src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-var PRERELEASEIDENTIFIERLOOSE = R++
-src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
-
-// ## Pre-release Version
-// Hyphen, followed by one or more dot-separated pre-release version
-// identifiers.
-
-var PRERELEASE = R++
-src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
- '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
-
-var PRERELEASELOOSE = R++
-src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
- '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
-
-// ## Build Metadata Identifier
-// Any combination of digits, letters, or hyphens.
-
-var BUILDIDENTIFIER = R++
-src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
-
-// ## Build Metadata
-// Plus sign, followed by one or more period-separated build metadata
-// identifiers.
-
-var BUILD = R++
-src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
- '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
-
-// ## Full Version String
-// A main version, followed optionally by a pre-release version and
-// build metadata.
-
-// Note that only the major, minor, patch, and pre-release sections of
-// the version string are capturing groups. The build metadata is not a
-// capturing group, because it should not ever be used in version
-// comparison.
-
-var FULL = R++
-var FULLPLAIN = 'v?' + src[MAINVERSION] +
- src[PRERELEASE] + '?' +
- src[BUILD] + '?'
-
-src[FULL] = '^' + FULLPLAIN + '$'
-
-// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
-// common in the npm registry.
-var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
- src[PRERELEASELOOSE] + '?' +
- src[BUILD] + '?'
-
-var LOOSE = R++
-src[LOOSE] = '^' + LOOSEPLAIN + '$'
-
-var GTLT = R++
-src[GTLT] = '((?:<|>)?=?)'
-
-// Something like "2.*" or "1.2.x".
-// Note that "x.x" is a valid xRange identifer, meaning "any version"
-// Only the first item is strictly required.
-var XRANGEIDENTIFIERLOOSE = R++
-src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
-var XRANGEIDENTIFIER = R++
-src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
-
-var XRANGEPLAIN = R++
-src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:' + src[PRERELEASE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGEPLAINLOOSE = R++
-src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:' + src[PRERELEASELOOSE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
-
-var XRANGE = R++
-src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
-var XRANGELOOSE = R++
-src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
-
-// Coercion.
-// Extract anything that could conceivably be a part of a valid semver
-var COERCE = R++
-src[COERCE] = '(?:^|[^\\d])' +
- '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:$|[^\\d])'
-
-// Tilde ranges.
-// Meaning is "reasonably at or greater than"
-var LONETILDE = R++
-src[LONETILDE] = '(?:~>?)'
-
-var TILDETRIM = R++
-src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
-re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
-var tildeTrimReplace = '$1~'
-
-var TILDE = R++
-src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
-var TILDELOOSE = R++
-src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
-
-// Caret ranges.
-// Meaning is "at least and backwards compatible with"
-var LONECARET = R++
-src[LONECARET] = '(?:\\^)'
-
-var CARETTRIM = R++
-src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
-re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
-var caretTrimReplace = '$1^'
-
-var CARET = R++
-src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
-var CARETLOOSE = R++
-src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
-
-// A simple gt/lt/eq thing, or just "" to indicate "any version"
-var COMPARATORLOOSE = R++
-src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
-var COMPARATOR = R++
-src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
-
-// An expression to strip any whitespace between the gtlt and the thing
-// it modifies, so that `> 1.2.3` ==> `>1.2.3`
-var COMPARATORTRIM = R++
-src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
- '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
-
-// this one has to use the /g flag
-re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
-var comparatorTrimReplace = '$1$2$3'
-
-// Something like `1.2.3 - 1.2.4`
-// Note that these all use the loose form, because they'll be
-// checked against either the strict or loose comparator form
-// later.
-var HYPHENRANGE = R++
-src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAIN] + ')' +
- '\\s*$'
-
-var HYPHENRANGELOOSE = R++
-src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s*$'
-
-// Star ranges basically just allow anything at all.
-var STAR = R++
-src[STAR] = '(<|>)?=?\\s*\\*'
-
-// Compile to actual regexp objects.
-// All are flag-free, unless they were created above with a flag.
-for (var i = 0; i < R; i++) {
- debug(i, src[i])
- if (!re[i]) {
- re[i] = new RegExp(src[i])
- }
-}
-
-exports.parse = parse
-function parse (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- if (version.length > MAX_LENGTH) {
- return null
- }
-
- var r = options.loose ? re[LOOSE] : re[FULL]
- if (!r.test(version)) {
- return null
- }
-
- try {
- return new SemVer(version, options)
- } catch (er) {
- return null
- }
-}
-
-exports.valid = valid
-function valid (version, options) {
- var v = parse(version, options)
- return v ? v.version : null
-}
-
-exports.clean = clean
-function clean (version, options) {
- var s = parse(version.trim().replace(/^[=v]+/, ''), options)
- return s ? s.version : null
-}
-
-exports.SemVer = SemVer
-
-function SemVer (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
- if (version instanceof SemVer) {
- if (version.loose === options.loose) {
- return version
- } else {
- version = version.version
- }
- } else if (typeof version !== 'string') {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- if (version.length > MAX_LENGTH) {
- throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
- }
-
- if (!(this instanceof SemVer)) {
- return new SemVer(version, options)
- }
-
- debug('SemVer', version, options)
- this.options = options
- this.loose = !!options.loose
-
- var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
-
- if (!m) {
- throw new TypeError('Invalid Version: ' + version)
- }
-
- this.raw = version
-
- // these are actually numbers
- this.major = +m[1]
- this.minor = +m[2]
- this.patch = +m[3]
-
- if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
- throw new TypeError('Invalid major version')
- }
-
- if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
- throw new TypeError('Invalid minor version')
- }
-
- if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
- throw new TypeError('Invalid patch version')
- }
-
- // numberify any prerelease numeric ids
- if (!m[4]) {
- this.prerelease = []
- } else {
- this.prerelease = m[4].split('.').map(function (id) {
- if (/^[0-9]+$/.test(id)) {
- var num = +id
- if (num >= 0 && num < MAX_SAFE_INTEGER) {
- return num
- }
- }
- return id
- })
- }
-
- this.build = m[5] ? m[5].split('.') : []
- this.format()
-}
-
-SemVer.prototype.format = function () {
- this.version = this.major + '.' + this.minor + '.' + this.patch
- if (this.prerelease.length) {
- this.version += '-' + this.prerelease.join('.')
- }
- return this.version
-}
-
-SemVer.prototype.toString = function () {
- return this.version
-}
-
-SemVer.prototype.compare = function (other) {
- debug('SemVer.compare', this.version, this.options, other)
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return this.compareMain(other) || this.comparePre(other)
-}
-
-SemVer.prototype.compareMain = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- return compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch)
-}
-
-SemVer.prototype.comparePre = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
-
- // NOT having a prerelease is > having one
- if (this.prerelease.length && !other.prerelease.length) {
- return -1
- } else if (!this.prerelease.length && other.prerelease.length) {
- return 1
- } else if (!this.prerelease.length && !other.prerelease.length) {
- return 0
- }
-
- var i = 0
- do {
- var a = this.prerelease[i]
- var b = other.prerelease[i]
- debug('prerelease compare', i, a, b)
- if (a === undefined && b === undefined) {
- return 0
- } else if (b === undefined) {
- return 1
- } else if (a === undefined) {
- return -1
- } else if (a === b) {
- continue
- } else {
- return compareIdentifiers(a, b)
- }
- } while (++i)
-}
-
-// preminor will bump the version up to the next minor release, and immediately
-// down to pre-release. premajor and prepatch work the same way.
-SemVer.prototype.inc = function (release, identifier) {
- switch (release) {
- case 'premajor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor = 0
- this.major++
- this.inc('pre', identifier)
- break
- case 'preminor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor++
- this.inc('pre', identifier)
- break
- case 'prepatch':
- // If this is already a prerelease, it will bump to the next version
- // drop any prereleases that might already exist, since they are not
- // relevant at this point.
- this.prerelease.length = 0
- this.inc('patch', identifier)
- this.inc('pre', identifier)
- break
- // If the input is a non-prerelease version, this acts the same as
- // prepatch.
- case 'prerelease':
- if (this.prerelease.length === 0) {
- this.inc('patch', identifier)
- }
- this.inc('pre', identifier)
- break
-
- case 'major':
- // If this is a pre-major version, bump up to the same major version.
- // Otherwise increment major.
- // 1.0.0-5 bumps to 1.0.0
- // 1.1.0 bumps to 2.0.0
- if (this.minor !== 0 ||
- this.patch !== 0 ||
- this.prerelease.length === 0) {
- this.major++
- }
- this.minor = 0
- this.patch = 0
- this.prerelease = []
- break
- case 'minor':
- // If this is a pre-minor version, bump up to the same minor version.
- // Otherwise increment minor.
- // 1.2.0-5 bumps to 1.2.0
- // 1.2.1 bumps to 1.3.0
- if (this.patch !== 0 || this.prerelease.length === 0) {
- this.minor++
- }
- this.patch = 0
- this.prerelease = []
- break
- case 'patch':
- // If this is not a pre-release version, it will increment the patch.
- // If it is a pre-release it will bump up to the same patch version.
- // 1.2.0-5 patches to 1.2.0
- // 1.2.0 patches to 1.2.1
- if (this.prerelease.length === 0) {
- this.patch++
- }
- this.prerelease = []
- break
- // This probably shouldn't be used publicly.
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
- case 'pre':
- if (this.prerelease.length === 0) {
- this.prerelease = [0]
- } else {
- var i = this.prerelease.length
- while (--i >= 0) {
- if (typeof this.prerelease[i] === 'number') {
- this.prerelease[i]++
- i = -2
- }
- }
- if (i === -1) {
- // didn't increment anything
- this.prerelease.push(0)
- }
- }
- if (identifier) {
- // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
- // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
- if (this.prerelease[0] === identifier) {
- if (isNaN(this.prerelease[1])) {
- this.prerelease = [identifier, 0]
- }
- } else {
- this.prerelease = [identifier, 0]
- }
- }
- break
-
- default:
- throw new Error('invalid increment argument: ' + release)
- }
- this.format()
- this.raw = this.version
- return this
-}
-
-exports.inc = inc
-function inc (version, release, loose, identifier) {
- if (typeof (loose) === 'string') {
- identifier = loose
- loose = undefined
- }
-
- try {
- return new SemVer(version, loose).inc(release, identifier).version
- } catch (er) {
- return null
- }
-}
-
-exports.diff = diff
-function diff (version1, version2) {
- if (eq(version1, version2)) {
- return null
- } else {
- var v1 = parse(version1)
- var v2 = parse(version2)
- var prefix = ''
- if (v1.prerelease.length || v2.prerelease.length) {
- prefix = 'pre'
- var defaultResult = 'prerelease'
- }
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return prefix + key
- }
- }
- }
- return defaultResult // may be undefined
- }
-}
-
-exports.compareIdentifiers = compareIdentifiers
-
-var numeric = /^[0-9]+$/
-function compareIdentifiers (a, b) {
- var anum = numeric.test(a)
- var bnum = numeric.test(b)
-
- if (anum && bnum) {
- a = +a
- b = +b
- }
-
- return a === b ? 0
- : (anum && !bnum) ? -1
- : (bnum && !anum) ? 1
- : a < b ? -1
- : 1
-}
-
-exports.rcompareIdentifiers = rcompareIdentifiers
-function rcompareIdentifiers (a, b) {
- return compareIdentifiers(b, a)
-}
-
-exports.major = major
-function major (a, loose) {
- return new SemVer(a, loose).major
-}
-
-exports.minor = minor
-function minor (a, loose) {
- return new SemVer(a, loose).minor
-}
-
-exports.patch = patch
-function patch (a, loose) {
- return new SemVer(a, loose).patch
-}
-
-exports.compare = compare
-function compare (a, b, loose) {
- return new SemVer(a, loose).compare(new SemVer(b, loose))
-}
-
-exports.compareLoose = compareLoose
-function compareLoose (a, b) {
- return compare(a, b, true)
-}
-
-exports.rcompare = rcompare
-function rcompare (a, b, loose) {
- return compare(b, a, loose)
-}
-
-exports.sort = sort
-function sort (list, loose) {
- return list.sort(function (a, b) {
- return exports.compare(a, b, loose)
- })
-}
-
-exports.rsort = rsort
-function rsort (list, loose) {
- return list.sort(function (a, b) {
- return exports.rcompare(a, b, loose)
- })
-}
-
-exports.gt = gt
-function gt (a, b, loose) {
- return compare(a, b, loose) > 0
-}
-
-exports.lt = lt
-function lt (a, b, loose) {
- return compare(a, b, loose) < 0
-}
-
-exports.eq = eq
-function eq (a, b, loose) {
- return compare(a, b, loose) === 0
-}
-
-exports.neq = neq
-function neq (a, b, loose) {
- return compare(a, b, loose) !== 0
-}
-
-exports.gte = gte
-function gte (a, b, loose) {
- return compare(a, b, loose) >= 0
-}
-
-exports.lte = lte
-function lte (a, b, loose) {
- return compare(a, b, loose) <= 0
-}
-
-exports.cmp = cmp
-function cmp (a, op, b, loose) {
- switch (op) {
- case '===':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a === b
-
- case '!==':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a !== b
-
- case '':
- case '=':
- case '==':
- return eq(a, b, loose)
-
- case '!=':
- return neq(a, b, loose)
-
- case '>':
- return gt(a, b, loose)
-
- case '>=':
- return gte(a, b, loose)
-
- case '<':
- return lt(a, b, loose)
-
- case '<=':
- return lte(a, b, loose)
-
- default:
- throw new TypeError('Invalid operator: ' + op)
- }
-}
-
-exports.Comparator = Comparator
-function Comparator (comp, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (comp instanceof Comparator) {
- if (comp.loose === !!options.loose) {
- return comp
- } else {
- comp = comp.value
- }
- }
-
- if (!(this instanceof Comparator)) {
- return new Comparator(comp, options)
- }
-
- debug('comparator', comp, options)
- this.options = options
- this.loose = !!options.loose
- this.parse(comp)
-
- if (this.semver === ANY) {
- this.value = ''
- } else {
- this.value = this.operator + this.semver.version
- }
-
- debug('comp', this)
-}
-
-var ANY = {}
-Comparator.prototype.parse = function (comp) {
- var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var m = comp.match(r)
-
- if (!m) {
- throw new TypeError('Invalid comparator: ' + comp)
- }
-
- this.operator = m[1]
- if (this.operator === '=') {
- this.operator = ''
- }
-
- // if it literally is just '>' or '' then allow anything.
- if (!m[2]) {
- this.semver = ANY
- } else {
- this.semver = new SemVer(m[2], this.options.loose)
- }
-}
-
-Comparator.prototype.toString = function () {
- return this.value
-}
-
-Comparator.prototype.test = function (version) {
- debug('Comparator.test', version, this.options.loose)
-
- if (this.semver === ANY) {
- return true
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- return cmp(version, this.operator, this.semver, this.options)
-}
-
-Comparator.prototype.intersects = function (comp, options) {
- if (!(comp instanceof Comparator)) {
- throw new TypeError('a Comparator is required')
- }
-
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- var rangeTmp
-
- if (this.operator === '') {
- rangeTmp = new Range(comp.value, options)
- return satisfies(this.value, rangeTmp, options)
- } else if (comp.operator === '') {
- rangeTmp = new Range(this.value, options)
- return satisfies(comp.semver, rangeTmp, options)
- }
-
- var sameDirectionIncreasing =
- (this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '>=' || comp.operator === '>')
- var sameDirectionDecreasing =
- (this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '<=' || comp.operator === '<')
- var sameSemVer = this.semver.version === comp.semver.version
- var differentDirectionsInclusive =
- (this.operator === '>=' || this.operator === '<=') &&
- (comp.operator === '>=' || comp.operator === '<=')
- var oppositeDirectionsLessThan =
- cmp(this.semver, '<', comp.semver, options) &&
- ((this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '<=' || comp.operator === '<'))
- var oppositeDirectionsGreaterThan =
- cmp(this.semver, '>', comp.semver, options) &&
- ((this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '>=' || comp.operator === '>'))
-
- return sameDirectionIncreasing || sameDirectionDecreasing ||
- (sameSemVer && differentDirectionsInclusive) ||
- oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
-}
-
-exports.Range = Range
-function Range (range, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
-
- if (range instanceof Range) {
- if (range.loose === !!options.loose &&
- range.includePrerelease === !!options.includePrerelease) {
- return range
- } else {
- return new Range(range.raw, options)
- }
- }
-
- if (range instanceof Comparator) {
- return new Range(range.value, options)
- }
-
- if (!(this instanceof Range)) {
- return new Range(range, options)
- }
-
- this.options = options
- this.loose = !!options.loose
- this.includePrerelease = !!options.includePrerelease
-
- // First, split based on boolean or ||
- this.raw = range
- this.set = range.split(/\s*\|\|\s*/).map(function (range) {
- return this.parseRange(range.trim())
- }, this).filter(function (c) {
- // throw out any that are not relevant for whatever reason
- return c.length
- })
-
- if (!this.set.length) {
- throw new TypeError('Invalid SemVer Range: ' + range)
- }
-
- this.format()
-}
-
-Range.prototype.format = function () {
- this.range = this.set.map(function (comps) {
- return comps.join(' ').trim()
- }).join('||').trim()
- return this.range
-}
-
-Range.prototype.toString = function () {
- return this.range
-}
-
-Range.prototype.parseRange = function (range) {
- var loose = this.options.loose
- range = range.trim()
- // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
- var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
- range = range.replace(hr, hyphenReplace)
- debug('hyphen replace', range)
- // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
- range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
- debug('comparator trim', range, re[COMPARATORTRIM])
-
- // `~ 1.2.3` => `~1.2.3`
- range = range.replace(re[TILDETRIM], tildeTrimReplace)
-
- // `^ 1.2.3` => `^1.2.3`
- range = range.replace(re[CARETTRIM], caretTrimReplace)
-
- // normalize spaces
- range = range.split(/\s+/).join(' ')
-
- // At this point, the range is completely trimmed and
- // ready to be split into comparators.
-
- var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var set = range.split(' ').map(function (comp) {
- return parseComparator(comp, this.options)
- }, this).join(' ').split(/\s+/)
- if (this.options.loose) {
- // in loose mode, throw out any that are not valid comparators
- set = set.filter(function (comp) {
- return !!comp.match(compRe)
- })
- }
- set = set.map(function (comp) {
- return new Comparator(comp, this.options)
- }, this)
-
- return set
-}
-
-Range.prototype.intersects = function (range, options) {
- if (!(range instanceof Range)) {
- throw new TypeError('a Range is required')
- }
-
- return this.set.some(function (thisComparators) {
- return thisComparators.every(function (thisComparator) {
- return range.set.some(function (rangeComparators) {
- return rangeComparators.every(function (rangeComparator) {
- return thisComparator.intersects(rangeComparator, options)
- })
- })
- })
- })
-}
-
-// Mostly just for testing and legacy API reasons
-exports.toComparators = toComparators
-function toComparators (range, options) {
- return new Range(range, options).set.map(function (comp) {
- return comp.map(function (c) {
- return c.value
- }).join(' ').trim().split(' ')
- })
-}
-
-// comprised of xranges, tildes, stars, and gtlt's at this point.
-// already replaced the hyphen ranges
-// turn into a set of JUST comparators.
-function parseComparator (comp, options) {
- debug('comp', comp, options)
- comp = replaceCarets(comp, options)
- debug('caret', comp)
- comp = replaceTildes(comp, options)
- debug('tildes', comp)
- comp = replaceXRanges(comp, options)
- debug('xrange', comp)
- comp = replaceStars(comp, options)
- debug('stars', comp)
- return comp
-}
-
-function isX (id) {
- return !id || id.toLowerCase() === 'x' || id === '*'
-}
-
-// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
-function replaceTildes (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceTilde(comp, options)
- }).join(' ')
-}
-
-function replaceTilde (comp, options) {
- var r = options.loose ? re[TILDELOOSE] : re[TILDE]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('tilde', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- // ~1.2 == >=1.2.0 <1.3.0
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else if (pr) {
- debug('replaceTilde pr', pr)
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- } else {
- // ~1.2.3 == >=1.2.3 <1.3.0
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('tilde return', ret)
- return ret
- })
-}
-
-// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
-function replaceCarets (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceCaret(comp, options)
- }).join(' ')
-}
-
-function replaceCaret (comp, options) {
- debug('caret', comp, options)
- var r = options.loose ? re[CARETLOOSE] : re[CARET]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('caret', comp, _, M, m, p, pr)
- var ret
-
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- if (M === '0') {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else {
- ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
- }
- } else if (pr) {
- debug('replaceCaret pr', pr)
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + (+M + 1) + '.0.0'
- }
- } else {
- debug('no pr')
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + (+M + 1) + '.0.0'
- }
- }
-
- debug('caret return', ret)
- return ret
- })
-}
-
-function replaceXRanges (comp, options) {
- debug('replaceXRanges', comp, options)
- return comp.split(/\s+/).map(function (comp) {
- return replaceXRange(comp, options)
- }).join(' ')
-}
-
-function replaceXRange (comp, options) {
- comp = comp.trim()
- var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
- return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
- debug('xRange', comp, ret, gtlt, M, m, p, pr)
- var xM = isX(M)
- var xm = xM || isX(m)
- var xp = xm || isX(p)
- var anyX = xp
-
- if (gtlt === '=' && anyX) {
- gtlt = ''
- }
-
- if (xM) {
- if (gtlt === '>' || gtlt === '<') {
- // nothing is allowed
- ret = '<0.0.0'
- } else {
- // nothing is forbidden
- ret = '*'
- }
- } else if (gtlt && anyX) {
- // we know patch is an x, because we have any x at all.
- // replace X with 0
- if (xm) {
- m = 0
- }
- p = 0
-
- if (gtlt === '>') {
- // >1 => >=2.0.0
- // >1.2 => >=1.3.0
- // >1.2.3 => >= 1.2.4
- gtlt = '>='
- if (xm) {
- M = +M + 1
- m = 0
- p = 0
- } else {
- m = +m + 1
- p = 0
- }
- } else if (gtlt === '<=') {
- // <=0.7.x is actually <0.8.0, since any 0.7.x should
- // pass. Similarly, <=7.x is actually <8.0.0, etc.
- gtlt = '<'
- if (xm) {
- M = +M + 1
- } else {
- m = +m + 1
- }
- }
-
- ret = gtlt + M + '.' + m + '.' + p
- } else if (xm) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (xp) {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- }
-
- debug('xRange return', ret)
-
- return ret
- })
-}
-
-// Because * is AND-ed with everything else in the comparator,
-// and '' means "any version", just remove the *s entirely.
-function replaceStars (comp, options) {
- debug('replaceStars', comp, options)
- // Looseness is ignored here. star is always as loose as it gets!
- return comp.trim().replace(re[STAR], '')
-}
-
-// This function is passed to string.replace(re[HYPHENRANGE])
-// M, m, patch, prerelease, build
-// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-function hyphenReplace ($0,
- from, fM, fm, fp, fpr, fb,
- to, tM, tm, tp, tpr, tb) {
- if (isX(fM)) {
- from = ''
- } else if (isX(fm)) {
- from = '>=' + fM + '.0.0'
- } else if (isX(fp)) {
- from = '>=' + fM + '.' + fm + '.0'
- } else {
- from = '>=' + from
- }
-
- if (isX(tM)) {
- to = ''
- } else if (isX(tm)) {
- to = '<' + (+tM + 1) + '.0.0'
- } else if (isX(tp)) {
- to = '<' + tM + '.' + (+tm + 1) + '.0'
- } else if (tpr) {
- to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
- } else {
- to = '<=' + to
- }
-
- return (from + ' ' + to).trim()
-}
-
-// if ANY of the sets match ALL of its comparators, then pass
-Range.prototype.test = function (version) {
- if (!version) {
- return false
- }
-
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
-
- for (var i = 0; i < this.set.length; i++) {
- if (testSet(this.set[i], version, this.options)) {
- return true
- }
- }
- return false
-}
-
-function testSet (set, version, options) {
- for (var i = 0; i < set.length; i++) {
- if (!set[i].test(version)) {
- return false
- }
- }
-
- if (version.prerelease.length && !options.includePrerelease) {
- // Find the set of versions that are allowed to have prereleases
- // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
- // That should allow `1.2.3-pr.2` to pass.
- // However, `1.2.4-alpha.notready` should NOT be allowed,
- // even though it's within the range set by the comparators.
- for (i = 0; i < set.length; i++) {
- debug(set[i].semver)
- if (set[i].semver === ANY) {
- continue
- }
-
- if (set[i].semver.prerelease.length > 0) {
- var allowed = set[i].semver
- if (allowed.major === version.major &&
- allowed.minor === version.minor &&
- allowed.patch === version.patch) {
- return true
- }
- }
- }
-
- // Version has a -pre, but it's not one of the ones we like.
- return false
- }
-
- return true
-}
-
-exports.satisfies = satisfies
-function satisfies (version, range, options) {
- try {
- range = new Range(range, options)
- } catch (er) {
- return false
- }
- return range.test(version)
-}
-
-exports.maxSatisfying = maxSatisfying
-function maxSatisfying (versions, range, options) {
- var max = null
- var maxSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!max || maxSV.compare(v) === -1) {
- // compare(max, v, true)
- max = v
- maxSV = new SemVer(max, options)
- }
- }
- })
- return max
-}
-
-exports.minSatisfying = minSatisfying
-function minSatisfying (versions, range, options) {
- var min = null
- var minSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!min || minSV.compare(v) === 1) {
- // compare(min, v, true)
- min = v
- minSV = new SemVer(min, options)
- }
- }
- })
- return min
-}
-
-exports.minVersion = minVersion
-function minVersion (range, loose) {
- range = new Range(range, loose)
-
- var minver = new SemVer('0.0.0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = new SemVer('0.0.0-0')
- if (range.test(minver)) {
- return minver
- }
-
- minver = null
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- comparators.forEach(function (comparator) {
- // Clone to avoid manipulating the comparator's semver object.
- var compver = new SemVer(comparator.semver.version)
- switch (comparator.operator) {
- case '>':
- if (compver.prerelease.length === 0) {
- compver.patch++
- } else {
- compver.prerelease.push(0)
- }
- compver.raw = compver.format()
- /* fallthrough */
- case '':
- case '>=':
- if (!minver || gt(minver, compver)) {
- minver = compver
- }
- break
- case '<':
- case '<=':
- /* Ignore maximum versions */
- break
- /* istanbul ignore next */
- default:
- throw new Error('Unexpected operation: ' + comparator.operator)
- }
- })
- }
-
- if (minver && range.test(minver)) {
- return minver
- }
-
- return null
-}
-
-exports.validRange = validRange
-function validRange (range, options) {
- try {
- // Return '*' instead of '' so that truthiness works.
- // This will throw if it's invalid anyway
- return new Range(range, options).range || '*'
- } catch (er) {
- return null
- }
-}
-
-// Determine if version is less than all the versions possible in the range
-exports.ltr = ltr
-function ltr (version, range, options) {
- return outside(version, range, '<', options)
-}
-
-// Determine if version is greater than all the versions possible in the range.
-exports.gtr = gtr
-function gtr (version, range, options) {
- return outside(version, range, '>', options)
-}
-
-exports.outside = outside
-function outside (version, range, hilo, options) {
- version = new SemVer(version, options)
- range = new Range(range, options)
-
- var gtfn, ltefn, ltfn, comp, ecomp
- switch (hilo) {
- case '>':
- gtfn = gt
- ltefn = lte
- ltfn = lt
- comp = '>'
- ecomp = '>='
- break
- case '<':
- gtfn = lt
- ltefn = gte
- ltfn = gt
- comp = '<'
- ecomp = '<='
- break
- default:
- throw new TypeError('Must provide a hilo val of "<" or ">"')
- }
-
-  // If it satisfies the range it is not outside
- if (satisfies(version, range, options)) {
- return false
- }
-
- // From now on, variable terms are as if we're in "gtr" mode.
- // but note that everything is flipped for the "ltr" function.
-
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
-
- var high = null
- var low = null
-
- comparators.forEach(function (comparator) {
- if (comparator.semver === ANY) {
- comparator = new Comparator('>=0.0.0')
- }
- high = high || comparator
- low = low || comparator
- if (gtfn(comparator.semver, high.semver, options)) {
- high = comparator
- } else if (ltfn(comparator.semver, low.semver, options)) {
- low = comparator
- }
- })
-
-    // If the edge version comparator has an operator then our version
- // isn't outside it
- if (high.operator === comp || high.operator === ecomp) {
- return false
- }
-
- // If the lowest version comparator has an operator and our version
- // is less than it then it isn't higher than the range
- if ((!low.operator || low.operator === comp) &&
- ltefn(version, low.semver)) {
- return false
- } else if (low.operator === ecomp && ltfn(version, low.semver)) {
- return false
- }
- }
- return true
-}
-
-exports.prerelease = prerelease
-function prerelease (version, options) {
- var parsed = parse(version, options)
- return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
-}
-
-exports.intersects = intersects
-function intersects (r1, r2, options) {
- r1 = new Range(r1, options)
- r2 = new Range(r2, options)
- return r1.intersects(r2)
-}
-
-exports.coerce = coerce
-function coerce (version) {
- if (version instanceof SemVer) {
- return version
- }
-
- if (typeof version !== 'string') {
- return null
- }
-
- var match = version.match(re[COERCE])
-
- if (match == null) {
- return null
- }
-
- return parse(match[1] +
- '.' + (match[2] || '0') +
- '.' + (match[3] || '0'))
-}
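The comment tables inside the removed semver.js above document how tilde and caret ranges desugar, how `coerce()` extracts a version from a longer string, and how a prerelease version only matches a range whose comparators carry a prerelease on the same major.minor.patch tuple (unless `includePrerelease` is set). A short illustrative check of those behaviours, with made-up inputs:

```javascript
// Illustrative check of the tilde/caret/coerce/prerelease rules described
// in the comments of the semver.js removed above. Inputs are made up.
const semver = require('semver')

console.log(semver.validRange('~1.2.3'))           // '>=1.2.3 <1.3.0'
console.log(semver.validRange('^1.2.3'))           // '>=1.2.3 <2.0.0'
console.log(semver.validRange('^0.2.5'))           // '>=0.2.5 <0.3.0' (0.x carets stay in the minor)
console.log(semver.coerce('release-2.3').version)  // '2.3.0' (missing parts default to 0)

console.log(semver.satisfies('1.2.4-beta.1', '^1.2.3'))                              // false
console.log(semver.satisfies('1.2.4-beta.1', '^1.2.3', { includePrerelease: true })) // true
console.log(semver.satisfies('1.2.4-beta.1', '>=1.2.4-alpha.0'))                     // true (same tuple has a prerelease)
```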
diff --git a/node_modules/libcipm/node_modules/pacote/package.json b/node_modules/libcipm/node_modules/pacote/package.json
deleted file mode 100644
index 72a152141..000000000
--- a/node_modules/libcipm/node_modules/pacote/package.json
+++ /dev/null
@@ -1,121 +0,0 @@
-{
- "_from": "pacote@^9.1.0",
- "_id": "pacote@9.5.12",
- "_inBundle": false,
- "_integrity": "sha512-BUIj/4kKbwWg4RtnBncXPJd15piFSVNpTzY0rysSr3VnMowTYgkGKcaHrbReepAkjTr8lH2CVWRi58Spg2CicQ==",
- "_location": "/libcipm/pacote",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pacote@^9.1.0",
- "name": "pacote",
- "escapedName": "pacote",
- "rawSpec": "^9.1.0",
- "saveSpec": null,
- "fetchSpec": "^9.1.0"
- },
- "_requiredBy": [
- "/libcipm"
- ],
- "_resolved": "https://registry.npmjs.org/pacote/-/pacote-9.5.12.tgz",
- "_shasum": "1e11dd7a8d736bcc36b375a9804d41bb0377bf66",
- "_spec": "pacote@^9.1.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/pacote/issues"
- },
- "bundleDependencies": false,
- "contributors": [
- {
- "name": "Charlotte Spencer",
- "email": "charlottelaspencer@gmail.com"
- },
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org"
- }
- ],
- "dependencies": {
- "bluebird": "^3.5.3",
- "cacache": "^12.0.2",
- "chownr": "^1.1.2",
- "figgy-pudding": "^3.5.1",
- "get-stream": "^4.1.0",
- "glob": "^7.1.3",
- "infer-owner": "^1.0.4",
- "lru-cache": "^5.1.1",
- "make-fetch-happen": "^5.0.0",
- "minimatch": "^3.0.4",
- "minipass": "^2.3.5",
- "mississippi": "^3.0.0",
- "mkdirp": "^0.5.1",
- "normalize-package-data": "^2.4.0",
- "npm-normalize-package-bin": "^1.0.0",
- "npm-package-arg": "^6.1.0",
- "npm-packlist": "^1.1.12",
- "npm-pick-manifest": "^3.0.0",
- "npm-registry-fetch": "^4.0.0",
- "osenv": "^0.1.5",
- "promise-inflight": "^1.0.1",
- "promise-retry": "^1.1.1",
- "protoduck": "^5.0.1",
- "rimraf": "^2.6.2",
- "safe-buffer": "^5.1.2",
- "semver": "^5.6.0",
- "ssri": "^6.0.1",
- "tar": "^4.4.10",
- "unique-filename": "^1.1.1",
- "which": "^1.3.1"
- },
- "deprecated": false,
- "description": "JavaScript package downloader",
- "devDependencies": {
- "nock": "^10.0.3",
- "npmlog": "^4.1.2",
- "nyc": "^14.1.1",
- "require-inject": "^1.4.3",
- "standard": "^12.0.1",
- "standard-version": "^4.4.0",
- "tacks": "^1.2.7",
- "tap": "^12.7.0",
- "tar-stream": "^1.6.2",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.7"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/npm/pacote#readme",
- "keywords": [
- "packages",
- "npm",
- "git"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "pacote",
- "publishConfig": {
- "tag": "v9-legacy"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/pacote.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "nyc --all -- tap -J test/*.js",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "9.5.12"
-}
diff --git a/node_modules/libcipm/node_modules/pacote/packument.js b/node_modules/libcipm/node_modules/pacote/packument.js
deleted file mode 100644
index 0606b266f..000000000
--- a/node_modules/libcipm/node_modules/pacote/packument.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const fetchPackument = require('./lib/fetch').packument
-const optCheck = require('./lib/util/opt-check')
-const pinflight = require('promise-inflight')
-const npa = require('npm-package-arg')
-
-module.exports = packument
-function packument (spec, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
-
- const label = [
- spec.name,
- spec.saveSpec || spec.fetchSpec,
- spec.type,
- opts.cache,
- opts.registry,
- opts.scope
- ].join(':')
- const startTime = Date.now()
- return pinflight(label, () => {
- return fetchPackument(spec, opts)
- }).then(p => {
- const elapsedTime = Date.now() - startTime
- opts.log.silly('pacote', `${spec.registry ? 'registry' : spec.type} packument for ${spec.name}@${spec.saveSpec || spec.fetchSpec} fetched in ${elapsedTime}ms`)
- return p
- })
-}
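The removed packument.js above fetches a package's registry metadata document (every published version plus dist-tags), dedupes concurrent requests for the same spec with promise-inflight, and logs the fetch time. A hedged usage sketch, assuming pacote 9's exported API; the package name and cache path are illustrative:

```javascript
// Hedged sketch of using the packument() shown above (pacote 9 API).
// Package name and cache directory are illustrative assumptions.
const pacote = require('pacote')

pacote.packument('semver', { cache: '/tmp/pacote-cache' })
  .then(doc => {
    // `doc` is the registry packument: { name, 'dist-tags', versions, ... }
    console.log(doc.name, Object.keys(doc.versions).length, 'published versions')
    console.log('latest:', doc['dist-tags'] && doc['dist-tags'].latest)
  })
  .catch(err => console.error('packument fetch failed:', err.message))
```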
diff --git a/node_modules/libcipm/node_modules/pacote/prefetch.js b/node_modules/libcipm/node_modules/pacote/prefetch.js
deleted file mode 100644
index 9e6b5af12..000000000
--- a/node_modules/libcipm/node_modules/pacote/prefetch.js
+++ /dev/null
@@ -1,64 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const cacache = require('cacache')
-const finished = BB.promisify(require('mississippi').finished)
-const optCheck = require('./lib/util/opt-check')
-const npa = require('npm-package-arg')
-
-module.exports = prefetch
-function prefetch (spec, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
- opts.log.warn('prefetch', 'pacote.prefetch() is deprecated. Please use pacote.tarball() instead.')
- const startTime = Date.now()
- if (!opts.cache) {
- opts.log.info('prefetch', 'skipping prefetch: no cache provided')
- return BB.resolve({ spec })
- }
- if (opts.integrity && !opts.preferOnline) {
- opts.log.silly('prefetch', 'checking if', opts.integrity, 'is already cached')
- return cacache.get.hasContent(opts.cache, opts.integrity).then(info => {
- if (info) {
- opts.log.silly('prefetch', `content already exists for ${spec} (${Date.now() - startTime}ms)`)
- return {
- spec,
- integrity: info.integrity,
- size: info.size,
- byDigest: true
- }
- } else {
- return prefetchByManifest(startTime, spec, opts)
- }
- })
- } else {
- opts.log.silly('prefetch', `no integrity hash provided for ${spec} - fetching by manifest`)
- return prefetchByManifest(startTime, spec, opts)
- }
-}
-
-let fetch
-function prefetchByManifest (start, spec, opts) {
- let manifest
- let integrity
- return BB.resolve().then(() => {
- if (!fetch) {
- fetch = require('./lib/fetch')
- }
- const stream = fetch.tarball(spec, opts)
- if (!stream) { return }
- stream.on('data', function () {})
- stream.on('manifest', m => { manifest = m })
- stream.on('integrity', i => { integrity = i })
- return finished(stream)
- }).then(() => {
- opts.log.silly('prefetch', `${spec} done in ${Date.now() - start}ms`)
- return {
- manifest,
- spec,
- integrity: integrity || (manifest && manifest._integrity),
- byDigest: false
- }
- })
-}
diff --git a/node_modules/libcipm/node_modules/pacote/tarball.js b/node_modules/libcipm/node_modules/pacote/tarball.js
deleted file mode 100644
index e0ad52ab3..000000000
--- a/node_modules/libcipm/node_modules/pacote/tarball.js
+++ /dev/null
@@ -1,67 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const fs = require('fs')
-const getStream = require('get-stream')
-const mkdirp = BB.promisify(require('mkdirp'))
-const npa = require('npm-package-arg')
-const optCheck = require('./lib/util/opt-check.js')
-const PassThrough = require('stream').PassThrough
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const withTarballStream = require('./lib/with-tarball-stream.js')
-
-module.exports = tarball
-function tarball (spec, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
- return withTarballStream(spec, opts, stream => getStream.buffer(stream))
-}
-
-module.exports.stream = tarballStream
-function tarballStream (spec, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
- const output = new PassThrough()
- let hasTouchedOutput = false
- let lastError = null
- withTarballStream(spec, opts, stream => {
- if (hasTouchedOutput && lastError) {
- throw lastError
- } else if (hasTouchedOutput) {
- throw new Error('abort, abort!')
- } else {
- return new BB((resolve, reject) => {
- stream.on('error', reject)
- output.on('error', reject)
- output.on('error', () => { hasTouchedOutput = true })
- output.on('finish', resolve)
- stream.pipe(output)
- stream.once('data', () => { hasTouchedOutput = true })
- }).catch(err => {
- lastError = err
- throw err
- })
- }
- })
- .catch(err => output.emit('error', err))
- return output
-}
-
-module.exports.toFile = tarballToFile
-function tarballToFile (spec, dest, opts) {
- opts = optCheck(opts)
- spec = npa(spec, opts.where)
- return mkdirp(path.dirname(dest))
- .then(() => withTarballStream(spec, opts, stream => {
- return rimraf(dest)
- .then(() => new BB((resolve, reject) => {
- const writer = fs.createWriteStream(dest)
- stream.on('error', reject)
- writer.on('error', reject)
- writer.on('close', resolve)
- stream.pipe(writer)
- }))
- }))
-}
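The removed tarball.js above exposes three flavours: `tarball()` resolves a buffer, `tarball.stream()` returns a PassThrough stream, and `tarball.toFile()` writes the package tarball to disk (the documented replacement for the deprecated `prefetch()` earlier in this diff). A hedged sketch of the toFile variant; the spec, destination, and cache path are illustrative:

```javascript
// Hedged sketch of the tarball.toFile() API shown above (pacote 9).
// Spec, destination path, and cache directory are illustrative assumptions.
const pacote = require('pacote')

pacote.tarball.toFile('semver@^5.6.0', '/tmp/semver.tgz', { cache: '/tmp/pacote-cache' })
  .then(() => console.log('wrote /tmp/semver.tgz'))
  .catch(err => console.error('tarball fetch failed:', err.message))
```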
diff --git a/node_modules/libcipm/node_modules/ssri/CHANGELOG.md b/node_modules/libcipm/node_modules/ssri/CHANGELOG.md
deleted file mode 100644
index d4c589790..000000000
--- a/node_modules/libcipm/node_modules/ssri/CHANGELOG.md
+++ /dev/null
@@ -1,286 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/zkat/ssri/compare/v6.0.0...v6.0.1) (2018-08-27)
-
-
-### Bug Fixes
-
-* **opts:** use figgy-pudding to specify consumed opts ([cf86553](https://github.com/zkat/ssri/commit/cf86553))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/zkat/ssri/compare/v5.3.0...v6.0.0) (2018-04-09)
-
-
-### Bug Fixes
-
-* **docs:** minor typo ([b71ef17](https://github.com/zkat/ssri/commit/b71ef17))
-
-
-### meta
-
-* drop support for node@4 ([d9bf359](https://github.com/zkat/ssri/commit/d9bf359))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="5.3.0"></a>
-# [5.3.0](https://github.com/zkat/ssri/compare/v5.2.4...v5.3.0) (2018-03-13)
-
-
-### Features
-
-* **checkData:** optionally throw when checkData fails ([bf26b84](https://github.com/zkat/ssri/commit/bf26b84))
-
-
-
-<a name="5.2.4"></a>
-## [5.2.4](https://github.com/zkat/ssri/compare/v5.2.3...v5.2.4) (2018-02-16)
-
-
-
-<a name="5.2.3"></a>
-## [5.2.3](https://github.com/zkat/ssri/compare/v5.2.2...v5.2.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **hashes:** filter hash priority list by available hashes ([2fa30b8](https://github.com/zkat/ssri/commit/2fa30b8))
-* **integrityStream:** dedupe algorithms to generate ([d56c654](https://github.com/zkat/ssri/commit/d56c654))
-
-
-
-<a name="5.2.2"></a>
-## [5.2.2](https://github.com/zkat/ssri/compare/v5.2.1...v5.2.2) (2018-02-14)
-
-
-### Bug Fixes
-
-* **security:** tweak strict SRI regex ([#10](https://github.com/zkat/ssri/issues/10)) ([d0ebcdc](https://github.com/zkat/ssri/commit/d0ebcdc))
-
-
-
-<a name="5.2.1"></a>
-## [5.2.1](https://github.com/zkat/ssri/compare/v5.2.0...v5.2.1) (2018-02-06)
-
-
-
-<a name="5.2.0"></a>
-# [5.2.0](https://github.com/zkat/ssri/compare/v5.1.0...v5.2.0) (2018-02-06)
-
-
-### Features
-
-* **match:** add integrity.match() ([3c49cc4](https://github.com/zkat/ssri/commit/3c49cc4))
-
-
-
-<a name="5.1.0"></a>
-# [5.1.0](https://github.com/zkat/ssri/compare/v5.0.0...v5.1.0) (2018-01-18)
-
-
-### Bug Fixes
-
-* **checkStream:** integrityStream now takes opts.integrity algos into account ([d262910](https://github.com/zkat/ssri/commit/d262910))
-
-
-### Features
-
-* **sha3:** do some guesswork about upcoming sha3 ([7fdd9df](https://github.com/zkat/ssri/commit/7fdd9df))
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/ssri/compare/v4.1.6...v5.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#9) ([c82983a](https://github.com/zkat/ssri/commit/c82983a))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="4.1.6"></a>
-## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07)
-
-
-### Bug Fixes
-
-* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe))
-
-
-
-<a name="4.1.5"></a>
-## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05)
-
-
-### Bug Fixes
-
-* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e))
-
-
-
-<a name="4.1.4"></a>
-## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e))
-
-
-
-<a name="4.1.3"></a>
-## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b))
-
-
-
-<a name="4.1.2"></a>
-## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18)
-
-
-### Bug Fixes
-
-* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805))
-
-
-
-<a name="4.1.1"></a>
-## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12)
-
-
-### Bug Fixes
-
-* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e))
-
-
-
-<a name="4.1.0"></a>
-# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07)
-
-
-### Features
-
-* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** should have changed the error code before. oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa))
-
-
-### BREAKING CHANGES
-
-* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors
-
-
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03)
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f))
-
-
-### Features
-
-* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74))
-* **fromHex:** ssri.fromHex to make it easier to generate them from hex values ([049b89e](https://github.com/zkat/ssri/commit/049b89e))
-* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c))
-* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba))
-* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f))
-* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b))
-* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48))
-* **pickAlgorithm:** Integrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796))
-* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad))
-
-
-### BREAKING CHANGES
-
-* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`.
-* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream.
-
-To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same.
-* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match.
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24)
-
-
-### Bug Fixes
-
-* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c))
-
-
-### Features
-
-* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421))
-* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7))
-* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7))
-* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c))
-* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30))
-* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658))
-
-
-### BREAKING CHANGES
-
-* **pickAlgo:** ssri will prioritize specific hashes now
-* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead.
-* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`.
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-03-23)
-
-
-### Features
-
-* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b))
-
-
-### BREAKING CHANGES
-
-* **api:** Initial API established.
diff --git a/node_modules/libcipm/node_modules/ssri/LICENSE.md b/node_modules/libcipm/node_modules/ssri/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/libcipm/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/ssri/README.md b/node_modules/libcipm/node_modules/ssri/README.md
deleted file mode 100644
index c250961bd..000000000
--- a/node_modules/libcipm/node_modules/ssri/README.md
+++ /dev/null
@@ -1,488 +0,0 @@
-# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest)
-
-[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource
-Integrity, is a Node.js utility for parsing, manipulating, serializing,
-generating, and verifying [Subresource
-Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes.
-
-## Install
-
-`$ npm install --save ssri`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * Parsing & Serializing
- * [`parse`](#parse)
- * [`stringify`](#stringify)
- * [`Integrity#concat`](#integrity-concat)
- * [`Integrity#toString`](#integrity-to-string)
- * [`Integrity#toJSON`](#integrity-to-json)
- * [`Integrity#match`](#integrity-match)
- * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm)
- * [`Integrity#hexDigest`](#integrity-hex-digest)
- * Integrity Generation
- * [`fromHex`](#from-hex)
- * [`fromData`](#from-data)
- * [`fromStream`](#from-stream)
- * [`create`](#create)
- * Integrity Verification
- * [`checkData`](#check-data)
- * [`checkStream`](#check-stream)
- * [`integrityStream`](#integrity-stream)
-
-### Example
-
-```javascript
-const ssri = require('ssri')
-
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Parsing and serializing
-const parsed = ssri.parse(integrity)
-ssri.stringify(parsed) // === integrity (works on non-Integrity objects)
-parsed.toString() // === integrity
-
-// Async stream functions
-ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...)
-ssri.fromStream(fs.createReadStream('./my-file')).then(sri => {
- sri.toString() === integrity
-})
-fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri))
-
-// Sync data functions
-ssri.fromData(fs.readFileSync('./my-file')) // === parsed
-ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512'
-```
-
-### Features
-
-* Parses and stringifies SRI strings.
-* Generates SRI strings from raw data or Streams.
-* Strict standard compliance.
-* `?foo` metadata option support.
-* Multiple entries for the same algorithm.
-* Object-based integrity hash manipulation.
-* Small footprint: no dependencies, concise implementation.
-* Full test coverage.
-* Customizable algorithm picker.
-
-### Contributing
-
-The ssri team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity`
-
-Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
-string, a `Hash`-like object with `digest` and `algorithm` fields and an optional
-`options` field, or an `Integrity`-like object. The resulting object will be an
-`Integrity` instance that has this shape:
-
-```javascript
-{
- 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
- 'sha512': [
- {algorithm: 'sha512', digest: 'c0ffee', options: []},
- {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
- ],
-}
-```
-
-If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
-single object that looks like `{algorithm, digest, options}`, as opposed to a
-larger object with multiple of these.
-
-If `opts.strict` is truthy, the resulting object will be filtered such that
-it strictly follows the Subresource Integrity spec, throwing away any entries
-with any invalid components. This also means a restricted set of algorithms
-will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
-
-Strict mode is recommended if the integrity strings are intended for use in
-browsers, or in other situations where strict adherence to the spec is needed.
-
-##### Example
-
-```javascript
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
-```
-
-#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String`
-
-This function is identical to [`Integrity#toString()`](#integrity-to-string),
-except it can be used on _any_ object that [`parse`](#parse) can handle -- that
-is, a string, a `Hash`-like, or an `Integrity`-like.
-
-The `opts.sep` option defines the string to use when joining multiple entries
-together. To be spec-compliant, this _must_ be whitespace. The default is a
-single space (`' '`).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// Useful for cleaning up input SRI strings:
-ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
-// -> 'sha512-foo sha384-bar'
-
-// Hash-like: only a single entry.
-ssri.stringify({
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
-ssri.stringify({
- 'sha512': [
- {
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
- }
- ]
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-```
-
-#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
-
-Concatenates an `Integrity` object with another `Integrity`-like object or an
-integrity string.
-
-This is functionally equivalent to concatenating the string format of both
-integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string.
-
-If `opts.strict` is true, the new `Integrity` will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// This will combine the integrity checks for two different versions of
-// your index.js file so you can use a single integrity string and serve
-// either of these to clients, from a single `<script>` tag.
-const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js'))
-const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js'))
-
-// Note that browsers (and ssri) will succeed as long as ONE of the entries
-// for the *prioritized* algorithm succeeds. That is, in order for this fallback
-// to work, both desktop and mobile *must* use the same `algorithm` values.
-desktopIntegrity.concat(mobileIntegrity)
-```
-
-#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `opts.sep`, which defaults to `' '`.
-
-If you want to serialize an object that didn't come from an `ssri` function,
-use [`ssri.stringify()`](#stringify).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-ssri.parse(integrity).toString() === integrity
-```
-
-#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `' '`.
-
-This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`.
-For more info check out [toJSON() behavior on mdn](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior).
-
-##### Example
-
-```javascript
-const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"'
-
-JSON.stringify(ssri.parse(integrity)) === integrity
-```
-
-#### <a name="integrity-match"></a> `> Integrity#match(sri, [opts]) -> Hash | false`
-
-Returns the matching (truthy) hash if `Integrity` matches the argument passed as
-`sri`, which can be anything that [`parse`](#parse) will accept. `opts` will be
-passed through to `parse` and [`pickAlgorithm()`](#integrity-pick-algorithm).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-
-ssri.parse(integrity).match(integrity)
-// Hash {
-// digest: '9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-// algorithm: 'sha512'
-// }
-
-ssri.parse(integrity).match('sha1-deadbeef')
-// false
-```
-
-#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String`
-
-Returns the "best" algorithm from those available in the integrity object.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512
-```
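-
-If you want to control that priority yourself, pass a custom picker through
-`opts.pickAlgorithm`. A minimal sketch (the priority list here is illustrative,
-not ssri's default):
-
-```javascript
-// Prefer whichever algorithm appears earliest in our own priority list.
-const myPriority = ['sha512', 'sha384', 'sha256', 'sha1']
-const byPriority = (a, b) => myPriority.indexOf(a) <= myPriority.indexOf(b) ? a : b
-
-ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1')
-  .pickAlgorithm({pickAlgorithm: byPriority}) // -> 'sha512'
-```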
-
-#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String`
-
-`Integrity` is assumed to be either a single-hash `Integrity` instance or a
-`Hash` instance. Returns its `digest`, converted from base64 to its hex
-representation.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f'
-```
-
-#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity`
-
-Creates an `Integrity` object with a single entry, based on a hex-formatted
-hash. This is a utility function to help convert existing shasums to the
-Integrity format, and is roughly equivalent to something like:
-
-```javascript
-algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64')
-```
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to the integrity hash generated by
-`fromHex`. This is a loosely-specified feature of SRIs, and currently has no
-specified semantics besides being `?`-separated. Use at your own risk, and
-probably avoid if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-If `opts.single` is true, a single `Hash` object will be returned.
-
-##### Example
-
-```javascript
-ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef'
-```
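-
-Passing `opts.options` appends the `?`-separated option strings to the
-generated entry, for example:
-
-```javascript
-ssri.fromHex('75e69d6de79f', 'sha1', {options: ['foo']}).toString() // 'sha1-deadbeef?foo'
-```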
-
-#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity`
-
-Creates an `Integrity` object from either string or `Buffer` data, calculating
-all the requested hashes and adding any specified options to the object.
-
-`opts.algorithms` determines which algorithms to generate hashes for. All
-results will be included in a single `Integrity` object. The default value for
-`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed
-in `crypto.getHashes()` for the host Node.js platform.
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to all integrity hashes generated by
-`fromData`. This is a loosely-specified feature of SRIs, and currently has no
-specified semantics besides being `?`-separated. Use at your own risk, and
-probably avoid if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrityObj = ssri.fromData('foobarbaz', {
- algorithms: ['sha256', 'sha384', 'sha512']
-})
-integrityObj.toString({sep: '\n'})
-// ->
-// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=
-// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
-
-#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>`
-
-Returns a Promise of an Integrity object calculated by reading data from
-a given `stream`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-Additionally, `opts.Promise` may be passed in to inject a Promise library of
-choice. By default, ssri will use Node's built-in Promises.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-ssri.fromStream(fs.createReadStream('index.js'), {
- algorithms: ['sha1', 'sha512']
-}).then(integrity => {
- return ssri.checkStream(fs.createReadStream('index.js'), integrity)
-}) // succeeds
-```
-
-#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>`
-
-Returns a Hash object with `update(<Buffer or string>[, enc])` and `digest()` methods.
-
-The Hash object provides the same methods as [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash),
-except that `digest()` accepts no arguments and returns an `Integrity` object
-calculated from the data passed to `update()`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = ssri.create().update('foobarbaz').digest()
-integrity.toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
-
-#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false`
-
-Verifies `data` integrity against an `sri` argument. `data` may be either a
-`String` or a `Buffer`, and `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-If verification succeeds, `checkData` will return the `Hash` that matched (a
-truthy value). Otherwise, it will return `false`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-If `opts.error` is true, and verification fails, `checkData` will throw either
-an `EBADSIZE` or an `EINTEGRITY` error, instead of just returning false.
-
-##### Example
-
-```javascript
-const data = fs.readFileSync('index.js')
-ssri.checkData(data, ssri.fromData(data)) // -> Hash
-ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0')
-ssri.checkData(data, 'sha1-BaDDigEST') // -> false
-ssri.checkData(data, 'sha1-BaDDigEST', {error: true}) // -> Error! EINTEGRITY
-```
-
-#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>`
-
-Verifies the contents of `stream` against an `sri` argument. `stream` will be
-consumed in its entirety by this process. `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-`checkStream` will return a Promise that either resolves to the
-`Hash` that succeeded verification, or, if the verification fails
-or an error happens with `stream`, the Promise will be rejected.
-
-If the Promise is rejected because verification failed, the returned error will
-have `err.code` as `EINTEGRITY`.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be returned by a rejection if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- integrity
-)
-// ->
-// Promise<{
-// algorithm: 'sha512',
-// digest: 'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
-// }>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0'
-) // -> Promise<Hash>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha1-BaDDigEST'
-) // -> Promise<Error<{code: 'EINTEGRITY'}>>
-```
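-
-A sketch of the `opts.size` check described above (the expected size here just
-comes from `fs.statSync`):
-
-```javascript
-ssri.checkStream(
-  fs.createReadStream('index.js'),
-  integrity,
-  {size: fs.statSync('index.js').size}
-)
-// -> Promise<Hash>, or a rejection with err.code 'EBADSIZE' on a size mismatch
-```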
-
-#### <a name="integrity-stream"></a> `> integrityStream([opts]) -> IntegrityStream`
-
-Returns a `Transform` stream that data can be piped through in order to generate
-and optionally check data integrity for piped data. When the stream completes
-successfully, it emits `size` and `integrity` events, containing the total
-number of bytes processed and a calculated `Integrity` instance based on stream
-data, respectively.
-
-If `opts.algorithms` is passed in, the listed algorithms will be calculated when
-generating the final `Integrity` instance. The default is `['sha512']`.
-
-If `opts.single` is passed in, a single `Hash` instance will be returned.
-
-If `opts.integrity` is passed in, it should be an `integrity` value understood
-by [`parse`](#parse) that the stream will check the data against. If
-verification succeeds, the integrity stream will emit a `verified` event whose
-value is a single `Hash` object that is the one that succeeded verification. If
-verification fails, the stream will error with an `EINTEGRITY` error code.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be emitted by the stream if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-fs.createReadStream('index.js')
-.pipe(ssri.integrityStream({integrity}))
-```
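-
-The emitted events can be consumed directly. A slightly fuller sketch that both
-verifies and reports the calculated values:
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-
-fs.createReadStream('index.js')
-.pipe(ssri.integrityStream({integrity}))
-.on('error', err => console.error(err.code)) // 'EINTEGRITY' or 'EBADSIZE'
-.on('size', size => console.log(`${size} bytes`))
-.on('integrity', sri => console.log(sri.toString()))
-.on('verified', hash => console.log(`verified via ${hash.algorithm}`))
-.resume() // data must be consumed for the stream to finish
-```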
diff --git a/node_modules/libcipm/node_modules/ssri/index.js b/node_modules/libcipm/node_modules/ssri/index.js
deleted file mode 100644
index e102892b0..000000000
--- a/node_modules/libcipm/node_modules/ssri/index.js
+++ /dev/null
@@ -1,395 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const figgyPudding = require('figgy-pudding')
-const Transform = require('stream').Transform
-
-const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
-
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([^-]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([^-]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)*$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const SsriOpts = figgyPudding({
- algorithms: {default: ['sha512']},
- error: {default: false},
- integrity: {},
- options: {default: []},
- pickAlgorithm: {default: () => getPrioritizedHash},
- Promise: {default: () => Promise},
- sep: {default: ' '},
- single: {default: false},
- size: {},
- strict: {default: false}
-})
-
-class Hash {
- get isHash () { return true }
- constructor (hash, opts) {
- opts = SsriOpts(opts)
- const strict = !!opts.strict
- this.source = hash.trim()
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) { return }
- if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- this.options = rawOpts ? rawOpts.slice(1).split('?') : []
- }
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = SsriOpts(opts)
- if (opts.strict) {
-    // Strict mode enforces the standard as close to the letter as it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
- // leave it as-is, since there's multiple standards, and the
- // specified is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- (this.options || []).every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- const options = this.options && this.options.length
- ? `?${this.options.join('?')}`
- : ''
- return `${this.algorithm}-${this.digest}${options}`
- }
-}
-
-class Integrity {
- get isIntegrity () { return true }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = SsriOpts(opts)
- let sep = opts.sep || ' '
- if (opts.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
- }
- return Object.keys(this).map(k => {
- return this[k].map(hash => {
- return Hash.prototype.toString.call(hash, opts)
- }).filter(x => x.length).join(sep)
- }).filter(x => x.length).join(sep)
- }
- concat (integrity, opts) {
- opts = SsriOpts(opts)
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
- hexDigest () {
- return parse(this, {single: true}).hexDigest()
- }
- match (integrity, opts) {
- opts = SsriOpts(opts)
- const other = parse(integrity, opts)
- const algo = other.pickAlgorithm(opts)
- return (
- this[algo] &&
- other[algo] &&
- this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest
- )
- )
- ) || false
- }
- pickAlgorithm (opts) {
- opts = SsriOpts(opts)
- const pickAlgorithm = opts.pickAlgorithm
- const keys = Object.keys(this)
- if (!keys.length) {
- throw new Error(`No algorithms available for ${
- JSON.stringify(this.toString())
- }`)
- }
- return keys.reduce((acc, algo) => {
- return pickAlgorithm(acc, algo) || acc
- })
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- opts = SsriOpts(opts)
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts.single) {
- return new Hash(integrity, opts)
- }
- return integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- opts = SsriOpts(opts)
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- opts = SsriOpts(opts)
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- opts = SsriOpts(opts)
- const algorithms = opts.algorithms
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- opts = SsriOpts(opts)
- const P = opts.Promise || Promise
- const istream = integrityStream(opts)
- return new P((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => { sri = s })
- istream.on('end', () => resolve(sri))
- istream.on('data', () => {})
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- opts = SsriOpts(opts)
- sri = parse(sri, opts)
- if (!Object.keys(sri).length) {
- if (opts.error) {
- throw Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY'
- }
- )
- } else {
- return false
- }
- }
- const algorithm = sri.pickAlgorithm(opts)
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- const newSri = parse({algorithm, digest})
- const match = newSri.match(sri, opts)
- if (match || !opts.error) {
- return match
- } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
- const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
- err.code = 'EBADSIZE'
- err.found = data.length
- err.expected = opts.size
- err.sri = sri
- throw err
- } else {
- const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = sri
- err.algorithm = algorithm
- err.sri = sri
- throw err
- }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = SsriOpts(opts)
- const P = opts.Promise || Promise
- const checker = integrityStream(opts.concat({
- integrity: sri
- }))
- return new P((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let sri
- checker.on('verified', s => { sri = s })
- checker.on('end', () => resolve(sri))
- checker.on('data', () => {})
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts) {
- opts = SsriOpts(opts)
- // For verification
- const sri = opts.integrity && parse(opts.integrity, opts)
- const goodSri = sri && Object.keys(sri).length
- const algorithm = goodSri && sri.pickAlgorithm(opts)
- const digests = goodSri && sri[algorithm]
- // Calculating stream
- const algorithms = Array.from(
- new Set(opts.algorithms.concat(algorithm ? [algorithm] : []))
- )
- const hashes = algorithms.map(crypto.createHash)
- let streamSize = 0
- const stream = new Transform({
- transform (chunk, enc, cb) {
- streamSize += chunk.length
- hashes.forEach(h => h.update(chunk, enc))
- cb(null, chunk, enc)
- }
- }).on('end', () => {
- const optString = (opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- const newSri = parse(hashes.map((h, i) => {
- return `${algorithms[i]}-${h.digest('base64')}${optString}`
- }).join(' '), opts)
- // Integrity verification mode
- const match = goodSri && newSri.match(sri, opts)
- if (typeof opts.size === 'number' && streamSize !== opts.size) {
- const err = new Error(`stream size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${streamSize}`)
- err.code = 'EBADSIZE'
- err.found = streamSize
- err.expected = opts.size
- err.sri = sri
- stream.emit('error', err)
- } else if (opts.integrity && !match) {
- const err = new Error(`${sri} integrity checksum failed when using ${algorithm}: wanted ${digests} but got ${newSri}. (${streamSize} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = digests
- err.algorithm = algorithm
- err.sri = sri
- stream.emit('error', err)
- } else {
- stream.emit('size', streamSize)
- stream.emit('integrity', newSri)
- match && stream.emit('verified', match)
- }
- })
- return stream
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- opts = SsriOpts(opts)
- const algorithms = opts.algorithms
- const optString = opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function (enc) {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- }
- }
-}
-
-const NODE_HASHES = new Set(crypto.getHashes())
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
- // TODO - it's unclear _which_ of these Node will actually use as its name
- // for the algorithm, so we guesswork it based on the OpenSSL names.
- 'sha3',
- 'sha3-256', 'sha3-384', 'sha3-512',
- 'sha3_256', 'sha3_384', 'sha3_512'
-].filter(algo => NODE_HASHES.has(algo))
-
-function getPrioritizedHash (algo1, algo2) {
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
diff --git a/node_modules/libcipm/node_modules/ssri/package.json b/node_modules/libcipm/node_modules/ssri/package.json
deleted file mode 100644
index 07bf71768..000000000
--- a/node_modules/libcipm/node_modules/ssri/package.json
+++ /dev/null
@@ -1,90 +0,0 @@
-{
- "_from": "ssri@^6.0.1",
- "_id": "ssri@6.0.1",
- "_inBundle": false,
- "_integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==",
- "_location": "/libcipm/ssri",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "ssri@^6.0.1",
- "name": "ssri",
- "escapedName": "ssri",
- "rawSpec": "^6.0.1",
- "saveSpec": null,
- "fetchSpec": "^6.0.1"
- },
- "_requiredBy": [
- "/libcipm/cacache",
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz",
- "_shasum": "2a3c41b28dd45b62b63676ecb74001265ae9edd8",
- "_spec": "ssri@^6.0.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/ssri/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "figgy-pudding": "^3.5.1"
- },
- "deprecated": false,
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "devDependencies": {
- "nyc": "^11.4.1",
- "standard": "^10.0.3",
- "standard-version": "^4.3.0",
- "tap": "^11.1.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/zkat/ssri#readme",
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "ssri",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/ssri.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "6.0.1"
-}
diff --git a/node_modules/libcipm/node_modules/tar/LICENSE b/node_modules/libcipm/node_modules/tar/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/tar/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/tar/README.md b/node_modules/libcipm/node_modules/tar/README.md
deleted file mode 100644
index 034e4865c..000000000
--- a/node_modules/libcipm/node_modules/tar/README.md
+++ /dev/null
@@ -1,954 +0,0 @@
-# node-tar
-
-[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar)
-
-[Fast](./benchmarks) and full-featured Tar for Node.js
-
-The API is designed to mimic the behavior of `tar(1)` on unix systems.
-If you are familiar with how tar works, most of this will hopefully be
-straightforward for you. If not, then hopefully this module can teach
-you useful unix skills that may come in handy someday :)
-
-## Background
-
-A "tar file" or "tarball" is an archive of file system entries
-(directories, files, links, etc.). The name comes from "tape archive".
-If you run `man tar` on almost any Unix command line, you'll learn
-quite a bit about what it can do, and its history.
-
-Tar has 5 main top-level commands:
-
-* `c` Create an archive
-* `r` Replace entries within an archive
-* `u` Update entries within an archive (ie, replace if they're newer)
-* `t` List out the contents of an archive
-* `x` Extract an archive to disk
-
-The other flags and options modify how this top level function works.
-
-## High-Level API
-
-These 5 functions are the high-level API. All of them have a
-single-character name (for unix nerds familiar with `tar(1)`) as well
-as a long name (for everyone else).
-
-All the high-level functions take the following arguments, all three
-of which are optional and may be omitted.
-
-1. `options` - An optional object specifying various options
-2. `paths` - An array of paths to add or extract
-3. `callback` - Called when the command is completed, if async. (If
- sync or no file specified, providing a callback throws a
- `TypeError`.)
-
-If the command is sync (ie, if `options.sync=true`), then the
-callback is not allowed, since the action will be completed immediately.
-
-If a `file` argument is specified and the command is async, then a
-`Promise` is returned. In this case, a callback may also be provided; it
-is called when the command is completed.
-
-If a `file` option is not specified, then a stream is returned. For
-`create`, this is a readable stream of the generated archive. For
-`list` and `extract` this is a writable stream that an archive should
-be written into. If a file is not specified, then a callback is not
-allowed, because you're already getting a stream to work with.
-
-`replace` and `update` only work on existing archives, and so require
-a `file` argument.
-
-Sync commands without a file argument return a stream that acts on its
-input immediately in the same tick. For readable streams, this means
-that all of the data is immediately available by calling
-`stream.read()`. For writable streams, it will be acted upon as soon
-as it is provided, but this can be at any time.
-
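-For example, a sync create with no `file` option already holds the whole
-archive, so it can be read in one call (a sketch):
-
-```js
-// sync + no file: the archive data is available immediately
-const archiveData = tar.c({sync: true, gzip: true}, ['some', 'files']).read()
-```
-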
-### Warnings
-
-Some things cause tar to emit a warning, but should usually not cause
-the entire operation to fail. There are three ways to handle
-warnings:
-
-1. **Ignore them** (default) Invalid entries won't be put in the
- archive, and invalid entries won't be unpacked. This is usually
- fine, but can hide failures that you might care about.
-2. **Notice them** Add an `onwarn` function to the options, or listen
- to the `'warn'` event on any tar stream. The function will get
-   called as `onwarn(message, data)`.  Handle as appropriate (see the
-   sketch after this list).
-3. **Explode them.** Set `strict: true` in the options object, and
- `warn` messages will be emitted as `'error'` events instead. If
- there's no `error` handler, this causes the program to crash. If
- used with a promise-returning/callback-taking method, then it'll
- send the error to the promise/callback.
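-
-A minimal sketch of approaches 2 and 3 (the filename is arbitrary):
-
-```js
-// 2: notice warnings without failing
-tar.x({file: 'my-tarball.tgz', onwarn: (message, data) => console.warn(message)})
-
-// 3: treat any warning as a hard error
-tar.x({file: 'my-tarball.tgz', strict: true}).catch(err => console.error(err))
-```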
-
-### Examples
-
-The API mimics the `tar(1)` command line functionality, with aliases
-for more human-readable option and function names. The goal is that
-if you know how to use `tar(1)` in Unix, then you know how to use
-`require('tar')` in JavaScript.
-
-To replicate `tar czf my-tarball.tgz files and folders`, you'd do:
-
-```js
-tar.c(
- {
- gzip: <true|gzip options>,
- file: 'my-tarball.tgz'
- },
- ['some', 'files', 'and', 'folders']
-).then(_ => { .. tarball has been created .. })
-```
-
-To replicate `tar cz files and folders > my-tarball.tgz`, you'd do:
-
-```js
-tar.c( // or tar.create
- {
- gzip: <true|gzip options>
- },
- ['some', 'files', 'and', 'folders']
-).pipe(fs.createWriteStream('my-tarball.tgz'))
-```
-
-To replicate `tar xf my-tarball.tgz` you'd do:
-
-```js
-tar.x( // or tar.extract(
- {
- file: 'my-tarball.tgz'
- }
-).then(_=> { .. tarball has been dumped in cwd .. })
-```
-
-To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`:
-
-```js
-fs.createReadStream('my-tarball.tgz').pipe(
- tar.x({
- strip: 1,
- C: 'some-dir' // alias for cwd:'some-dir', also ok
- })
-)
-```
-
-To replicate `tar tf my-tarball.tgz`, do this:
-
-```js
-tar.t({
- file: 'my-tarball.tgz',
- onentry: entry => { .. do whatever with it .. }
-})
-```
-
-To replicate `cat my-tarball.tgz | tar t` do:
-
-```js
-fs.createReadStream('my-tarball.tgz')
- .pipe(tar.t())
- .on('entry', entry => { .. do whatever with it .. })
-```
-
-To do anything synchronous, add `sync: true` to the options. Note
-that sync functions don't take a callback and don't return a promise.
-When the function returns, it's already done. Sync methods without a
-file argument return a sync stream, which flushes immediately. But,
-of course, it still won't be done until you `.end()` it.
-
-To filter entries, add `filter: <function>` to the options.
-Tar-creating methods call the filter with `filter(path, stat)`.
-Tar-reading methods (including extraction) call the filter with
-`filter(path, entry)`. The filter is called in the `this`-context of
-the `Pack` or `Unpack` stream object.
-
-The arguments list to `tar t` and `tar x` specify a list of filenames
-to extract or list, so they're equivalent to a filter that tests if
-the file is in the list.
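-
-For instance (a sketch; the paths and predicates are arbitrary):
-
-```js
-// creation: filter(path, stat) -- skip anything under node_modules
-tar.c({file: 'src.tgz', filter: path => !path.includes('node_modules')}, ['.'])
-
-// extraction: filter(path, entry) -- only unpack directories and JavaScript files
-tar.x({file: 'src.tgz', filter: (path, entry) =>
-  entry.type === 'Directory' || path.endsWith('.js')})
-```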
-
-For those who _aren't_ fans of tar's single-character command names:
-
-```
-tar.c === tar.create
-tar.r === tar.replace (appends to archive, file is required)
-tar.u === tar.update (appends if newer, file is required)
-tar.x === tar.extract
-tar.t === tar.list
-```
-
-Keep reading for all the command descriptions and options, as well as
-the low-level API that they are built on.
-
-### tar.c(options, fileList, callback) [alias: tar.create]
-
-Create a tarball archive.
-
-The `fileList` is an array of paths to add to the tarball. Adding a
-directory also adds its children recursively.
-
-An entry in `fileList` that starts with an `@` symbol is a tar archive
-whose entries will be added. To add a file that starts with `@`,
-prepend it with `./`.
-
-The following options are supported:
-
-- `file` Write the tarball archive to the specified filename. If this
- is specified, then the callback will be fired when the file has been
- written, and a promise will be returned that resolves when the file
- is written. If a filename is not specified, then a Readable Stream
- will be returned which will emit the file data. [Alias: `f`]
-- `sync` Act synchronously. If this is set, then any provided file
- will be fully written after the call to `tar.c`. If this is set,
- and a file is not provided, then the resulting stream will already
- have the data ready to `read` or `emit('data')` as soon as you
- request it.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `cwd` The current working directory for creating the archive.
- Defaults to `process.cwd()`. [Alias: `C`]
-- `prefix` A path portion to prefix onto the entries in the archive.
-- `gzip` Set to any truthy value to create a gzipped archive, or an
- object with settings for `zlib.Gzip()` [Alias: `z`]
-- `filter` A function that gets called with `(path, stat)` for each
- entry being added. Return `true` to add the entry to the archive,
- or `false` to omit it.
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
-  that `mtime` is still included, because this is necessary for other
-  time-based operations.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths. [Alias: `P`]
-- `mode` The mode to set on the created file archive
-- `noDirRecurse` Do not recursively archive the contents of
- directories. [Alias: `n`]
-- `follow` Set to true to pack the targets of symbolic links. Without
- this option, symbolic links are archived as such. [Alias: `L`, `h`]
-- `noPax` Suppress pax extended headers. Note that this means that
- long paths and linkpaths will be truncated, and large or negative
- numeric values may be interpreted incorrectly.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
- [Alias: `m`, `no-mtime`]
-- `mtime` Set to a `Date` object to force a specific `mtime` for
- everything added to the archive. Overridden by `noMtime`.
-
-
-The following options are mostly internal, but can be modified in some
-advanced use cases, such as re-using caches between runs.
-
-- `linkCache` A Map object containing the device and inode value for
- any file whose nlink is > 1, to identify hard links.
-- `statCache` A Map object that caches calls to `lstat`.
-- `readdirCache` A Map object that caches calls to `readdir`.
-- `jobs` A number specifying how many concurrent jobs to run.
- Defaults to 4.
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 16 MB.
-
-### tar.x(options, fileList, callback) [alias: tar.extract]
-
-Extract a tarball archive.
-
-The `fileList` is an array of paths to extract from the tarball. If
-no paths are provided, then all the entries are extracted.
-
-If the archive is gzipped, then tar will detect this and unzip it.
-
-Note that all directories that are created will be forced to be
-writable, readable, and listable by their owner, to avoid cases where
-a directory prevents extraction of child entries by virtue of its
-mode.
-
-Most extraction errors will cause a `warn` event to be emitted. If
-the `cwd` is missing, or not a directory, then the extraction will
-fail completely.
-
-The following options are supported:
-
-- `cwd` Extract files relative to the specified directory. Defaults
- to `process.cwd()`. If provided, this must exist and must be a
- directory. [Alias: `C`]
-- `file` The archive file to extract. If not specified, then a
- Writable stream is returned where the archive data should be
- written. [Alias: `f`]
-- `sync` Create files and directories synchronously.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `filter` A function that gets called with `(path, entry)` for each
- entry being unpacked. Return `true` to unpack the entry from the
- archive, or `false` to skip it.
-- `newer` Set to true to keep the existing file on disk if it's newer
- than the file in the archive. [Alias: `keep-newer`,
- `keep-newer-files`]
-- `keep` Do not overwrite existing files. In particular, if a file
- appears more than once in an archive, later copies will not
- overwrite earlier copies. [Alias: `k`, `keep-existing`]
-- `preservePaths` Allow absolute paths, paths containing `..`, and
- extracting through symbolic links. By default, `/` is stripped from
- absolute paths, `..` paths are not extracted, and any file whose
- location would be modified by a symbolic link is not extracted.
- [Alias: `P`]
-- `unlink` Unlink files before creating them. Without this option,
- tar overwrites existing files, which preserves existing hardlinks.
- With this option, existing hardlinks will be broken, as will any
- symlink that would affect the location of an extracted file. [Alias:
- `U`]
-- `strip` Remove the specified number of leading path elements.
- Pathnames with fewer elements will be silently skipped. Note that
- the pathname is edited after applying the filter, but before
- security checks. [Alias: `strip-components`, `stripComponents`]
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `preserveOwner` If true, tar will set the `uid` and `gid` of
- extracted entries to the `uid` and `gid` fields in the archive.
- This defaults to true when run as root, and false otherwise. If
- false, then files and directories will be set with the owner and
- group of the user running the process. This is similar to `-p` in
- `tar(1)`, but ACLs and other system-specific data is never unpacked
- in this implementation, and modes are set by default already.
- [Alias: `p`]
-- `uid` Set to a number to force ownership of all extracted files and
- folders, and all implicitly created directories, to be owned by the
- specified user id, regardless of the `uid` field in the archive.
- Cannot be used along with `preserveOwner`. Requires also setting a
- `gid` option.
-- `gid` Set to a number to force ownership of all extracted files and
- folders, and all implicitly created directories, to be owned by the
- specified group id, regardless of the `gid` field in the archive.
- Cannot be used along with `preserveOwner`. Requires also setting a
- `uid` option.
-- `noMtime` Set to true to omit writing `mtime` value for extracted
- entries. [Alias: `m`, `no-mtime`]
-- `transform` Provide a function that takes an `entry` object, and
- returns a stream, or any falsey value. If a stream is provided,
- then that stream's data will be written instead of the contents of
- the archive entry. If a falsey value is provided, then the entry is
- written to disk as normal. (To exclude items from extraction, use
- the `filter` option described above.)
-- `onentry` A function that gets called with `(entry)` for each entry
- that passes the filter.
-
-The following options are mostly internal, but can be modified in some
-advanced use cases, such as re-using caches between runs.
-
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 16 MB.
-- `umask` Filter the modes of entries like `process.umask()`.
-- `dmode` Default mode for directories
-- `fmode` Default mode for files
-- `dirCache` A Map object of which directories exist.
-- `maxMetaEntrySize` The maximum size of meta entries that is
- supported. Defaults to 1 MB.
-
-Note that using an asynchronous stream type with the `transform`
-option will cause undefined behavior in sync extractions.
-[MiniPass](http://npm.im/minipass)-based streams are designed for this
-use case.
-
-### tar.t(options, fileList, callback) [alias: tar.list]
-
-List the contents of a tarball archive.
-
-The `fileList` is an array of paths to list from the tarball. If
-no paths are provided, then all the entries are listed.
-
-If the archive is gzipped, then tar will detect this and unzip it.
-
-Returns an event emitter that emits `entry` events with
-`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'`
-events. (If you want to get actual readable entries, use the
-`tar.Parse` class instead.)
-
-The following options are supported:
-
-- `cwd` Extract files relative to the specified directory. Defaults
- to `process.cwd()`. [Alias: `C`]
-- `file` The archive file to list. If not specified, then a
- Writable stream is returned where the archive data should be
- written. [Alias: `f`]
-- `sync` Read the specified file synchronously. (This has no effect
- when a file option isn't specified, because entries are emitted as
- fast as they are parsed from the stream anyway.)
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `filter` A function that gets called with `(path, entry)` for each
- entry being listed. Return `true` to emit the entry from the
- archive, or `false` to skip it.
-- `onentry` A function that gets called with `(entry)` for each entry
- that passes the filter. This is important for when both `file` and
- `sync` are set, because it will be called synchronously.
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 16 MB.
-- `noResume` By default, `entry` streams are resumed immediately after
- the call to `onentry`. Set `noResume: true` to suppress this
- behavior. Note that by opting into this, the stream will never
- complete until the entry data is consumed.
-
-### tar.u(options, fileList, callback) [alias: tar.update]
-
-Add files to an archive if they are newer than the entry already in
-the tarball archive.
-
-The `fileList` is an array of paths to add to the tarball. Adding a
-directory also adds its children recursively.
-
-An entry in `fileList` that starts with an `@` symbol is a tar archive
-whose entries will be added. To add a file that starts with `@`,
-prepend it with `./`.
-
-The following options are supported:
-
-- `file` Required. Write the tarball archive to the specified
- filename. [Alias: `f`]
-- `sync` Act synchronously. If this is set, then any provided file
- will be fully written after the call to `tar.c`.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `cwd` The current working directory for adding entries to the
- archive. Defaults to `process.cwd()`. [Alias: `C`]
-- `prefix` A path portion to prefix onto the entries in the archive.
-- `gzip` Set to any truthy value to create a gzipped archive, or an
- object with settings for `zlib.Gzip()` [Alias: `z`]
-- `filter` A function that gets called with `(path, stat)` for each
- entry being added. Return `true` to add the entry to the archive,
- or `false` to omit it.
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
-  that `mtime` is still included, because this is necessary for other
-  time-based operations.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths. [Alias: `P`]
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 16 MB.
-- `noDirRecurse` Do not recursively archive the contents of
- directories. [Alias: `n`]
-- `follow` Set to true to pack the targets of symbolic links. Without
- this option, symbolic links are archived as such. [Alias: `L`, `h`]
-- `noPax` Suppress pax extended headers. Note that this means that
- long paths and linkpaths will be truncated, and large or negative
- numeric values may be interpreted incorrectly.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
- [Alias: `m`, `no-mtime`]
-- `mtime` Set to a `Date` object to force a specific `mtime` for
- everything added to the archive. Overridden by `noMtime`.
-
-### tar.r(options, fileList, callback) [alias: tar.replace]
-
-Add files to an existing archive. Because later entries override
-earlier entries, this effectively replaces any existing entries.
-
-The `fileList` is an array of paths to add to the tarball. Adding a
-directory also adds its children recursively.
-
-An entry in `fileList` that starts with an `@` symbol is a tar archive
-whose entries will be added. To add a file that starts with `@`,
-prepend it with `./`.
-
-The following options are supported:
-
-- `file` Required. Write the tarball archive to the specified
- filename. [Alias: `f`]
-- `sync` Act synchronously. If this is set, then any provided file
- will be fully written after the call to `tar.c`.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `cwd` The current working directory for adding entries to the
- archive. Defaults to `process.cwd()`. [Alias: `C`]
-- `prefix` A path portion to prefix onto the entries in the archive.
-- `gzip` Set to any truthy value to create a gzipped archive, or an
- object with settings for `zlib.Gzip()` [Alias: `z`]
-- `filter` A function that gets called with `(path, stat)` for each
- entry being added. Return `true` to add the entry to the archive,
- or `false` to omit it.
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
-  that `mtime` is still included, because this is necessary for other
-  time-based operations.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths. [Alias: `P`]
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 16 MB.
-- `noDirRecurse` Do not recursively archive the contents of
- directories. [Alias: `n`]
-- `follow` Set to true to pack the targets of symbolic links. Without
- this option, symbolic links are archived as such. [Alias: `L`, `h`]
-- `noPax` Suppress pax extended headers. Note that this means that
- long paths and linkpaths will be truncated, and large or negative
- numeric values may be interpreted incorrectly.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
- [Alias: `m`, `no-mtime`]
-- `mtime` Set to a `Date` object to force a specific `mtime` for
- everything added to the archive. Overridden by `noMtime`.
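
For example, here is a minimal sketch of appending a file to an existing tarball with `tar.r`; the archive name and file path are invented for illustration.

```js
const tar = require('tar')

// Append `updated.txt` to archive.tar. Because later entries shadow
// earlier ones, this effectively replaces any existing entry by that path.
// With a `file` option and no callback, a promise is returned.
tar.r(
  { file: 'archive.tar', cwd: '.' },
  ['updated.txt']
).then(() => console.log('archive.tar updated'))
```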
-
-
-## Low-Level API
-
-### class tar.Pack
-
-A readable tar stream.
-
-Has all the standard readable stream interface stuff. `'data'` and
-`'end'` events, `read()` method, `pause()` and `resume()`, etc.
-
-#### constructor(options)
-
-The following options are supported:
-
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `cwd` The current working directory for creating the archive.
- Defaults to `process.cwd()`.
-- `prefix` A path portion to prefix onto the entries in the archive.
-- `gzip` Set to any truthy value to create a gzipped archive, or an
- object with settings for `zlib.Gzip()`.
-- `filter` A function that gets called with `(path, stat)` for each
- entry being added. Return `true` to add the entry to the archive,
- or `false` to omit it.
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
- that `mtime` is still included, because this is necessary for other
- time-based operations.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths.
-- `linkCache` A Map object containing the device and inode value for
- any file whose nlink is > 1, to identify hard links.
-- `statCache` A Map object that caches calls to `lstat`.
-- `readdirCache` A Map object that caches calls to `readdir`.
-- `jobs` A number specifying how many concurrent jobs to run.
- Defaults to 4.
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 16 MB.
-- `noDirRecurse` Do not recursively archive the contents of
- directories.
-- `follow` Set to true to pack the targets of symbolic links. Without
- this option, symbolic links are archived as such.
-- `noPax` Suppress pax extended headers. Note that this means that
- long paths and linkpaths will be truncated, and large or negative
- numeric values may be interpreted incorrectly.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
-- `mtime` Set to a `Date` object to force a specific `mtime` for
- everything added to the archive. Overridden by `noMtime`.
-
-#### add(path)
-
-Adds an entry to the archive. Returns the Pack stream.
-
-#### write(path)
-
-Adds an entry to the archive. Returns true if flushed.
-
-#### end()
-
-Finishes the archive.
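
As a rough sketch (assuming a `src/` directory exists under the cwd), a `Pack` stream can be piped anywhere a readable stream goes:

```js
const tar = require('tar')
const fs = require('fs')

// Create a gzipped archive of ./src by piping a Pack stream to disk.
const pack = new tar.Pack({ gzip: true, portable: true })
pack.pipe(fs.createWriteStream('src.tgz'))
pack.add('src')   // like write(), but returns the Pack stream for chaining
pack.end()
```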
-
-### class tar.Pack.Sync
-
-Synchronous version of `tar.Pack`.
-
-### class tar.Unpack
-
-A writable stream that unpacks a tar archive onto the file system.
-
-All the normal writable stream stuff is supported. `write()` and
-`end()` methods, `'drain'` events, etc.
-
-Note that all directories that are created will be forced to be
-writable, readable, and listable by their owner, to avoid cases where
-a directory prevents extraction of child entries by virtue of its
-mode.
-
-`'close'` is emitted when it's done writing entries to the file system.
-
-Most unpack errors will cause a `warn` event to be emitted. If the
-`cwd` is missing, or not a directory, then an error will be emitted.
-
-#### constructor(options)
-
-- `cwd` Extract files relative to the specified directory. Defaults
- to `process.cwd()`. If provided, this must exist and must be a
- directory.
-- `filter` A function that gets called with `(path, entry)` for each
- entry being unpacked. Return `true` to unpack the entry from the
- archive, or `false` to skip it.
-- `newer` Set to true to keep the existing file on disk if it's newer
- than the file in the archive.
-- `keep` Do not overwrite existing files. In particular, if a file
- appears more than once in an archive, later copies will not
- overwrite earlier copies.
-- `preservePaths` Allow absolute paths, paths containing `..`, and
- extracting through symbolic links. By default, `/` is stripped from
- absolute paths, `..` paths are not extracted, and any file whose
- location would be modified by a symbolic link is not extracted.
-- `unlink` Unlink files before creating them. Without this option,
- tar overwrites existing files, which preserves existing hardlinks.
- With this option, existing hardlinks will be broken, as will any
- symlink that would affect the location of an extracted file.
-- `strip` Remove the specified number of leading path elements.
- Pathnames with fewer elements will be silently skipped. Note that
- the pathname is edited after applying the filter, but before
- security checks.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `umask` Filter the modes of entries like `process.umask()`.
-- `dmode` Default mode for directories
-- `fmode` Default mode for files
-- `dirCache` A Map object of which directories exist.
-- `maxMetaEntrySize` The maximum size of meta entries that is
- supported. Defaults to 1 MB.
-- `preserveOwner` If true, tar will set the `uid` and `gid` of
- extracted entries to the `uid` and `gid` fields in the archive.
- This defaults to true when run as root, and false otherwise. If
- false, then files and directories will be set with the owner and
- group of the user running the process. This is similar to `-p` in
- `tar(1)`, but ACLs and other system-specific data is never unpacked
- in this implementation, and modes are set by default already.
-- `win32` True if on a windows platform. Causes behavior where
- filenames containing `<|>?` chars are converted to
- windows-compatible values while being unpacked.
-- `uid` Set to a number to force ownership of all extracted files and
- folders, and all implicitly created directories, to be owned by the
- specified user id, regardless of the `uid` field in the archive.
- Cannot be used along with `preserveOwner`. Requires also setting a
- `gid` option.
-- `gid` Set to a number to force ownership of all extracted files and
- folders, and all implicitly created directories, to be owned by the
- specified group id, regardless of the `gid` field in the archive.
- Cannot be used along with `preserveOwner`. Requires also setting a
- `uid` option.
-- `noMtime` Set to true to omit writing `mtime` value for extracted
- entries.
-- `transform` Provide a function that takes an `entry` object, and
- returns a stream, or any falsey value. If a stream is provided,
- then that stream's data will be written instead of the contents of
- the archive entry. If a falsey value is provided, then the entry is
- written to disk as normal. (To exclude items from extraction, use
- the `filter` option described above.)
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `onentry` A function that gets called with `(entry)` for each entry
- that passes the filter.
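
A minimal sketch of streaming an archive from disk into an `Unpack` stream; the filename and destination directory are assumptions, and `dest` must already exist:

```js
const tar = require('tar')
const fs = require('fs')

// Extract archive.tar into ./dest, keeping only .js entries.
const unpack = new tar.Unpack({
  cwd: 'dest',                          // must exist and be a directory
  filter: path => path.endsWith('.js')
})
fs.createReadStream('archive.tar').pipe(unpack)
unpack.on('close', () => console.log('done extracting'))
```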
-
-### class tar.Unpack.Sync
-
-Synchronous version of `tar.Unpack`.
-
-Note that using an asynchronous stream type with the `transform`
-option will cause undefined behavior in sync unpack streams.
-[MiniPass](http://npm.im/minipass)-based streams are designed for this
-use case.
-
-### class tar.Parse
-
-A writable stream that parses a tar archive stream. All the standard
-writable stream stuff is supported.
-
-If the archive is gzipped, then tar will detect this and unzip it.
-
-Emits `'entry'` events with `tar.ReadEntry` objects, which are
-themselves readable streams that you can pipe wherever.
-
-Each `entry` will not emit until the one before it is flushed through,
-so make sure to either consume the data (with `on('data', ...)` or
-`.pipe(...)`) or throw it away with `.resume()` to keep the stream
-flowing.
-
-#### constructor(options)
-
-Returns an event emitter that emits `entry` events with
-`tar.ReadEntry` objects.
-
-The following options are supported:
-
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `filter` A function that gets called with `(path, entry)` for each
- entry being listed. Return `true` to emit the entry from the
- archive, or `false` to skip it.
-- `onentry` A function that gets called with `(entry)` for each entry
- that passes the filter.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
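
For instance, a small sketch that lists entry paths from a (possibly gzipped) archive on disk; the filename is hypothetical:

```js
const tar = require('tar')
const fs = require('fs')

const parser = new tar.Parse({
  onentry: entry => {
    console.log(entry.path, entry.size)
    entry.resume()   // discard the body so the stream keeps flowing
  }
})
fs.createReadStream('archive.tgz').pipe(parser)
```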
-
-#### abort(message, error)
-
-Stop all parsing activities. This is called when there are zlib
-errors. It also emits a warning with the message and error provided.
-
-### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass)
-
-A representation of an entry that is being read out of a tar archive.
-
-It has the following fields:
-
-- `extended` The extended metadata object provided to the constructor.
-- `globalExtended` The global extended metadata object provided to the
- constructor.
-- `remain` The number of bytes remaining to be written into the
- stream.
-- `blockRemain` The number of 512-byte blocks remaining to be written
- into the stream.
-- `ignore` Whether this entry should be ignored.
-- `meta` True if this represents metadata about the next entry, false
- if it represents a filesystem object.
-- All the fields from the header, extended header, and global extended
- header are added to the ReadEntry object. So it has `path`, `type`,
- `size`, `mode`, and so on.
-
-#### constructor(header, extended, globalExtended)
-
-Create a new ReadEntry object with the specified header, extended
-header, and global extended header values.
-
-### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass)
-
-A representation of an entry that is being written from the file
-system into a tar archive.
-
-Emits data for the Header, and for the Pax Extended Header if one is
-required, as well as any body data.
-
-Creating a WriteEntry for a directory does not also create
-WriteEntry objects for all of the directory contents.
-
-It has the following fields:
-
-- `path` The path field that will be written to the archive. By
- default, this is also the path from the cwd to the file system
- object.
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
- that `mtime` is still included, because this is necessary for other
- time-based operations.
-- `myuid` If supported, the uid of the user running the current
- process.
-- `myuser` The `env.USER` string if set, or `''`. Set as the entry
- `uname` field if the file's `uid` matches `this.myuid`.
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 1 MB.
-- `linkCache` A Map object containing the device and inode value for
- any file whose nlink is > 1, to identify hard links.
-- `statCache` A Map object that caches calls to `lstat`.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths.
-- `cwd` The current working directory for creating the archive.
- Defaults to `process.cwd()`.
-- `absolute` The absolute path to the entry on the filesystem. By
- default, this is `path.resolve(this.cwd, this.path)`, but it can be
- overridden explicitly.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `win32` True if on a windows platform. Causes behavior where paths
- replace `\` with `/` and filenames containing the windows-compatible
- forms of `<|>?:` characters are converted to actual `<|>?:` characters
- in the archive.
-- `noPax` Suppress pax extended headers. Note that this means that
- long paths and linkpaths will be truncated, and large or negative
- numeric values may be interpreted incorrectly.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
-
-
-#### constructor(path, options)
-
-`path` is the path of the entry as it is written in the archive.
-
-The following options are supported:
-
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
- that `mtime` is still included, because this is necessary for other
- time-based operations.
-- `maxReadSize` The maximum buffer size for `fs.read()` operations.
- Defaults to 1 MB.
-- `linkCache` A Map object containing the device and inode value for
- any file whose nlink is > 1, to identify hard links.
-- `statCache` A Map object that caches calls to `lstat`.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths.
-- `cwd` The current working directory for creating the archive.
- Defaults to `process.cwd()`.
-- `absolute` The absolute path to the entry on the filesystem. By
- default, this is `path.resolve(this.cwd, this.path)`, but it can be
- overridden explicitly.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `win32` True if on a windows platform. Causes behavior where paths
- replace `\` with `/`.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
-- `umask` Set to restrict the modes on the entries in the archive,
- somewhat like how umask works on file creation. Defaults to
- `process.umask()` on unix systems, or `0o22` on Windows.
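
A hedged sketch of streaming a single file as a tar entry (the filename is invented, and this emits only the header and body for one entry, not a complete archive):

```js
const tar = require('tar')
const fs = require('fs')

// Read ./hello.txt and emit its tar header + body as a stream.
const entry = new tar.WriteEntry('hello.txt', { cwd: '.', portable: true })
entry.pipe(fs.createWriteStream('hello.entry'))
```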
-
-#### warn(message, data)
-
-If strict, emit an error with the provided message.
-
-Otherwise, emit a `'warn'` event with the provided message and data.
-
-### class tar.WriteEntry.Sync
-
-Synchronous version of `tar.WriteEntry`.
-
-### class tar.WriteEntry.Tar
-
-A version of tar.WriteEntry that gets its data from a tar.ReadEntry
-instead of from the filesystem.
-
-#### constructor(readEntry, options)
-
-`readEntry` is the entry being read out of another archive.
-
-The following options are supported:
-
-- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
- `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
- that `mtime` is still included, because this is necessary for other
- time-based operations.
-- `preservePaths` Allow absolute paths. By default, `/` is stripped
- from absolute paths.
-- `strict` Treat warnings as crash-worthy errors. Default false.
-- `onwarn` A function that will get called with `(message, data)` for
- any warnings encountered.
-- `noMtime` Set to true to omit writing `mtime` values for entries.
- Note that this prevents using other mtime-based features like
- `tar.update` or the `keepNewer` option with the resulting tar archive.
-
-### class tar.Header
-
-A class for reading and writing header blocks.
-
-It has the following fields:
-
-- `nullBlock` True if decoding a block which is entirely composed of
- `0x00` null bytes. (Useful because tar files are terminated by
- at least 2 null blocks.)
-- `cksumValid` True if the checksum in the header is valid, false
- otherwise.
-- `needPax` True if the values, as encoded, will require a Pax
- extended header.
-- `path` The path of the entry.
-- `mode` The 4 lowest-order octal digits of the file mode. That is,
- read/write/execute permissions for world, group, and owner, and the
- setuid, setgid, and sticky bits.
-- `uid` Numeric user id of the file owner
-- `gid` Numeric group id of the file owner
-- `size` Size of the file in bytes
-- `mtime` Modified time of the file
-- `cksum` The checksum of the header. This is generated by adding all
- the bytes of the header block, treating the checksum field itself as
- all ASCII space characters (that is, `0x20`).
-- `type` The human-readable name of the type of entry this represents,
- or the alphanumeric key if unknown.
-- `typeKey` The alphanumeric key for the type of entry this header
- represents.
-- `linkpath` The target of Link and SymbolicLink entries.
-- `uname` Human-readable user name of the file owner
-- `gname` Human-readable group name of the file owner
-- `devmaj` The major portion of the device number. Always `0` for
- files, directories, and links.
-- `devmin` The minor portion of the device number. Always `0` for
- files, directories, and links.
-- `atime` File access time.
-- `ctime` File change time.
-
-#### constructor(data, [offset=0])
-
-`data` is optional. It is either a Buffer that should be interpreted
-as a tar Header starting at the specified offset and continuing for
-512 bytes, or a data object of keys and values to set on the header
-object, and eventually encode as a tar Header.
-
-#### decode(block, offset)
-
-Decode the provided buffer starting at the specified offset.
-
-The buffer must contain at least 512 bytes beyond the specified offset.
-
-#### set(data)
-
-Set the fields in the data object.
-
-#### encode(buffer, offset)
-
-Encode the header fields into the buffer at the specified offset.
-
-Returns `this.needPax` to indicate whether a Pax Extended Header is
-required to properly encode the specified data.
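
As an illustration (field values invented), a header can be built from a plain object and encoded into a freshly allocated 512-byte block:

```js
const tar = require('tar')

const header = new tar.Header({
  path: 'hello.txt',
  mode: 0o644,
  size: 11,
  type: 'File',
  mtime: new Date()
})
const needsPax = header.encode()  // allocates header.block when no buffer is given
console.log(needsPax, header.block.length)  // false 512
```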
-
-### class tar.Pax
-
-An object representing a set of key-value pairs in a Pax extended
-header entry.
-
-It has the following fields. Where the same name is used, they have
-the same semantics as the tar.Header field of the same name.
-
-- `global` True if this represents a global extended header, or false
- if it is for a single entry.
-- `atime`
-- `charset`
-- `comment`
-- `ctime`
-- `gid`
-- `gname`
-- `linkpath`
-- `mtime`
-- `path`
-- `size`
-- `uid`
-- `uname`
-- `dev`
-- `ino`
-- `nlink`
-
-#### constructor(object, global)
-
-Set the fields set in the object. `global` is a boolean that defaults
-to false.
-
-#### encode()
-
-Return a Buffer containing the header and body for the Pax extended
-header entry, or `null` if there is nothing to encode.
-
-#### encodeBody()
-
-Return a string representing the body of the pax extended header
-entry.
-
-#### encodeField(fieldName)
-
-Return a string representing the key/value encoding for the specified
-fieldName, or `''` if the field is unset.
-
-### tar.Pax.parse(string, extended, global)
-
-Return a new Pax object created by parsing the contents of the string
-provided.
-
-If the `extended` object is set, then also add the fields from that
-object. (This is necessary because multiple metadata entries can
-occur in sequence.)
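
A brief sketch (values invented) of building and encoding a Pax extended header object:

```js
const tar = require('tar')

const pax = new tar.Pax({ path: 'some/fairly/long/path/example.txt', mtime: new Date() })
const buf = pax.encode()  // header block plus body, or null if nothing to encode
console.log(buf && buf.length)
```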
-
-### tar.types
-
-A translation table for the `type` field in tar headers.
-
-#### tar.types.name.get(code)
-
-Get the human-readable name for a given alphanumeric code.
-
-#### tar.types.code.get(name)
-
-Get the alphanumeric code for a given human-readable name.
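
For example, a minimal sketch:

```js
const tar = require('tar')

console.log(tar.types.name.get('0'))          // 'File'
console.log(tar.types.code.get('Directory'))  // '5'
```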
diff --git a/node_modules/libcipm/node_modules/tar/index.js b/node_modules/libcipm/node_modules/tar/index.js
deleted file mode 100644
index c9ae06e79..000000000
--- a/node_modules/libcipm/node_modules/tar/index.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-// high-level commands
-exports.c = exports.create = require('./lib/create.js')
-exports.r = exports.replace = require('./lib/replace.js')
-exports.t = exports.list = require('./lib/list.js')
-exports.u = exports.update = require('./lib/update.js')
-exports.x = exports.extract = require('./lib/extract.js')
-
-// classes
-exports.Pack = require('./lib/pack.js')
-exports.Unpack = require('./lib/unpack.js')
-exports.Parse = require('./lib/parse.js')
-exports.ReadEntry = require('./lib/read-entry.js')
-exports.WriteEntry = require('./lib/write-entry.js')
-exports.Header = require('./lib/header.js')
-exports.Pax = require('./lib/pax.js')
-exports.types = require('./lib/types.js')
diff --git a/node_modules/libcipm/node_modules/tar/lib/buffer.js b/node_modules/libcipm/node_modules/tar/lib/buffer.js
deleted file mode 100644
index 7876d5b3e..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/buffer.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-
-// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from
-// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.
-// .M, this is fine .\^/M..
-let B = Buffer
-/* istanbul ignore next */
-if (!B.alloc) {
- B = require('safe-buffer').Buffer
-}
-module.exports = B
diff --git a/node_modules/libcipm/node_modules/tar/lib/create.js b/node_modules/libcipm/node_modules/tar/lib/create.js
deleted file mode 100644
index a37aa52e6..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/create.js
+++ /dev/null
@@ -1,105 +0,0 @@
-'use strict'
-
-// tar -c
-const hlo = require('./high-level-opt.js')
-
-const Pack = require('./pack.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const t = require('./list.js')
-const path = require('path')
-
-const c = module.exports = (opt_, files, cb) => {
- if (typeof files === 'function')
- cb = files
-
- if (Array.isArray(opt_))
- files = opt_, opt_ = {}
-
- if (!files || !Array.isArray(files) || !files.length)
- throw new TypeError('no files or directories specified')
-
- files = Array.from(files)
-
- const opt = hlo(opt_)
-
- if (opt.sync && typeof cb === 'function')
- throw new TypeError('callback not supported for sync tar functions')
-
- if (!opt.file && typeof cb === 'function')
- throw new TypeError('callback only supported with file option')
-
- return opt.file && opt.sync ? createFileSync(opt, files)
- : opt.file ? createFile(opt, files, cb)
- : opt.sync ? createSync(opt, files)
- : create(opt, files)
-}
-
-const createFileSync = (opt, files) => {
- const p = new Pack.Sync(opt)
- const stream = new fsm.WriteStreamSync(opt.file, {
- mode: opt.mode || 0o666
- })
- p.pipe(stream)
- addFilesSync(p, files)
-}
-
-const createFile = (opt, files, cb) => {
- const p = new Pack(opt)
- const stream = new fsm.WriteStream(opt.file, {
- mode: opt.mode || 0o666
- })
- p.pipe(stream)
-
- const promise = new Promise((res, rej) => {
- stream.on('error', rej)
- stream.on('close', res)
- p.on('error', rej)
- })
-
- addFilesAsync(p, files)
-
- return cb ? promise.then(cb, cb) : promise
-}
-
-const addFilesSync = (p, files) => {
- files.forEach(file => {
- if (file.charAt(0) === '@')
- t({
- file: path.resolve(p.cwd, file.substr(1)),
- sync: true,
- noResume: true,
- onentry: entry => p.add(entry)
- })
- else
- p.add(file)
- })
- p.end()
-}
-
-const addFilesAsync = (p, files) => {
- while (files.length) {
- const file = files.shift()
- if (file.charAt(0) === '@')
- return t({
- file: path.resolve(p.cwd, file.substr(1)),
- noResume: true,
- onentry: entry => p.add(entry)
- }).then(_ => addFilesAsync(p, files))
- else
- p.add(file)
- }
- p.end()
-}
-
-const createSync = (opt, files) => {
- const p = new Pack.Sync(opt)
- addFilesSync(p, files)
- return p
-}
-
-const create = (opt, files) => {
- const p = new Pack(opt)
- addFilesAsync(p, files)
- return p
-}
diff --git a/node_modules/libcipm/node_modules/tar/lib/extract.js b/node_modules/libcipm/node_modules/tar/lib/extract.js
deleted file mode 100644
index cbb458a0a..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/extract.js
+++ /dev/null
@@ -1,112 +0,0 @@
-'use strict'
-
-// tar -x
-const hlo = require('./high-level-opt.js')
-const Unpack = require('./unpack.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const path = require('path')
-
-const x = module.exports = (opt_, files, cb) => {
- if (typeof opt_ === 'function')
- cb = opt_, files = null, opt_ = {}
- else if (Array.isArray(opt_))
- files = opt_, opt_ = {}
-
- if (typeof files === 'function')
- cb = files, files = null
-
- if (!files)
- files = []
- else
- files = Array.from(files)
-
- const opt = hlo(opt_)
-
- if (opt.sync && typeof cb === 'function')
- throw new TypeError('callback not supported for sync tar functions')
-
- if (!opt.file && typeof cb === 'function')
- throw new TypeError('callback only supported with file option')
-
- if (files.length)
- filesFilter(opt, files)
-
- return opt.file && opt.sync ? extractFileSync(opt)
- : opt.file ? extractFile(opt, cb)
- : opt.sync ? extractSync(opt)
- : extract(opt)
-}
-
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
- const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
- const filter = opt.filter
-
- const mapHas = (file, r) => {
- const root = r || path.parse(file).root || '.'
- const ret = file === root ? false
- : map.has(file) ? map.get(file)
- : mapHas(path.dirname(file), root)
-
- map.set(file, ret)
- return ret
- }
-
- opt.filter = filter
- ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
- : file => mapHas(file.replace(/\/+$/, ''))
-}
-
-const extractFileSync = opt => {
- const u = new Unpack.Sync(opt)
-
- const file = opt.file
- let threw = true
- let fd
- const stat = fs.statSync(file)
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- const readSize = opt.maxReadSize || 16*1024*1024
- const stream = new fsm.ReadStreamSync(file, {
- readSize: readSize,
- size: stat.size
- })
- stream.pipe(u)
-}
-
-const extractFile = (opt, cb) => {
- const u = new Unpack(opt)
- const readSize = opt.maxReadSize || 16*1024*1024
-
- const file = opt.file
- const p = new Promise((resolve, reject) => {
- u.on('error', reject)
- u.on('close', resolve)
-
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- fs.stat(file, (er, stat) => {
- if (er)
- reject(er)
- else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size
- })
- stream.on('error', reject)
- stream.pipe(u)
- }
- })
- })
- return cb ? p.then(cb, cb) : p
-}
-
-const extractSync = opt => {
- return new Unpack.Sync(opt)
-}
-
-const extract = opt => {
- return new Unpack(opt)
-}
diff --git a/node_modules/libcipm/node_modules/tar/lib/header.js b/node_modules/libcipm/node_modules/tar/lib/header.js
deleted file mode 100644
index d29c3b990..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/header.js
+++ /dev/null
@@ -1,289 +0,0 @@
-'use strict'
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-
-const Buffer = require('./buffer.js')
-const types = require('./types.js')
-const pathModule = require('path').posix
-const large = require('./large-numbers.js')
-
-const SLURP = Symbol('slurp')
-const TYPE = Symbol('type')
-
-class Header {
- constructor (data, off, ex, gex) {
- this.cksumValid = false
- this.needPax = false
- this.nullBlock = false
-
- this.block = null
- this.path = null
- this.mode = null
- this.uid = null
- this.gid = null
- this.size = null
- this.mtime = null
- this.cksum = null
- this[TYPE] = '0'
- this.linkpath = null
- this.uname = null
- this.gname = null
- this.devmaj = 0
- this.devmin = 0
- this.atime = null
- this.ctime = null
-
- if (Buffer.isBuffer(data))
- this.decode(data, off || 0, ex, gex)
- else if (data)
- this.set(data)
- }
-
- decode (buf, off, ex, gex) {
- if (!off)
- off = 0
-
- if (!buf || !(buf.length >= off + 512))
- throw new Error('need 512 bytes for header')
-
- this.path = decString(buf, off, 100)
- this.mode = decNumber(buf, off + 100, 8)
- this.uid = decNumber(buf, off + 108, 8)
- this.gid = decNumber(buf, off + 116, 8)
- this.size = decNumber(buf, off + 124, 12)
- this.mtime = decDate(buf, off + 136, 12)
- this.cksum = decNumber(buf, off + 148, 12)
-
- // if we have extended or global extended headers, apply them now
- // See https://github.com/npm/node-tar/pull/187
- this[SLURP](ex)
- this[SLURP](gex, true)
-
- // old tar versions marked dirs as a file with a trailing /
- this[TYPE] = decString(buf, off + 156, 1)
- if (this[TYPE] === '')
- this[TYPE] = '0'
- if (this[TYPE] === '0' && this.path.substr(-1) === '/')
- this[TYPE] = '5'
-
- // tar implementations sometimes incorrectly put the stat(dir).size
- // as the size in the tarball, even though Directory entries are
- // not able to have any body at all. In the very rare chance that
- // it actually DOES have a body, we weren't going to do anything with
- // it anyway, and it'll just be a warning about an invalid header.
- if (this[TYPE] === '5')
- this.size = 0
-
- this.linkpath = decString(buf, off + 157, 100)
- if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
- this.uname = decString(buf, off + 265, 32)
- this.gname = decString(buf, off + 297, 32)
- this.devmaj = decNumber(buf, off + 329, 8)
- this.devmin = decNumber(buf, off + 337, 8)
- if (buf[off + 475] !== 0) {
- // definitely a prefix, definitely >130 chars.
- const prefix = decString(buf, off + 345, 155)
- this.path = prefix + '/' + this.path
- } else {
- const prefix = decString(buf, off + 345, 130)
- if (prefix)
- this.path = prefix + '/' + this.path
- this.atime = decDate(buf, off + 476, 12)
- this.ctime = decDate(buf, off + 488, 12)
- }
- }
-
- let sum = 8 * 0x20
- for (let i = off; i < off + 148; i++) {
- sum += buf[i]
- }
- for (let i = off + 156; i < off + 512; i++) {
- sum += buf[i]
- }
- this.cksumValid = sum === this.cksum
- if (this.cksum === null && sum === 8 * 0x20)
- this.nullBlock = true
- }
-
- [SLURP] (ex, global) {
- for (let k in ex) {
- // we slurp in everything except for the path attribute in
- // a global extended header, because that's weird.
- if (ex[k] !== null && ex[k] !== undefined &&
- !(global && k === 'path'))
- this[k] = ex[k]
- }
- }
-
- encode (buf, off) {
- if (!buf) {
- buf = this.block = Buffer.alloc(512)
- off = 0
- }
-
- if (!off)
- off = 0
-
- if (!(buf.length >= off + 512))
- throw new Error('need 512 bytes for header')
-
- const prefixSize = this.ctime || this.atime ? 130 : 155
- const split = splitPrefix(this.path || '', prefixSize)
- const path = split[0]
- const prefix = split[1]
- this.needPax = split[2]
-
- this.needPax = encString(buf, off, 100, path) || this.needPax
- this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
- this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
- this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
- this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
- this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
- buf[off + 156] = this[TYPE].charCodeAt(0)
- this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
- buf.write('ustar\u000000', off + 257, 8)
- this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
- this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
- this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
- this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
- this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
- if (buf[off + 475] !== 0)
- this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
- else {
- this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
- this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
- this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
- }
-
- let sum = 8 * 0x20
- for (let i = off; i < off + 148; i++) {
- sum += buf[i]
- }
- for (let i = off + 156; i < off + 512; i++) {
- sum += buf[i]
- }
- this.cksum = sum
- encNumber(buf, off + 148, 8, this.cksum)
- this.cksumValid = true
-
- return this.needPax
- }
-
- set (data) {
- for (let i in data) {
- if (data[i] !== null && data[i] !== undefined)
- this[i] = data[i]
- }
- }
-
- get type () {
- return types.name.get(this[TYPE]) || this[TYPE]
- }
-
- get typeKey () {
- return this[TYPE]
- }
-
- set type (type) {
- if (types.code.has(type))
- this[TYPE] = types.code.get(type)
- else
- this[TYPE] = type
- }
-}
-
-const splitPrefix = (p, prefixSize) => {
- const pathSize = 100
- let pp = p
- let prefix = ''
- let ret
- const root = pathModule.parse(p).root || '.'
-
- if (Buffer.byteLength(pp) < pathSize)
- ret = [pp, prefix, false]
- else {
- // first set prefix to the dir, and path to the base
- prefix = pathModule.dirname(pp)
- pp = pathModule.basename(pp)
-
- do {
- // both fit!
- if (Buffer.byteLength(pp) <= pathSize &&
- Buffer.byteLength(prefix) <= prefixSize)
- ret = [pp, prefix, false]
-
- // prefix fits in prefix, but path doesn't fit in path
- else if (Buffer.byteLength(pp) > pathSize &&
- Buffer.byteLength(prefix) <= prefixSize)
- ret = [pp.substr(0, pathSize - 1), prefix, true]
-
- else {
- // make path take a bit from prefix
- pp = pathModule.join(pathModule.basename(prefix), pp)
- prefix = pathModule.dirname(prefix)
- }
- } while (prefix !== root && !ret)
-
- // at this point, found no resolution, just truncate
- if (!ret)
- ret = [p.substr(0, pathSize - 1), '', true]
- }
- return ret
-}
-
-const decString = (buf, off, size) =>
- buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
-
-const decDate = (buf, off, size) =>
- numToDate(decNumber(buf, off, size))
-
-const numToDate = num => num === null ? null : new Date(num * 1000)
-
-const decNumber = (buf, off, size) =>
- buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
- : decSmallNumber(buf, off, size)
-
-const nanNull = value => isNaN(value) ? null : value
-
-const decSmallNumber = (buf, off, size) =>
- nanNull(parseInt(
- buf.slice(off, off + size)
- .toString('utf8').replace(/\0.*$/, '').trim(), 8))
-
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
- 12: 0o77777777777,
- 8 : 0o7777777
-}
-
-const encNumber = (buf, off, size, number) =>
- number === null ? false :
- number > MAXNUM[size] || number < 0
- ? (large.encode(number, buf.slice(off, off + size)), true)
- : (encSmallNumber(buf, off, size, number), false)
-
-const encSmallNumber = (buf, off, size, number) =>
- buf.write(octalString(number, size), off, size, 'ascii')
-
-const octalString = (number, size) =>
- padOctal(Math.floor(number).toString(8), size)
-
-const padOctal = (string, size) =>
- (string.length === size - 1 ? string
- : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
-
-const encDate = (buf, off, size, date) =>
- date === null ? false :
- encNumber(buf, off, size, date.getTime() / 1000)
-
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0')
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, string) =>
- string === null ? false :
- (buf.write(string + NULLS, off, size, 'utf8'),
- string.length !== Buffer.byteLength(string) || string.length > size)
-
-module.exports = Header
diff --git a/node_modules/libcipm/node_modules/tar/lib/high-level-opt.js b/node_modules/libcipm/node_modules/tar/lib/high-level-opt.js
deleted file mode 100644
index 7333db915..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/high-level-opt.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-
-const argmap = new Map([
- ['C', 'cwd'],
- ['f', 'file'],
- ['z', 'gzip'],
- ['P', 'preservePaths'],
- ['U', 'unlink'],
- ['strip-components', 'strip'],
- ['stripComponents', 'strip'],
- ['keep-newer', 'newer'],
- ['keepNewer', 'newer'],
- ['keep-newer-files', 'newer'],
- ['keepNewerFiles', 'newer'],
- ['k', 'keep'],
- ['keep-existing', 'keep'],
- ['keepExisting', 'keep'],
- ['m', 'noMtime'],
- ['no-mtime', 'noMtime'],
- ['p', 'preserveOwner'],
- ['L', 'follow'],
- ['h', 'follow']
-])
-
-const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
- argmap.has(k) ? argmap.get(k) : k, opt[k]
-]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
diff --git a/node_modules/libcipm/node_modules/tar/lib/large-numbers.js b/node_modules/libcipm/node_modules/tar/lib/large-numbers.js
deleted file mode 100644
index 3e5c99255..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/large-numbers.js
+++ /dev/null
@@ -1,97 +0,0 @@
-'use strict'
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-
-const encode = exports.encode = (num, buf) => {
- if (!Number.isSafeInteger(num))
- // The number is so large that javascript cannot represent it with integer
- // precision.
- throw TypeError('cannot encode number outside of javascript safe integer range')
- else if (num < 0)
- encodeNegative(num, buf)
- else
- encodePositive(num, buf)
- return buf
-}
-
-const encodePositive = (num, buf) => {
- buf[0] = 0x80
-
- for (var i = buf.length; i > 1; i--) {
- buf[i-1] = num & 0xff
- num = Math.floor(num / 0x100)
- }
-}
-
-const encodeNegative = (num, buf) => {
- buf[0] = 0xff
- var flipped = false
- num = num * -1
- for (var i = buf.length; i > 1; i--) {
- var byte = num & 0xff
- num = Math.floor(num / 0x100)
- if (flipped)
- buf[i-1] = onesComp(byte)
- else if (byte === 0)
- buf[i-1] = 0
- else {
- flipped = true
- buf[i-1] = twosComp(byte)
- }
- }
-}
-
-const parse = exports.parse = (buf) => {
- var post = buf[buf.length - 1]
- var pre = buf[0]
- var value;
- if (pre === 0x80)
- value = pos(buf.slice(1, buf.length))
- else if (pre === 0xff)
- value = twos(buf)
- else
- throw TypeError('invalid base256 encoding')
-
- if (!Number.isSafeInteger(value))
- // The number is so large that javascript cannot represent it with integer
- // precision.
- throw TypeError('parsed number outside of javascript safe integer range')
-
- return value
-}
-
-const twos = (buf) => {
- var len = buf.length
- var sum = 0
- var flipped = false
- for (var i = len - 1; i > -1; i--) {
- var byte = buf[i]
- var f
- if (flipped)
- f = onesComp(byte)
- else if (byte === 0)
- f = byte
- else {
- flipped = true
- f = twosComp(byte)
- }
- if (f !== 0)
- sum -= f * Math.pow(256, len - i - 1)
- }
- return sum
-}
-
-const pos = (buf) => {
- var len = buf.length
- var sum = 0
- for (var i = len - 1; i > -1; i--) {
- var byte = buf[i]
- if (byte !== 0)
- sum += byte * Math.pow(256, len - i - 1)
- }
- return sum
-}
-
-const onesComp = byte => (0xff ^ byte) & 0xff
-
-const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
diff --git a/node_modules/libcipm/node_modules/tar/lib/list.js b/node_modules/libcipm/node_modules/tar/lib/list.js
deleted file mode 100644
index 250ebe001..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/list.js
+++ /dev/null
@@ -1,130 +0,0 @@
-'use strict'
-
-const Buffer = require('./buffer.js')
-
-// XXX: This shares a lot in common with extract.js
-// maybe some DRY opportunity here?
-
-// tar -t
-const hlo = require('./high-level-opt.js')
-const Parser = require('./parse.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const path = require('path')
-
-const t = module.exports = (opt_, files, cb) => {
- if (typeof opt_ === 'function')
- cb = opt_, files = null, opt_ = {}
- else if (Array.isArray(opt_))
- files = opt_, opt_ = {}
-
- if (typeof files === 'function')
- cb = files, files = null
-
- if (!files)
- files = []
- else
- files = Array.from(files)
-
- const opt = hlo(opt_)
-
- if (opt.sync && typeof cb === 'function')
- throw new TypeError('callback not supported for sync tar functions')
-
- if (!opt.file && typeof cb === 'function')
- throw new TypeError('callback only supported with file option')
-
- if (files.length)
- filesFilter(opt, files)
-
- if (!opt.noResume)
- onentryFunction(opt)
-
- return opt.file && opt.sync ? listFileSync(opt)
- : opt.file ? listFile(opt, cb)
- : list(opt)
-}
-
-const onentryFunction = opt => {
- const onentry = opt.onentry
- opt.onentry = onentry ? e => {
- onentry(e)
- e.resume()
- } : e => e.resume()
-}
-
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
- const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
- const filter = opt.filter
-
- const mapHas = (file, r) => {
- const root = r || path.parse(file).root || '.'
- const ret = file === root ? false
- : map.has(file) ? map.get(file)
- : mapHas(path.dirname(file), root)
-
- map.set(file, ret)
- return ret
- }
-
- opt.filter = filter
- ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
- : file => mapHas(file.replace(/\/+$/, ''))
-}
-
-const listFileSync = opt => {
- const p = list(opt)
- const file = opt.file
- let threw = true
- let fd
- try {
- const stat = fs.statSync(file)
- const readSize = opt.maxReadSize || 16*1024*1024
- if (stat.size < readSize) {
- p.end(fs.readFileSync(file))
- } else {
- let pos = 0
- const buf = Buffer.allocUnsafe(readSize)
- fd = fs.openSync(file, 'r')
- while (pos < stat.size) {
- let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
- pos += bytesRead
- p.write(buf.slice(0, bytesRead))
- }
- p.end()
- }
- threw = false
- } finally {
- if (threw && fd)
- try { fs.closeSync(fd) } catch (er) {}
- }
-}
-
-const listFile = (opt, cb) => {
- const parse = new Parser(opt)
- const readSize = opt.maxReadSize || 16*1024*1024
-
- const file = opt.file
- const p = new Promise((resolve, reject) => {
- parse.on('error', reject)
- parse.on('end', resolve)
-
- fs.stat(file, (er, stat) => {
- if (er)
- reject(er)
- else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size
- })
- stream.on('error', reject)
- stream.pipe(parse)
- }
- })
- })
- return cb ? p.then(cb, cb) : p
-}
-
-const list = opt => new Parser(opt)
diff --git a/node_modules/libcipm/node_modules/tar/lib/mkdir.js b/node_modules/libcipm/node_modules/tar/lib/mkdir.js
deleted file mode 100644
index c6a154c24..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/mkdir.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-// wrapper around mkdirp for tar's needs.
-
-// TODO: This should probably be a class, not functionally
-// passing around state in a gazillion args.
-
-const mkdirp = require('mkdirp')
-const fs = require('fs')
-const path = require('path')
-const chownr = require('chownr')
-
-class SymlinkError extends Error {
- constructor (symlink, path) {
- super('Cannot extract through symbolic link')
- this.path = path
- this.symlink = symlink
- }
-
- get name () {
- return 'SymlinkError'
- }
-}
-
-class CwdError extends Error {
- constructor (path, code) {
- super(code + ': Cannot cd into \'' + path + '\'')
- this.path = path
- this.code = code
- }
-
- get name () {
- return 'CwdError'
- }
-}
-
-const mkdir = module.exports = (dir, opt, cb) => {
- // if there's any overlap between mask and mode,
- // then we'll need an explicit chmod
- const umask = opt.umask
- const mode = opt.mode | 0o0700
- const needChmod = (mode & umask) !== 0
-
- const uid = opt.uid
- const gid = opt.gid
- const doChown = typeof uid === 'number' &&
- typeof gid === 'number' &&
- ( uid !== opt.processUid || gid !== opt.processGid )
-
- const preserve = opt.preserve
- const unlink = opt.unlink
- const cache = opt.cache
- const cwd = opt.cwd
-
- const done = (er, created) => {
- if (er)
- cb(er)
- else {
- cache.set(dir, true)
- if (created && doChown)
- chownr(created, uid, gid, er => done(er))
- else if (needChmod)
- fs.chmod(dir, mode, cb)
- else
- cb()
- }
- }
-
- if (cache && cache.get(dir) === true)
- return done()
-
- if (dir === cwd)
- return fs.stat(dir, (er, st) => {
- if (er || !st.isDirectory())
- er = new CwdError(dir, er && er.code || 'ENOTDIR')
- done(er)
- })
-
- if (preserve)
- return mkdirp(dir, mode, done)
-
- const sub = path.relative(cwd, dir)
- const parts = sub.split(/\/|\\/)
- mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
-}
-
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
- if (!parts.length)
- return cb(null, created)
- const p = parts.shift()
- const part = base + '/' + p
- if (cache.get(part))
- return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
- fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
-}
-
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
- if (er) {
- if (er.path && path.dirname(er.path) === cwd &&
- (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
- return cb(new CwdError(cwd, er.code))
-
- fs.lstat(part, (statEr, st) => {
- if (statEr)
- cb(statEr)
- else if (st.isDirectory())
- mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
- else if (unlink)
- fs.unlink(part, er => {
- if (er)
- return cb(er)
- fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
- })
- else if (st.isSymbolicLink())
- return cb(new SymlinkError(part, part + '/' + parts.join('/')))
- else
- cb(er)
- })
- } else {
- created = created || part
- mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
- }
-}
-
-const mkdirSync = module.exports.sync = (dir, opt) => {
- // if there's any overlap between mask and mode,
- // then we'll need an explicit chmod
- const umask = opt.umask
- const mode = opt.mode | 0o0700
- const needChmod = (mode & umask) !== 0
-
- const uid = opt.uid
- const gid = opt.gid
- const doChown = typeof uid === 'number' &&
- typeof gid === 'number' &&
- ( uid !== opt.processUid || gid !== opt.processGid )
-
- const preserve = opt.preserve
- const unlink = opt.unlink
- const cache = opt.cache
- const cwd = opt.cwd
-
- const done = (created) => {
- cache.set(dir, true)
- if (created && doChown)
- chownr.sync(created, uid, gid)
- if (needChmod)
- fs.chmodSync(dir, mode)
- }
-
- if (cache && cache.get(dir) === true)
- return done()
-
- if (dir === cwd) {
- let ok = false
- let code = 'ENOTDIR'
- try {
- ok = fs.statSync(dir).isDirectory()
- } catch (er) {
- code = er.code
- } finally {
- if (!ok)
- throw new CwdError(dir, code)
- }
- done()
- return
- }
-
- if (preserve)
- return done(mkdirp.sync(dir, mode))
-
- const sub = path.relative(cwd, dir)
- const parts = sub.split(/\/|\\/)
- let created = null
- for (let p = parts.shift(), part = cwd;
- p && (part += '/' + p);
- p = parts.shift()) {
-
- if (cache.get(part))
- continue
-
- try {
- fs.mkdirSync(part, mode)
- created = created || part
- cache.set(part, true)
- } catch (er) {
- if (er.path && path.dirname(er.path) === cwd &&
- (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
- return new CwdError(cwd, er.code)
-
- const st = fs.lstatSync(part)
- if (st.isDirectory()) {
- cache.set(part, true)
- continue
- } else if (unlink) {
- fs.unlinkSync(part)
- fs.mkdirSync(part, mode)
- created = created || part
- cache.set(part, true)
- continue
- } else if (st.isSymbolicLink())
- return new SymlinkError(part, part + '/' + parts.join('/'))
- }
- }
-
- return done(created)
-}
diff --git a/node_modules/libcipm/node_modules/tar/lib/mode-fix.js b/node_modules/libcipm/node_modules/tar/lib/mode-fix.js
deleted file mode 100644
index 3363a3b15..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/mode-fix.js
+++ /dev/null
@@ -1,14 +0,0 @@
-'use strict'
-module.exports = (mode, isDir) => {
- mode &= 0o7777
- // if dirs are readable, then they should be listable
- if (isDir) {
- if (mode & 0o400)
- mode |= 0o100
- if (mode & 0o40)
- mode |= 0o10
- if (mode & 0o4)
- mode |= 0o1
- }
- return mode
-}
diff --git a/node_modules/libcipm/node_modules/tar/lib/pack.js b/node_modules/libcipm/node_modules/tar/lib/pack.js
deleted file mode 100644
index 857cea910..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/pack.js
+++ /dev/null
@@ -1,404 +0,0 @@
-'use strict'
-
-const Buffer = require('./buffer.js')
-
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-
-class PackJob {
- constructor (path, absolute) {
- this.path = path || './'
- this.absolute = absolute
- this.entry = null
- this.stat = null
- this.readdir = null
- this.pending = false
- this.ignore = false
- this.piped = false
- }
-}
-
-const MiniPass = require('minipass')
-const zlib = require('minizlib')
-const ReadEntry = require('./read-entry.js')
-const WriteEntry = require('./write-entry.js')
-const WriteEntrySync = WriteEntry.Sync
-const WriteEntryTar = WriteEntry.Tar
-const Yallist = require('yallist')
-const EOF = Buffer.alloc(1024)
-const ONSTAT = Symbol('onStat')
-const ENDED = Symbol('ended')
-const QUEUE = Symbol('queue')
-const CURRENT = Symbol('current')
-const PROCESS = Symbol('process')
-const PROCESSING = Symbol('processing')
-const PROCESSJOB = Symbol('processJob')
-const JOBS = Symbol('jobs')
-const JOBDONE = Symbol('jobDone')
-const ADDFSENTRY = Symbol('addFSEntry')
-const ADDTARENTRY = Symbol('addTarEntry')
-const STAT = Symbol('stat')
-const READDIR = Symbol('readdir')
-const ONREADDIR = Symbol('onreaddir')
-const PIPE = Symbol('pipe')
-const ENTRY = Symbol('entry')
-const ENTRYOPT = Symbol('entryOpt')
-const WRITEENTRYCLASS = Symbol('writeEntryClass')
-const WRITE = Symbol('write')
-const ONDRAIN = Symbol('ondrain')
-
-const fs = require('fs')
-const path = require('path')
-const warner = require('./warn-mixin.js')
-
-const Pack = warner(class Pack extends MiniPass {
- constructor (opt) {
- super(opt)
- opt = opt || Object.create(null)
- this.opt = opt
- this.cwd = opt.cwd || process.cwd()
- this.maxReadSize = opt.maxReadSize
- this.preservePaths = !!opt.preservePaths
- this.strict = !!opt.strict
- this.noPax = !!opt.noPax
- this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
- this.linkCache = opt.linkCache || new Map()
- this.statCache = opt.statCache || new Map()
- this.readdirCache = opt.readdirCache || new Map()
-
- this[WRITEENTRYCLASS] = WriteEntry
- if (typeof opt.onwarn === 'function')
- this.on('warn', opt.onwarn)
-
- this.zip = null
- if (opt.gzip) {
- if (typeof opt.gzip !== 'object')
- opt.gzip = {}
- this.zip = new zlib.Gzip(opt.gzip)
- this.zip.on('data', chunk => super.write(chunk))
- this.zip.on('end', _ => super.end())
- this.zip.on('drain', _ => this[ONDRAIN]())
- this.on('resume', _ => this.zip.resume())
- } else
- this.on('drain', this[ONDRAIN])
-
- this.portable = !!opt.portable
- this.noDirRecurse = !!opt.noDirRecurse
- this.follow = !!opt.follow
- this.noMtime = !!opt.noMtime
- this.mtime = opt.mtime || null
-
- this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
-
- this[QUEUE] = new Yallist
- this[JOBS] = 0
- this.jobs = +opt.jobs || 4
- this[PROCESSING] = false
- this[ENDED] = false
- }
-
- [WRITE] (chunk) {
- return super.write(chunk)
- }
-
- add (path) {
- this.write(path)
- return this
- }
-
- end (path) {
- if (path)
- this.write(path)
- this[ENDED] = true
- this[PROCESS]()
- return this
- }
-
- write (path) {
- if (this[ENDED])
- throw new Error('write after end')
-
- if (path instanceof ReadEntry)
- this[ADDTARENTRY](path)
- else
- this[ADDFSENTRY](path)
- return this.flowing
- }
-
- [ADDTARENTRY] (p) {
- const absolute = path.resolve(this.cwd, p.path)
- if (this.prefix)
- p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
-
- // in this case, we don't have to wait for the stat
- if (!this.filter(p.path, p))
- p.resume()
- else {
- const job = new PackJob(p.path, absolute, false)
- job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
- job.entry.on('end', _ => this[JOBDONE](job))
- this[JOBS] += 1
- this[QUEUE].push(job)
- }
-
- this[PROCESS]()
- }
-
- [ADDFSENTRY] (p) {
- const absolute = path.resolve(this.cwd, p)
- if (this.prefix)
- p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
-
- this[QUEUE].push(new PackJob(p, absolute))
- this[PROCESS]()
- }
-
- [STAT] (job) {
- job.pending = true
- this[JOBS] += 1
- const stat = this.follow ? 'stat' : 'lstat'
- fs[stat](job.absolute, (er, stat) => {
- job.pending = false
- this[JOBS] -= 1
- if (er)
- this.emit('error', er)
- else
- this[ONSTAT](job, stat)
- })
- }
-
- [ONSTAT] (job, stat) {
- this.statCache.set(job.absolute, stat)
- job.stat = stat
-
- // now we have the stat, we can filter it.
- if (!this.filter(job.path, stat))
- job.ignore = true
-
- this[PROCESS]()
- }
-
- [READDIR] (job) {
- job.pending = true
- this[JOBS] += 1
- fs.readdir(job.absolute, (er, entries) => {
- job.pending = false
- this[JOBS] -= 1
- if (er)
- return this.emit('error', er)
- this[ONREADDIR](job, entries)
- })
- }
-
- [ONREADDIR] (job, entries) {
- this.readdirCache.set(job.absolute, entries)
- job.readdir = entries
- this[PROCESS]()
- }
-
- [PROCESS] () {
- if (this[PROCESSING])
- return
-
- this[PROCESSING] = true
- for (let w = this[QUEUE].head;
- w !== null && this[JOBS] < this.jobs;
- w = w.next) {
- this[PROCESSJOB](w.value)
- if (w.value.ignore) {
- const p = w.next
- this[QUEUE].removeNode(w)
- w.next = p
- }
- }
-
- this[PROCESSING] = false
-
- if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
- if (this.zip)
- this.zip.end(EOF)
- else {
- super.write(EOF)
- super.end()
- }
- }
- }
-
- get [CURRENT] () {
- return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
- }
-
- [JOBDONE] (job) {
- this[QUEUE].shift()
- this[JOBS] -= 1
- this[PROCESS]()
- }
-
- [PROCESSJOB] (job) {
- if (job.pending)
- return
-
- if (job.entry) {
- if (job === this[CURRENT] && !job.piped)
- this[PIPE](job)
- return
- }
-
- if (!job.stat) {
- if (this.statCache.has(job.absolute))
- this[ONSTAT](job, this.statCache.get(job.absolute))
- else
- this[STAT](job)
- }
- if (!job.stat)
- return
-
- // filtered out!
- if (job.ignore)
- return
-
- if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
- if (this.readdirCache.has(job.absolute))
- this[ONREADDIR](job, this.readdirCache.get(job.absolute))
- else
- this[READDIR](job)
- if (!job.readdir)
- return
- }
-
- // we know it doesn't have an entry, because that got checked above
- job.entry = this[ENTRY](job)
- if (!job.entry) {
- job.ignore = true
- return
- }
-
- if (job === this[CURRENT] && !job.piped)
- this[PIPE](job)
- }
-
- [ENTRYOPT] (job) {
- return {
- onwarn: (msg, data) => {
- this.warn(msg, data)
- },
- noPax: this.noPax,
- cwd: this.cwd,
- absolute: job.absolute,
- preservePaths: this.preservePaths,
- maxReadSize: this.maxReadSize,
- strict: this.strict,
- portable: this.portable,
- linkCache: this.linkCache,
- statCache: this.statCache,
- noMtime: this.noMtime,
- mtime: this.mtime
- }
- }
-
- [ENTRY] (job) {
- this[JOBS] += 1
- try {
- return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
- .on('end', () => this[JOBDONE](job))
- .on('error', er => this.emit('error', er))
- } catch (er) {
- this.emit('error', er)
- }
- }
-
- [ONDRAIN] () {
- if (this[CURRENT] && this[CURRENT].entry)
- this[CURRENT].entry.resume()
- }
-
- // like .pipe() but using super, because our write() is special
- [PIPE] (job) {
- job.piped = true
-
- if (job.readdir)
- job.readdir.forEach(entry => {
- const p = this.prefix ?
- job.path.slice(this.prefix.length + 1) || './'
- : job.path
-
- const base = p === './' ? '' : p.replace(/\/*$/, '/')
- this[ADDFSENTRY](base + entry)
- })
-
- const source = job.entry
- const zip = this.zip
-
- if (zip)
- source.on('data', chunk => {
- if (!zip.write(chunk))
- source.pause()
- })
- else
- source.on('data', chunk => {
- if (!super.write(chunk))
- source.pause()
- })
- }
-
- pause () {
- if (this.zip)
- this.zip.pause()
- return super.pause()
- }
-})
-
-class PackSync extends Pack {
- constructor (opt) {
- super(opt)
- this[WRITEENTRYCLASS] = WriteEntrySync
- }
-
- // pause/resume are no-ops in sync streams.
- pause () {}
- resume () {}
-
- [STAT] (job) {
- const stat = this.follow ? 'statSync' : 'lstatSync'
- this[ONSTAT](job, fs[stat](job.absolute))
- }
-
- [READDIR] (job, stat) {
- this[ONREADDIR](job, fs.readdirSync(job.absolute))
- }
-
- // gotta get it all in this tick
- [PIPE] (job) {
- const source = job.entry
- const zip = this.zip
-
- if (job.readdir)
- job.readdir.forEach(entry => {
- const p = this.prefix ?
- job.path.slice(this.prefix.length + 1) || './'
- : job.path
-
- const base = p === './' ? '' : p.replace(/\/*$/, '/')
- this[ADDFSENTRY](base + entry)
- })
-
- if (zip)
- source.on('data', chunk => {
- zip.write(chunk)
- })
- else
- source.on('data', chunk => {
- super[WRITE](chunk)
- })
- }
-}
-
-Pack.Sync = PackSync
-
-module.exports = Pack
diff --git a/node_modules/libcipm/node_modules/tar/lib/parse.js b/node_modules/libcipm/node_modules/tar/lib/parse.js
deleted file mode 100644
index 43d4383dd..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/parse.js
+++ /dev/null
@@ -1,428 +0,0 @@
-'use strict'
-
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in. We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted yet.
-// It only fills up if the user keeps write()ing after a write() has
-// returned false, or does a single write() with more than one entry in it.
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry. The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-
-const warner = require('./warn-mixin.js')
-const path = require('path')
-const Header = require('./header.js')
-const EE = require('events')
-const Yallist = require('yallist')
-const maxMetaEntrySize = 1024 * 1024
-const Entry = require('./read-entry.js')
-const Pax = require('./pax.js')
-const zlib = require('minizlib')
-const Buffer = require('./buffer.js')
-
-const gzipHeader = Buffer.from([0x1f, 0x8b])
-const STATE = Symbol('state')
-const WRITEENTRY = Symbol('writeEntry')
-const READENTRY = Symbol('readEntry')
-const NEXTENTRY = Symbol('nextEntry')
-const PROCESSENTRY = Symbol('processEntry')
-const EX = Symbol('extendedHeader')
-const GEX = Symbol('globalExtendedHeader')
-const META = Symbol('meta')
-const EMITMETA = Symbol('emitMeta')
-const BUFFER = Symbol('buffer')
-const QUEUE = Symbol('queue')
-const ENDED = Symbol('ended')
-const EMITTEDEND = Symbol('emittedEnd')
-const EMIT = Symbol('emit')
-const UNZIP = Symbol('unzip')
-const CONSUMECHUNK = Symbol('consumeChunk')
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
-const CONSUMEBODY = Symbol('consumeBody')
-const CONSUMEMETA = Symbol('consumeMeta')
-const CONSUMEHEADER = Symbol('consumeHeader')
-const CONSUMING = Symbol('consuming')
-const BUFFERCONCAT = Symbol('bufferConcat')
-const MAYBEEND = Symbol('maybeEnd')
-const WRITING = Symbol('writing')
-const ABORTED = Symbol('aborted')
-const DONE = Symbol('onDone')
-
-const noop = _ => true
-
-module.exports = warner(class Parser extends EE {
- constructor (opt) {
- opt = opt || {}
- super(opt)
-
- if (opt.ondone)
- this.on(DONE, opt.ondone)
- else
- this.on(DONE, _ => {
- this.emit('prefinish')
- this.emit('finish')
- this.emit('end')
- this.emit('close')
- })
-
- this.strict = !!opt.strict
- this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
- this.filter = typeof opt.filter === 'function' ? opt.filter : noop
-
- // have to set this so that streams are ok piping into it
- this.writable = true
- this.readable = false
-
- this[QUEUE] = new Yallist()
- this[BUFFER] = null
- this[READENTRY] = null
- this[WRITEENTRY] = null
- this[STATE] = 'begin'
- this[META] = ''
- this[EX] = null
- this[GEX] = null
- this[ENDED] = false
- this[UNZIP] = null
- this[ABORTED] = false
- if (typeof opt.onwarn === 'function')
- this.on('warn', opt.onwarn)
- if (typeof opt.onentry === 'function')
- this.on('entry', opt.onentry)
- }
-
- [CONSUMEHEADER] (chunk, position) {
- let header
- try {
- header = new Header(chunk, position, this[EX], this[GEX])
- } catch (er) {
- return this.warn('invalid entry', er)
- }
-
- if (header.nullBlock)
- this[EMIT]('nullBlock')
- else if (!header.cksumValid)
- this.warn('invalid entry', header)
- else if (!header.path)
- this.warn('invalid: path is required', header)
- else {
- const type = header.type
- if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
- this.warn('invalid: linkpath required', header)
- else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
- this.warn('invalid: linkpath forbidden', header)
- else {
- const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
-
- if (entry.meta) {
- if (entry.size > this.maxMetaEntrySize) {
- entry.ignore = true
- this[EMIT]('ignoredEntry', entry)
- this[STATE] = 'ignore'
- } else if (entry.size > 0) {
- this[META] = ''
- entry.on('data', c => this[META] += c)
- this[STATE] = 'meta'
- }
- } else {
-
- this[EX] = null
- entry.ignore = entry.ignore || !this.filter(entry.path, entry)
- if (entry.ignore) {
- this[EMIT]('ignoredEntry', entry)
- this[STATE] = entry.remain ? 'ignore' : 'begin'
- } else {
- if (entry.remain)
- this[STATE] = 'body'
- else {
- this[STATE] = 'begin'
- entry.end()
- }
-
- if (!this[READENTRY]) {
- this[QUEUE].push(entry)
- this[NEXTENTRY]()
- } else
- this[QUEUE].push(entry)
- }
- }
- }
- }
- }
-
- [PROCESSENTRY] (entry) {
- let go = true
-
- if (!entry) {
- this[READENTRY] = null
- go = false
- } else if (Array.isArray(entry))
- this.emit.apply(this, entry)
- else {
- this[READENTRY] = entry
- this.emit('entry', entry)
- if (!entry.emittedEnd) {
- entry.on('end', _ => this[NEXTENTRY]())
- go = false
- }
- }
-
- return go
- }
-
- [NEXTENTRY] () {
- do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
-
- if (!this[QUEUE].length) {
- // At this point, there's nothing in the queue, but we may have an
- // entry which is being consumed (readEntry).
- // If we don't, then we definitely can handle more data.
- // If we do, and either it's flowing, or it has never had any data
- // written to it, then it needs more.
- // The only other possibility is that it has returned false from a
- // write() call, so we wait for the next drain to continue.
- const re = this[READENTRY]
- const drainNow = !re || re.flowing || re.size === re.remain
- if (drainNow) {
- if (!this[WRITING])
- this.emit('drain')
- } else
- re.once('drain', _ => this.emit('drain'))
- }
- }
-
- [CONSUMEBODY] (chunk, position) {
- // write up to but no more than writeEntry.blockRemain
- const entry = this[WRITEENTRY]
- const br = entry.blockRemain
- const c = (br >= chunk.length && position === 0) ? chunk
- : chunk.slice(position, position + br)
-
- entry.write(c)
-
- if (!entry.blockRemain) {
- this[STATE] = 'begin'
- this[WRITEENTRY] = null
- entry.end()
- }
-
- return c.length
- }
-
- [CONSUMEMETA] (chunk, position) {
- const entry = this[WRITEENTRY]
- const ret = this[CONSUMEBODY](chunk, position)
-
- // if we finished, then the entry is reset
- if (!this[WRITEENTRY])
- this[EMITMETA](entry)
-
- return ret
- }
-
- [EMIT] (ev, data, extra) {
- if (!this[QUEUE].length && !this[READENTRY])
- this.emit(ev, data, extra)
- else
- this[QUEUE].push([ev, data, extra])
- }
-
- [EMITMETA] (entry) {
- this[EMIT]('meta', this[META])
- switch (entry.type) {
- case 'ExtendedHeader':
- case 'OldExtendedHeader':
- this[EX] = Pax.parse(this[META], this[EX], false)
- break
-
- case 'GlobalExtendedHeader':
- this[GEX] = Pax.parse(this[META], this[GEX], true)
- break
-
- case 'NextFileHasLongPath':
- case 'OldGnuLongPath':
- this[EX] = this[EX] || Object.create(null)
- this[EX].path = this[META].replace(/\0.*/, '')
- break
-
- case 'NextFileHasLongLinkpath':
- this[EX] = this[EX] || Object.create(null)
- this[EX].linkpath = this[META].replace(/\0.*/, '')
- break
-
- /* istanbul ignore next */
- default: throw new Error('unknown meta: ' + entry.type)
- }
- }
-
- abort (msg, error) {
- this[ABORTED] = true
- this.warn(msg, error)
- this.emit('abort', error)
- this.emit('error', error)
- }
-
- write (chunk) {
- if (this[ABORTED])
- return
-
- // first write, might be gzipped
- if (this[UNZIP] === null && chunk) {
- if (this[BUFFER]) {
- chunk = Buffer.concat([this[BUFFER], chunk])
- this[BUFFER] = null
- }
- if (chunk.length < gzipHeader.length) {
- this[BUFFER] = chunk
- return true
- }
- for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
- if (chunk[i] !== gzipHeader[i])
- this[UNZIP] = false
- }
- if (this[UNZIP] === null) {
- const ended = this[ENDED]
- this[ENDED] = false
- this[UNZIP] = new zlib.Unzip()
- this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
- this[UNZIP].on('error', er =>
- this.abort(er.message, er))
- this[UNZIP].on('end', _ => {
- this[ENDED] = true
- this[CONSUMECHUNK]()
- })
- this[WRITING] = true
- const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk)
- this[WRITING] = false
- return ret
- }
- }
-
- this[WRITING] = true
- if (this[UNZIP])
- this[UNZIP].write(chunk)
- else
- this[CONSUMECHUNK](chunk)
- this[WRITING] = false
-
- // return false if there's a queue, or if the current entry isn't flowing
- const ret =
- this[QUEUE].length ? false :
- this[READENTRY] ? this[READENTRY].flowing :
- true
-
- // if ret is false but the queue is empty, the current READENTRY must be clogged
- if (!ret && !this[QUEUE].length)
- this[READENTRY].once('drain', _ => this.emit('drain'))
-
- return ret
- }
-
- [BUFFERCONCAT] (c) {
- if (c && !this[ABORTED])
- this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
- }
-
- [MAYBEEND] () {
- if (this[ENDED] &&
- !this[EMITTEDEND] &&
- !this[ABORTED] &&
- !this[CONSUMING]) {
- this[EMITTEDEND] = true
- const entry = this[WRITEENTRY]
- if (entry && entry.blockRemain) {
- const have = this[BUFFER] ? this[BUFFER].length : 0
- this.warn('Truncated input (needed ' + entry.blockRemain +
- ' more bytes, only ' + have + ' available)', entry)
- if (this[BUFFER])
- entry.write(this[BUFFER])
- entry.end()
- }
- this[EMIT](DONE)
- }
- }
-
- [CONSUMECHUNK] (chunk) {
- if (this[CONSUMING]) {
- this[BUFFERCONCAT](chunk)
- } else if (!chunk && !this[BUFFER]) {
- this[MAYBEEND]()
- } else {
- this[CONSUMING] = true
- if (this[BUFFER]) {
- this[BUFFERCONCAT](chunk)
- const c = this[BUFFER]
- this[BUFFER] = null
- this[CONSUMECHUNKSUB](c)
- } else {
- this[CONSUMECHUNKSUB](chunk)
- }
-
- while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
- const c = this[BUFFER]
- this[BUFFER] = null
- this[CONSUMECHUNKSUB](c)
- }
- this[CONSUMING] = false
- }
-
- if (!this[BUFFER] || this[ENDED])
- this[MAYBEEND]()
- }
-
- [CONSUMECHUNKSUB] (chunk) {
- // we know that we are in CONSUMING mode, so anything written goes into
- // the buffer. Advance the position and put any remainder in the buffer.
- let position = 0
- let length = chunk.length
- while (position + 512 <= length && !this[ABORTED]) {
- switch (this[STATE]) {
- case 'begin':
- this[CONSUMEHEADER](chunk, position)
- position += 512
- break
-
- case 'ignore':
- case 'body':
- position += this[CONSUMEBODY](chunk, position)
- break
-
- case 'meta':
- position += this[CONSUMEMETA](chunk, position)
- break
-
- /* istanbul ignore next */
- default:
- throw new Error('invalid state: ' + this[STATE])
- }
- }
-
- if (position < length) {
- if (this[BUFFER])
- this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
- else
- this[BUFFER] = chunk.slice(position)
- }
- }
-
- end (chunk) {
- if (!this[ABORTED]) {
- if (this[UNZIP])
- this[UNZIP].end(chunk)
- else {
- this[ENDED] = true
- this.write(chunk)
- }
- }
- }
-})
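For orientation, a minimal sketch of feeding this parser raw tar bytes; it uses only the onentry/onwarn options and the writable surface shown above, and the require path and archive name are illustrative:

```js
'use strict'
const fs = require('fs')
const Parser = require('./lib/parse.js')   // illustrative path into the removed lib/

const parser = new Parser({
  // every entry is a paused ReadEntry stream; resume() discards the body so
  // parsing can move on to the next header
  onentry: entry => {
    console.log(entry.type, entry.path, entry.size)
    entry.resume()
  },
  onwarn: (msg, data) => console.warn('tar warning:', msg)
})

// writable = true above, so a plain pipe works, gzipped input included
fs.createReadStream('archive.tar').pipe(parser)
```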
diff --git a/node_modules/libcipm/node_modules/tar/lib/pax.js b/node_modules/libcipm/node_modules/tar/lib/pax.js
deleted file mode 100644
index 9d7e4aba5..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/pax.js
+++ /dev/null
@@ -1,146 +0,0 @@
-'use strict'
-const Buffer = require('./buffer.js')
-const Header = require('./header.js')
-const path = require('path')
-
-class Pax {
- constructor (obj, global) {
- this.atime = obj.atime || null
- this.charset = obj.charset || null
- this.comment = obj.comment || null
- this.ctime = obj.ctime || null
- this.gid = obj.gid || null
- this.gname = obj.gname || null
- this.linkpath = obj.linkpath || null
- this.mtime = obj.mtime || null
- this.path = obj.path || null
- this.size = obj.size || null
- this.uid = obj.uid || null
- this.uname = obj.uname || null
- this.dev = obj.dev || null
- this.ino = obj.ino || null
- this.nlink = obj.nlink || null
- this.global = global || false
- }
-
- encode () {
- const body = this.encodeBody()
- if (body === '')
- return null
-
- const bodyLen = Buffer.byteLength(body)
- // round up to 512 bytes
- // add 512 for header
- const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
- const buf = Buffer.allocUnsafe(bufLen)
-
- // 0-fill the header section, it might not hit every field
- for (let i = 0; i < 512; i++) {
- buf[i] = 0
- }
-
- new Header({
- // XXX split the path
- // then the path should be PaxHeader + basename, but less than 99,
- // prepend with the dirname
- path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
- mode: this.mode || 0o644,
- uid: this.uid || null,
- gid: this.gid || null,
- size: bodyLen,
- mtime: this.mtime || null,
- type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
- linkpath: '',
- uname: this.uname || '',
- gname: this.gname || '',
- devmaj: 0,
- devmin: 0,
- atime: this.atime || null,
- ctime: this.ctime || null
- }).encode(buf)
-
- buf.write(body, 512, bodyLen, 'utf8')
-
- // null pad after the body
- for (let i = bodyLen + 512; i < buf.length; i++) {
- buf[i] = 0
- }
-
- return buf
- }
-
- encodeBody () {
- return (
- this.encodeField('path') +
- this.encodeField('ctime') +
- this.encodeField('atime') +
- this.encodeField('dev') +
- this.encodeField('ino') +
- this.encodeField('nlink') +
- this.encodeField('charset') +
- this.encodeField('comment') +
- this.encodeField('gid') +
- this.encodeField('gname') +
- this.encodeField('linkpath') +
- this.encodeField('mtime') +
- this.encodeField('size') +
- this.encodeField('uid') +
- this.encodeField('uname')
- )
- }
-
- encodeField (field) {
- if (this[field] === null || this[field] === undefined)
- return ''
- const v = this[field] instanceof Date ? this[field].getTime() / 1000
- : this[field]
- const s = ' ' +
- (field === 'dev' || field === 'ino' || field === 'nlink'
- ? 'SCHILY.' : '') +
- field + '=' + v + '\n'
- const byteLen = Buffer.byteLength(s)
- // the length prefix counts its own digits in ascii base-10:
- // e.g. a 9-byte record plus a 1-digit length is 10 bytes, which needs
- // two digits, so the total comes to 11 chars.
- let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
- if (byteLen + digits >= Math.pow(10, digits))
- digits += 1
- const len = digits + byteLen
- return len + s
- }
-}
-
-Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
-
-const merge = (a, b) =>
- b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
-
-const parseKV = string =>
- string
- .replace(/\n$/, '')
- .split('\n')
- .reduce(parseKVLine, Object.create(null))
-
-const parseKVLine = (set, line) => {
- const n = parseInt(line, 10)
-
- // XXX Values with \n in them will fail this.
- // Refactor to not be a naive line-by-line parse.
- if (n !== Buffer.byteLength(line) + 1)
- return set
-
- line = line.substr((n + ' ').length)
- const kv = line.split('=')
- const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
- if (!k)
- return set
-
- const v = kv.join('=')
- set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
- ? new Date(v * 1000)
- : /^[0-9]+$/.test(v) ? +v
- : v
- return set
-}
-
-module.exports = Pax
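A small round-trip sketch of the Pax class above: encode one extended-header block, then parse the key/value body back. The path and field values are illustrative:

```js
'use strict'
const Pax = require('./lib/pax.js')   // illustrative path into the removed lib/

// encode() yields a 512-byte PaxHeader block plus the body, padded to a 512 multiple
const pax = new Pax({ path: 'a/very/long/path/name.txt', mtime: new Date() })
const block = pax.encode()
console.log(block.length % 512)   // 0

// the body is a series of "<len> key=value\n" records, which parse() reads back
const parsed = Pax.parse(pax.encodeBody(), null, false)
console.log(parsed.path)                   // 'a/very/long/path/name.txt'
console.log(parsed.mtime instanceof Date)  // true
```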
diff --git a/node_modules/libcipm/node_modules/tar/lib/read-entry.js b/node_modules/libcipm/node_modules/tar/lib/read-entry.js
deleted file mode 100644
index 8acee94ba..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/read-entry.js
+++ /dev/null
@@ -1,98 +0,0 @@
-'use strict'
-const types = require('./types.js')
-const MiniPass = require('minipass')
-
-const SLURP = Symbol('slurp')
-module.exports = class ReadEntry extends MiniPass {
- constructor (header, ex, gex) {
- super()
- // read entries always start life paused. this is to avoid the
- // situation where Minipass's auto-ending empty streams results
- // in an entry ending before we're ready for it.
- this.pause()
- this.extended = ex
- this.globalExtended = gex
- this.header = header
- this.startBlockSize = 512 * Math.ceil(header.size / 512)
- this.blockRemain = this.startBlockSize
- this.remain = header.size
- this.type = header.type
- this.meta = false
- this.ignore = false
- switch (this.type) {
- case 'File':
- case 'OldFile':
- case 'Link':
- case 'SymbolicLink':
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'Directory':
- case 'FIFO':
- case 'ContiguousFile':
- case 'GNUDumpDir':
- break
-
- case 'NextFileHasLongLinkpath':
- case 'NextFileHasLongPath':
- case 'OldGnuLongPath':
- case 'GlobalExtendedHeader':
- case 'ExtendedHeader':
- case 'OldExtendedHeader':
- this.meta = true
- break
-
- // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
- // it may be worth doing the same, but with a warning.
- default:
- this.ignore = true
- }
-
- this.path = header.path
- this.mode = header.mode
- if (this.mode)
- this.mode = this.mode & 0o7777
- this.uid = header.uid
- this.gid = header.gid
- this.uname = header.uname
- this.gname = header.gname
- this.size = header.size
- this.mtime = header.mtime
- this.atime = header.atime
- this.ctime = header.ctime
- this.linkpath = header.linkpath
- this.uname = header.uname
- this.gname = header.gname
-
- if (ex) this[SLURP](ex)
- if (gex) this[SLURP](gex, true)
- }
-
- write (data) {
- const writeLen = data.length
- if (writeLen > this.blockRemain)
- throw new Error('writing more to entry than is appropriate')
-
- const r = this.remain
- const br = this.blockRemain
- this.remain = Math.max(0, r - writeLen)
- this.blockRemain = Math.max(0, br - writeLen)
- if (this.ignore)
- return true
-
- if (r >= writeLen)
- return super.write(data)
-
- // r < writeLen
- return super.write(data.slice(0, r))
- }
-
- [SLURP] (ex, global) {
- for (let k in ex) {
- // we slurp in everything except for the path attribute in
- // a global extended header, because that's weird.
- if (ex[k] !== null && ex[k] !== undefined &&
- !(global && k === 'path'))
- this[k] = ex[k]
- }
- }
-}
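Because these entries start out paused, collecting one file's body out of a parse looks like this sketch (archive and entry names are illustrative):

```js
'use strict'
const fs = require('fs')
const Parser = require('./lib/parse.js')   // illustrative path into the removed lib/

const parser = new Parser({
  onentry: entry => {
    if (entry.path !== 'package.json')
      return entry.resume()                // skipped bodies still have to be drained
    const chunks = []
    entry.on('data', c => chunks.push(c))  // attaching 'data' un-pauses the entry
    entry.on('end', () => console.log(Buffer.concat(chunks).toString('utf8')))
  }
})

fs.createReadStream('archive.tar').pipe(parser)
```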
diff --git a/node_modules/libcipm/node_modules/tar/lib/replace.js b/node_modules/libcipm/node_modules/tar/lib/replace.js
deleted file mode 100644
index 571cee94a..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/replace.js
+++ /dev/null
@@ -1,220 +0,0 @@
-'use strict'
-const Buffer = require('./buffer.js')
-
-// tar -r
-const hlo = require('./high-level-opt.js')
-const Pack = require('./pack.js')
-const Parse = require('./parse.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const t = require('./list.js')
-const path = require('path')
-
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-
-const Header = require('./header.js')
-
-const r = module.exports = (opt_, files, cb) => {
- const opt = hlo(opt_)
-
- if (!opt.file)
- throw new TypeError('file is required')
-
- if (opt.gzip)
- throw new TypeError('cannot append to compressed archives')
-
- if (!files || !Array.isArray(files) || !files.length)
- throw new TypeError('no files or directories specified')
-
- files = Array.from(files)
-
- return opt.sync ? replaceSync(opt, files)
- : replace(opt, files, cb)
-}
-
-const replaceSync = (opt, files) => {
- const p = new Pack.Sync(opt)
-
- let threw = true
- let fd
- let position
-
- try {
- try {
- fd = fs.openSync(opt.file, 'r+')
- } catch (er) {
- if (er.code === 'ENOENT')
- fd = fs.openSync(opt.file, 'w+')
- else
- throw er
- }
-
- const st = fs.fstatSync(fd)
- const headBuf = Buffer.alloc(512)
-
- POSITION: for (position = 0; position < st.size; position += 512) {
- for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
- bytes = fs.readSync(
- fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
- )
-
- if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
- throw new Error('cannot append to compressed archives')
-
- if (!bytes)
- break POSITION
- }
-
- let h = new Header(headBuf)
- if (!h.cksumValid)
- break
- let entryBlockSize = 512 * Math.ceil(h.size / 512)
- if (position + entryBlockSize + 512 > st.size)
- break
- // jump ahead over all the blocks of the entry body; the 512 for the
- // header we just parsed is added by the loop increment
- position += entryBlockSize
- if (opt.mtimeCache)
- opt.mtimeCache.set(h.path, h.mtime)
- }
- threw = false
-
- streamSync(opt, p, position, fd, files)
- } finally {
- if (threw)
- try { fs.closeSync(fd) } catch (er) {}
- }
-}
-
-const streamSync = (opt, p, position, fd, files) => {
- const stream = new fsm.WriteStreamSync(opt.file, {
- fd: fd,
- start: position
- })
- p.pipe(stream)
- addFilesSync(p, files)
-}
-
-const replace = (opt, files, cb) => {
- files = Array.from(files)
- const p = new Pack(opt)
-
- const getPos = (fd, size, cb_) => {
- const cb = (er, pos) => {
- if (er)
- fs.close(fd, _ => cb_(er))
- else
- cb_(null, pos)
- }
-
- let position = 0
- if (size === 0)
- return cb(null, 0)
-
- let bufPos = 0
- const headBuf = Buffer.alloc(512)
- const onread = (er, bytes) => {
- if (er)
- return cb(er)
- bufPos += bytes
- if (bufPos < 512 && bytes)
- return fs.read(
- fd, headBuf, bufPos, headBuf.length - bufPos,
- position + bufPos, onread
- )
-
- if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
- return cb(new Error('cannot append to compressed archives'))
-
- // truncated header
- if (bufPos < 512)
- return cb(null, position)
-
- const h = new Header(headBuf)
- if (!h.cksumValid)
- return cb(null, position)
-
- const entryBlockSize = 512 * Math.ceil(h.size / 512)
- if (position + entryBlockSize + 512 > size)
- return cb(null, position)
-
- position += entryBlockSize + 512
- if (position >= size)
- return cb(null, position)
-
- if (opt.mtimeCache)
- opt.mtimeCache.set(h.path, h.mtime)
- bufPos = 0
- fs.read(fd, headBuf, 0, 512, position, onread)
- }
- fs.read(fd, headBuf, 0, 512, position, onread)
- }
-
- const promise = new Promise((resolve, reject) => {
- p.on('error', reject)
- let flag = 'r+'
- const onopen = (er, fd) => {
- if (er && er.code === 'ENOENT' && flag === 'r+') {
- flag = 'w+'
- return fs.open(opt.file, flag, onopen)
- }
-
- if (er)
- return reject(er)
-
- fs.fstat(fd, (er, st) => {
- if (er)
- return reject(er)
- getPos(fd, st.size, (er, position) => {
- if (er)
- return reject(er)
- const stream = new fsm.WriteStream(opt.file, {
- fd: fd,
- start: position
- })
- p.pipe(stream)
- stream.on('error', reject)
- stream.on('close', resolve)
- addFilesAsync(p, files)
- })
- })
- }
- fs.open(opt.file, flag, onopen)
- })
-
- return cb ? promise.then(cb, cb) : promise
-}
-
-const addFilesSync = (p, files) => {
- files.forEach(file => {
- if (file.charAt(0) === '@')
- t({
- file: path.resolve(p.cwd, file.substr(1)),
- sync: true,
- noResume: true,
- onentry: entry => p.add(entry)
- })
- else
- p.add(file)
- })
- p.end()
-}
-
-const addFilesAsync = (p, files) => {
- while (files.length) {
- const file = files.shift()
- if (file.charAt(0) === '@')
- return t({
- file: path.resolve(p.cwd, file.substr(1)),
- noResume: true,
- onentry: entry => p.add(entry)
- }).then(_ => addFilesAsync(p, files))
- else
- p.add(file)
- }
- p.end()
-}
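A sketch of calling the removed replace.js directly: it appends entries to an existing, uncompressed archive and returns a promise unless a callback is passed (file names are illustrative):

```js
'use strict'
const r = require('./lib/replace.js')   // illustrative path into the removed lib/

// async form: resolves once the appended entries are flushed to out.tar
r({ file: 'out.tar' }, ['package.json', 'README.md'])
  .then(() => console.log('appended'))
  .catch(er => console.error('append failed:', er))

// sync form does all the work in this tick:
// r({ file: 'out.tar', sync: true }, ['package.json'])
```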
diff --git a/node_modules/libcipm/node_modules/tar/lib/types.js b/node_modules/libcipm/node_modules/tar/lib/types.js
deleted file mode 100644
index df425652b..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/types.js
+++ /dev/null
@@ -1,44 +0,0 @@
-'use strict'
-// map types from key to human-friendly name
-exports.name = new Map([
- ['0', 'File'],
- // same as File
- ['', 'OldFile'],
- ['1', 'Link'],
- ['2', 'SymbolicLink'],
- // Devices and FIFOs aren't fully supported
- // they are parsed, but skipped when unpacking
- ['3', 'CharacterDevice'],
- ['4', 'BlockDevice'],
- ['5', 'Directory'],
- ['6', 'FIFO'],
- // same as File
- ['7', 'ContiguousFile'],
- // pax headers
- ['g', 'GlobalExtendedHeader'],
- ['x', 'ExtendedHeader'],
- // vendor-specific stuff
- // skip
- ['A', 'SolarisACL'],
- // like 5, but with data, which should be skipped
- ['D', 'GNUDumpDir'],
- // metadata only, skip
- ['I', 'Inode'],
- // data = link path of next file
- ['K', 'NextFileHasLongLinkpath'],
- // data = path of next file
- ['L', 'NextFileHasLongPath'],
- // skip
- ['M', 'ContinuationFile'],
- // like L
- ['N', 'OldGnuLongPath'],
- // skip
- ['S', 'SparseFile'],
- // skip
- ['V', 'TapeVolumeHeader'],
- // like x
- ['X', 'OldExtendedHeader']
-])
-
-// map the other direction
-exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
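The two maps above translate in both directions between the header's one-character type flag and a readable name; a quick sketch:

```js
'use strict'
const types = require('./lib/types.js')   // illustrative path into the removed lib/

console.log(types.name.get('5'))             // 'Directory'
console.log(types.code.get('SymbolicLink'))  // '2'
console.log(types.name.get('q'))             // undefined: not a known type flag
```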
diff --git a/node_modules/libcipm/node_modules/tar/lib/unpack.js b/node_modules/libcipm/node_modules/tar/lib/unpack.js
deleted file mode 100644
index fc765096e..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/unpack.js
+++ /dev/null
@@ -1,621 +0,0 @@
-'use strict'
-
-const assert = require('assert')
-const EE = require('events').EventEmitter
-const Parser = require('./parse.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const path = require('path')
-const mkdir = require('./mkdir.js')
-const mkdirSync = mkdir.sync
-const wc = require('./winchars.js')
-
-const ONENTRY = Symbol('onEntry')
-const CHECKFS = Symbol('checkFs')
-const ISREUSABLE = Symbol('isReusable')
-const MAKEFS = Symbol('makeFs')
-const FILE = Symbol('file')
-const DIRECTORY = Symbol('directory')
-const LINK = Symbol('link')
-const SYMLINK = Symbol('symlink')
-const HARDLINK = Symbol('hardlink')
-const UNSUPPORTED = Symbol('unsupported')
-const UNKNOWN = Symbol('unknown')
-const CHECKPATH = Symbol('checkPath')
-const MKDIR = Symbol('mkdir')
-const ONERROR = Symbol('onError')
-const PENDING = Symbol('pending')
-const PEND = Symbol('pend')
-const UNPEND = Symbol('unpend')
-const ENDED = Symbol('ended')
-const MAYBECLOSE = Symbol('maybeClose')
-const SKIP = Symbol('skip')
-const DOCHOWN = Symbol('doChown')
-const UID = Symbol('uid')
-const GID = Symbol('gid')
-const crypto = require('crypto')
-
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file. It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* istanbul ignore next */
-const unlinkFile = (path, cb) => {
- if (process.platform !== 'win32')
- return fs.unlink(path, cb)
-
- const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
- fs.rename(path, name, er => {
- if (er)
- return cb(er)
- fs.unlink(name, cb)
- })
-}
-
-/* istanbul ignore next */
-const unlinkFileSync = path => {
- if (process.platform !== 'win32')
- return fs.unlinkSync(path)
-
- const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
- fs.renameSync(path, name)
- fs.unlinkSync(name)
-}
-
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) =>
- a === a >>> 0 ? a
- : b === b >>> 0 ? b
- : c
-
-class Unpack extends Parser {
- constructor (opt) {
- if (!opt)
- opt = {}
-
- opt.ondone = _ => {
- this[ENDED] = true
- this[MAYBECLOSE]()
- }
-
- super(opt)
-
- this.transform = typeof opt.transform === 'function' ? opt.transform : null
-
- this.writable = true
- this.readable = false
-
- this[PENDING] = 0
- this[ENDED] = false
-
- this.dirCache = opt.dirCache || new Map()
-
- if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
- // need both or neither
- if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
- throw new TypeError('cannot set owner without number uid and gid')
- if (opt.preserveOwner)
- throw new TypeError(
- 'cannot preserve owner in archive and also set owner explicitly')
- this.uid = opt.uid
- this.gid = opt.gid
- this.setOwner = true
- } else {
- this.uid = null
- this.gid = null
- this.setOwner = false
- }
-
- // default true for root
- if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
- this.preserveOwner = process.getuid && process.getuid() === 0
- else
- this.preserveOwner = !!opt.preserveOwner
-
- this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
- process.getuid() : null
- this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
- process.getgid() : null
-
- // mostly just for testing, but useful in some cases.
- // Forcibly trigger a chown on every entry, no matter what
- this.forceChown = opt.forceChown === true
-
- // turn ><?| in filenames into 0xf000-higher encoded forms
- this.win32 = !!opt.win32 || process.platform === 'win32'
-
- // do not unpack over files that are newer than what's in the archive
- this.newer = !!opt.newer
-
- // do not unpack over ANY files
- this.keep = !!opt.keep
-
- // do not set mtime/atime of extracted entries
- this.noMtime = !!opt.noMtime
-
- // allow .., absolute path entries, and unpacking through symlinks
- // without this, warn and skip .., relativize absolutes, and error
- // on symlinks in extraction path
- this.preservePaths = !!opt.preservePaths
-
- // unlink files and links before writing. This breaks existing hard
- // links, and removes symlink directories rather than erroring
- this.unlink = !!opt.unlink
-
- this.cwd = path.resolve(opt.cwd || process.cwd())
- this.strip = +opt.strip || 0
- this.processUmask = process.umask()
- this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
- // default mode for dirs created as parents
- this.dmode = opt.dmode || (0o0777 & (~this.umask))
- this.fmode = opt.fmode || (0o0666 & (~this.umask))
- this.on('entry', entry => this[ONENTRY](entry))
- }
-
- [MAYBECLOSE] () {
- if (this[ENDED] && this[PENDING] === 0) {
- this.emit('prefinish')
- this.emit('finish')
- this.emit('end')
- this.emit('close')
- }
- }
-
- [CHECKPATH] (entry) {
- if (this.strip) {
- const parts = entry.path.split(/\/|\\/)
- if (parts.length < this.strip)
- return false
- entry.path = parts.slice(this.strip).join('/')
-
- if (entry.type === 'Link') {
- const linkparts = entry.linkpath.split(/\/|\\/)
- if (linkparts.length >= this.strip)
- entry.linkpath = linkparts.slice(this.strip).join('/')
- }
- }
-
- if (!this.preservePaths) {
- const p = entry.path
- if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
- this.warn('path contains \'..\'', p)
- return false
- }
-
- // absolutes on posix are also absolutes on win32
- // so we only need to test this one to get both
- if (path.win32.isAbsolute(p)) {
- const parsed = path.win32.parse(p)
- this.warn('stripping ' + parsed.root + ' from absolute path', p)
- entry.path = p.substr(parsed.root.length)
- }
- }
-
- // only encode : chars that aren't drive letter indicators
- if (this.win32) {
- const parsed = path.win32.parse(entry.path)
- entry.path = parsed.root === '' ? wc.encode(entry.path)
- : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
- }
-
- if (path.isAbsolute(entry.path))
- entry.absolute = entry.path
- else
- entry.absolute = path.resolve(this.cwd, entry.path)
-
- return true
- }
-
- [ONENTRY] (entry) {
- if (!this[CHECKPATH](entry))
- return entry.resume()
-
- assert.equal(typeof entry.absolute, 'string')
-
- switch (entry.type) {
- case 'Directory':
- case 'GNUDumpDir':
- if (entry.mode)
- entry.mode = entry.mode | 0o700
-
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- case 'Link':
- case 'SymbolicLink':
- return this[CHECKFS](entry)
-
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'FIFO':
- return this[UNSUPPORTED](entry)
- }
- }
-
- [ONERROR] (er, entry) {
- // Cwd has to exist, or else nothing works. That's serious.
- // Other errors are warnings, which raise the error in strict
- // mode, but otherwise continue on.
- if (er.name === 'CwdError')
- this.emit('error', er)
- else {
- this.warn(er.message, er)
- this[UNPEND]()
- entry.resume()
- }
- }
-
- [MKDIR] (dir, mode, cb) {
- mkdir(dir, {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode
- }, cb)
- }
-
- [DOCHOWN] (entry) {
- // in preserve owner mode, chown if the entry doesn't match process
- // in set owner mode, chown if setting doesn't match process
- return this.forceChown ||
- this.preserveOwner &&
- ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
- typeof entry.gid === 'number' && entry.gid !== this.processGid )
- ||
- ( typeof this.uid === 'number' && this.uid !== this.processUid ||
- typeof this.gid === 'number' && this.gid !== this.processGid )
- }
-
- [UID] (entry) {
- return uint32(this.uid, entry.uid, this.processUid)
- }
-
- [GID] (entry) {
- return uint32(this.gid, entry.gid, this.processGid)
- }
-
- [FILE] (entry) {
- const mode = entry.mode & 0o7777 || this.fmode
- const stream = new fsm.WriteStream(entry.absolute, {
- mode: mode,
- autoClose: false
- })
- stream.on('error', er => this[ONERROR](er, entry))
-
- let actions = 1
- const done = er => {
- if (er)
- return this[ONERROR](er, entry)
-
- if (--actions === 0)
- fs.close(stream.fd, _ => this[UNPEND]())
- }
-
- stream.on('finish', _ => {
- // if futimes fails, try utimes
- // if utimes fails, fail with the original error
- // same for fchown/chown
- const abs = entry.absolute
- const fd = stream.fd
-
- if (entry.mtime && !this.noMtime) {
- actions++
- const atime = entry.atime || new Date()
- const mtime = entry.mtime
- fs.futimes(fd, atime, mtime, er =>
- er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
- : done())
- }
-
- if (this[DOCHOWN](entry)) {
- actions++
- const uid = this[UID](entry)
- const gid = this[GID](entry)
- fs.fchown(fd, uid, gid, er =>
- er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
- : done())
- }
-
- done()
- })
-
- const tx = this.transform ? this.transform(entry) || entry : entry
- if (tx !== entry) {
- tx.on('error', er => this[ONERROR](er, entry))
- entry.pipe(tx)
- }
- tx.pipe(stream)
- }
-
- [DIRECTORY] (entry) {
- const mode = entry.mode & 0o7777 || this.dmode
- this[MKDIR](entry.absolute, mode, er => {
- if (er)
- return this[ONERROR](er, entry)
-
- let actions = 1
- const done = _ => {
- if (--actions === 0) {
- this[UNPEND]()
- entry.resume()
- }
- }
-
- if (entry.mtime && !this.noMtime) {
- actions++
- fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
- }
-
- if (this[DOCHOWN](entry)) {
- actions++
- fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
- }
-
- done()
- })
- }
-
- [UNSUPPORTED] (entry) {
- this.warn('unsupported entry type: ' + entry.type, entry)
- entry.resume()
- }
-
- [SYMLINK] (entry) {
- this[LINK](entry, entry.linkpath, 'symlink')
- }
-
- [HARDLINK] (entry) {
- this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
- }
-
- [PEND] () {
- this[PENDING]++
- }
-
- [UNPEND] () {
- this[PENDING]--
- this[MAYBECLOSE]()
- }
-
- [SKIP] (entry) {
- this[UNPEND]()
- entry.resume()
- }
-
- // Check if we can reuse an existing filesystem entry safely and
- // overwrite it, rather than unlinking and recreating
- // Windows doesn't report a useful nlink, so we just never reuse entries
- [ISREUSABLE] (entry, st) {
- return entry.type === 'File' &&
- !this.unlink &&
- st.isFile() &&
- st.nlink <= 1 &&
- process.platform !== 'win32'
- }
-
- // check if a thing is there, and if so, try to clobber it
- [CHECKFS] (entry) {
- this[PEND]()
- this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
- if (er)
- return this[ONERROR](er, entry)
- fs.lstat(entry.absolute, (er, st) => {
- if (st && (this.keep || this.newer && st.mtime > entry.mtime))
- this[SKIP](entry)
- else if (er || this[ISREUSABLE](entry, st))
- this[MAKEFS](null, entry)
- else if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- if (!entry.mode || (st.mode & 0o7777) === entry.mode)
- this[MAKEFS](null, entry)
- else
- fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
- } else
- fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
- } else
- unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))
- })
- })
- }
-
- [MAKEFS] (er, entry) {
- if (er)
- return this[ONERROR](er, entry)
-
- switch (entry.type) {
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- return this[FILE](entry)
-
- case 'Link':
- return this[HARDLINK](entry)
-
- case 'SymbolicLink':
- return this[SYMLINK](entry)
-
- case 'Directory':
- case 'GNUDumpDir':
- return this[DIRECTORY](entry)
- }
- }
-
- [LINK] (entry, linkpath, link) {
- // XXX: get the type ('file' or 'dir') for windows
- fs[link](linkpath, entry.absolute, er => {
- if (er)
- return this[ONERROR](er, entry)
- this[UNPEND]()
- entry.resume()
- })
- }
-}
-
-class UnpackSync extends Unpack {
- constructor (opt) {
- super(opt)
- }
-
- [CHECKFS] (entry) {
- const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
- if (er)
- return this[ONERROR](er, entry)
- try {
- const st = fs.lstatSync(entry.absolute)
- if (this.keep || this.newer && st.mtime > entry.mtime)
- return this[SKIP](entry)
- else if (this[ISREUSABLE](entry, st))
- return this[MAKEFS](null, entry)
- else {
- try {
- if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- if (entry.mode && (st.mode & 0o7777) !== entry.mode)
- fs.chmodSync(entry.absolute, entry.mode)
- } else
- fs.rmdirSync(entry.absolute)
- } else
- unlinkFileSync(entry.absolute)
- return this[MAKEFS](null, entry)
- } catch (er) {
- return this[ONERROR](er, entry)
- }
- }
- } catch (er) {
- return this[MAKEFS](null, entry)
- }
- }
-
- [FILE] (entry) {
- const mode = entry.mode & 0o7777 || this.fmode
-
- const oner = er => {
- try { fs.closeSync(fd) } catch (_) {}
- if (er)
- this[ONERROR](er, entry)
- }
-
- let stream
- let fd
- try {
- fd = fs.openSync(entry.absolute, 'w', mode)
- } catch (er) {
- return oner(er)
- }
- const tx = this.transform ? this.transform(entry) || entry : entry
- if (tx !== entry) {
- tx.on('error', er => this[ONERROR](er, entry))
- entry.pipe(tx)
- }
-
- tx.on('data', chunk => {
- try {
- fs.writeSync(fd, chunk, 0, chunk.length)
- } catch (er) {
- oner(er)
- }
- })
-
- tx.on('end', _ => {
- let er = null
- // try both, falling futimes back to utimes
- // if either fails, handle the first error
- if (entry.mtime && !this.noMtime) {
- const atime = entry.atime || new Date()
- const mtime = entry.mtime
- try {
- fs.futimesSync(fd, atime, mtime)
- } catch (futimeser) {
- try {
- fs.utimesSync(entry.absolute, atime, mtime)
- } catch (utimeser) {
- er = futimeser
- }
- }
- }
-
- if (this[DOCHOWN](entry)) {
- const uid = this[UID](entry)
- const gid = this[GID](entry)
-
- try {
- fs.fchownSync(fd, uid, gid)
- } catch (fchowner) {
- try {
- fs.chownSync(entry.absolute, uid, gid)
- } catch (chowner) {
- er = er || fchowner
- }
- }
- }
-
- oner(er)
- })
- }
-
- [DIRECTORY] (entry) {
- const mode = entry.mode & 0o7777 || this.dmode
- const er = this[MKDIR](entry.absolute, mode)
- if (er)
- return this[ONERROR](er, entry)
- if (entry.mtime && !this.noMtime) {
- try {
- fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
- } catch (er) {}
- }
- if (this[DOCHOWN](entry)) {
- try {
- fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
- } catch (er) {}
- }
- entry.resume()
- }
-
- [MKDIR] (dir, mode) {
- try {
- return mkdir.sync(dir, {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode
- })
- } catch (er) {
- return er
- }
- }
-
- [LINK] (entry, linkpath, link) {
- try {
- fs[link + 'Sync'](linkpath, entry.absolute)
- entry.resume()
- } catch (er) {
- return this[ONERROR](er, entry)
- }
- }
-}
-
-Unpack.Sync = UnpackSync
-module.exports = Unpack
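A minimal extraction sketch with the removed Unpack class; since it extends the parser it accepts raw or gzipped tar bytes by pipe. The destination directory is illustrative and should already exist:

```js
'use strict'
const fs = require('fs')
const Unpack = require('./lib/unpack.js')   // illustrative path into the removed lib/

const unpack = new Unpack({
  cwd: 'dest',   // entry paths are resolved against this directory
  strip: 1,      // drop the first path component, like --strip-components=1
  onwarn: (msg, data) => console.warn('tar warning:', msg)
})

unpack.on('close', () => console.log('extraction finished'))

fs.createReadStream('archive.tgz').pipe(unpack)
```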
diff --git a/node_modules/libcipm/node_modules/tar/lib/update.js b/node_modules/libcipm/node_modules/tar/lib/update.js
deleted file mode 100644
index 16c3e93ed..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/update.js
+++ /dev/null
@@ -1,36 +0,0 @@
-'use strict'
-
-// tar -u
-
-const hlo = require('./high-level-opt.js')
-const r = require('./replace.js')
-// just call tar.r with the filter and mtimeCache
-
-const u = module.exports = (opt_, files, cb) => {
- const opt = hlo(opt_)
-
- if (!opt.file)
- throw new TypeError('file is required')
-
- if (opt.gzip)
- throw new TypeError('cannot append to compressed archives')
-
- if (!files || !Array.isArray(files) || !files.length)
- throw new TypeError('no files or directories specified')
-
- files = Array.from(files)
-
- mtimeFilter(opt)
- return r(opt, files, cb)
-}
-
-const mtimeFilter = opt => {
- const filter = opt.filter
-
- if (!opt.mtimeCache)
- opt.mtimeCache = new Map()
-
- opt.filter = filter ? (path, stat) =>
- filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
- : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
-}
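A sketch of the update wrapper above: the call shape matches replace, but the injected filter skips any path whose on-disk mtime is not newer than what the archive already records (file names are illustrative):

```js
'use strict'
const u = require('./lib/update.js')   // illustrative path into the removed lib/

u({ file: 'out.tar' }, ['package.json', 'lib'])
  .then(() => console.log('archive updated where files were newer'))
  .catch(er => console.error(er))
```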
diff --git a/node_modules/libcipm/node_modules/tar/lib/warn-mixin.js b/node_modules/libcipm/node_modules/tar/lib/warn-mixin.js
deleted file mode 100644
index 94a4b9b99..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/warn-mixin.js
+++ /dev/null
@@ -1,14 +0,0 @@
-'use strict'
-module.exports = Base => class extends Base {
- warn (msg, data) {
- if (!this.strict)
- this.emit('warn', msg, data)
- else if (data instanceof Error)
- this.emit('error', data)
- else {
- const er = new Error(msg)
- er.data = data
- this.emit('error', er)
- }
- }
-}
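The mixin above wraps an EventEmitter subclass, as parse.js and write-entry.js do; a sketch of the non-strict and strict behavior:

```js
'use strict'
const EE = require('events')
const warner = require('./lib/warn-mixin.js')   // illustrative path into the removed lib/

const Demo = warner(class Demo extends EE {})

const lax = new Demo()
lax.strict = false
lax.on('warn', (msg, data) => console.warn('warn:', msg, data))
lax.warn('odd but recoverable', { code: 42 })    // emits 'warn'

const strict = new Demo()
strict.strict = true
strict.on('error', er => console.error('error:', er.message, er.data))
strict.warn('odd and fatal', { code: 43 })       // wraps msg in an Error, emits 'error'
```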
diff --git a/node_modules/libcipm/node_modules/tar/lib/winchars.js b/node_modules/libcipm/node_modules/tar/lib/winchars.js
deleted file mode 100644
index cf6ea0606..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/winchars.js
+++ /dev/null
@@ -1,23 +0,0 @@
-'use strict'
-
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-
-const raw = [
- '|',
- '<',
- '>',
- '?',
- ':'
-]
-
-const win = raw.map(char =>
- String.fromCharCode(0xf000 + char.charCodeAt(0)))
-
-const toWin = new Map(raw.map((char, i) => [char, win[i]]))
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
-
-module.exports = {
- encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
- decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
-}
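A round-trip sketch of the translation above (the file name is illustrative):

```js
'use strict'
const wc = require('./lib/winchars.js')   // illustrative path into the removed lib/

const encoded = wc.encode('logs|08:30?.txt')
console.log(encoded === 'logs|08:30?.txt')   // false: | : ? moved into the 0xf000 range
console.log(wc.decode(encoded))              // 'logs|08:30?.txt'
```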
diff --git a/node_modules/libcipm/node_modules/tar/lib/write-entry.js b/node_modules/libcipm/node_modules/tar/lib/write-entry.js
deleted file mode 100644
index 0c019006f..000000000
--- a/node_modules/libcipm/node_modules/tar/lib/write-entry.js
+++ /dev/null
@@ -1,422 +0,0 @@
-'use strict'
-const Buffer = require('./buffer.js')
-const MiniPass = require('minipass')
-const Pax = require('./pax.js')
-const Header = require('./header.js')
-const ReadEntry = require('./read-entry.js')
-const fs = require('fs')
-const path = require('path')
-
-const types = require('./types.js')
-const maxReadSize = 16 * 1024 * 1024
-const PROCESS = Symbol('process')
-const FILE = Symbol('file')
-const DIRECTORY = Symbol('directory')
-const SYMLINK = Symbol('symlink')
-const HARDLINK = Symbol('hardlink')
-const HEADER = Symbol('header')
-const READ = Symbol('read')
-const LSTAT = Symbol('lstat')
-const ONLSTAT = Symbol('onlstat')
-const ONREAD = Symbol('onread')
-const ONREADLINK = Symbol('onreadlink')
-const OPENFILE = Symbol('openfile')
-const ONOPENFILE = Symbol('onopenfile')
-const CLOSE = Symbol('close')
-const MODE = Symbol('mode')
-const warner = require('./warn-mixin.js')
-const winchars = require('./winchars.js')
-
-const modeFix = require('./mode-fix.js')
-
-const WriteEntry = warner(class WriteEntry extends MiniPass {
- constructor (p, opt) {
- opt = opt || {}
- super(opt)
- if (typeof p !== 'string')
- throw new TypeError('path is required')
- this.path = p
- // suppress atime, ctime, uid, gid, uname, gname
- this.portable = !!opt.portable
- // until node has builtin pwnam functions, this'll have to do
- this.myuid = process.getuid && process.getuid()
- this.myuser = process.env.USER || ''
- this.maxReadSize = opt.maxReadSize || maxReadSize
- this.linkCache = opt.linkCache || new Map()
- this.statCache = opt.statCache || new Map()
- this.preservePaths = !!opt.preservePaths
- this.cwd = opt.cwd || process.cwd()
- this.strict = !!opt.strict
- this.noPax = !!opt.noPax
- this.noMtime = !!opt.noMtime
- this.mtime = opt.mtime || null
-
- if (typeof opt.onwarn === 'function')
- this.on('warn', opt.onwarn)
-
- if (!this.preservePaths && path.win32.isAbsolute(p)) {
- // absolutes on posix are also absolutes on win32
- // so we only need to test this one to get both
- const parsed = path.win32.parse(p)
- this.warn('stripping ' + parsed.root + ' from absolute path', p)
- this.path = p.substr(parsed.root.length)
- }
-
- this.win32 = !!opt.win32 || process.platform === 'win32'
- if (this.win32) {
- this.path = winchars.decode(this.path.replace(/\\/g, '/'))
- p = p.replace(/\\/g, '/')
- }
-
- this.absolute = opt.absolute || path.resolve(this.cwd, p)
-
- if (this.path === '')
- this.path = './'
-
- if (this.statCache.has(this.absolute))
- this[ONLSTAT](this.statCache.get(this.absolute))
- else
- this[LSTAT]()
- }
-
- [LSTAT] () {
- fs.lstat(this.absolute, (er, stat) => {
- if (er)
- return this.emit('error', er)
- this[ONLSTAT](stat)
- })
- }
-
- [ONLSTAT] (stat) {
- this.statCache.set(this.absolute, stat)
- this.stat = stat
- if (!stat.isFile())
- stat.size = 0
- this.type = getType(stat)
- this.emit('stat', stat)
- this[PROCESS]()
- }
-
- [PROCESS] () {
- switch (this.type) {
- case 'File': return this[FILE]()
- case 'Directory': return this[DIRECTORY]()
- case 'SymbolicLink': return this[SYMLINK]()
- // unsupported types are ignored.
- default: return this.end()
- }
- }
-
- [MODE] (mode) {
- return modeFix(mode, this.type === 'Directory')
- }
-
- [HEADER] () {
- if (this.type === 'Directory' && this.portable)
- this.noMtime = true
-
- this.header = new Header({
- path: this.path,
- linkpath: this.linkpath,
- // only the permissions and setuid/setgid/sticky bitflags
- // not the higher-order bits that specify file type
- mode: this[MODE](this.stat.mode),
- uid: this.portable ? null : this.stat.uid,
- gid: this.portable ? null : this.stat.gid,
- size: this.stat.size,
- mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
- type: this.type,
- uname: this.portable ? null :
- this.stat.uid === this.myuid ? this.myuser : '',
- atime: this.portable ? null : this.stat.atime,
- ctime: this.portable ? null : this.stat.ctime
- })
-
- if (this.header.encode() && !this.noPax)
- this.write(new Pax({
- atime: this.portable ? null : this.header.atime,
- ctime: this.portable ? null : this.header.ctime,
- gid: this.portable ? null : this.header.gid,
- mtime: this.noMtime ? null : this.mtime || this.header.mtime,
- path: this.path,
- linkpath: this.linkpath,
- size: this.header.size,
- uid: this.portable ? null : this.header.uid,
- uname: this.portable ? null : this.header.uname,
- dev: this.portable ? null : this.stat.dev,
- ino: this.portable ? null : this.stat.ino,
- nlink: this.portable ? null : this.stat.nlink
- }).encode())
- this.write(this.header.block)
- }
-
- [DIRECTORY] () {
- if (this.path.substr(-1) !== '/')
- this.path += '/'
- this.stat.size = 0
- this[HEADER]()
- this.end()
- }
-
- [SYMLINK] () {
- fs.readlink(this.absolute, (er, linkpath) => {
- if (er)
- return this.emit('error', er)
- this[ONREADLINK](linkpath)
- })
- }
-
- [ONREADLINK] (linkpath) {
- this.linkpath = linkpath
- this[HEADER]()
- this.end()
- }
-
- [HARDLINK] (linkpath) {
- this.type = 'Link'
- this.linkpath = path.relative(this.cwd, linkpath)
- this.stat.size = 0
- this[HEADER]()
- this.end()
- }
-
- [FILE] () {
- if (this.stat.nlink > 1) {
- const linkKey = this.stat.dev + ':' + this.stat.ino
- if (this.linkCache.has(linkKey)) {
- const linkpath = this.linkCache.get(linkKey)
- if (linkpath.indexOf(this.cwd) === 0)
- return this[HARDLINK](linkpath)
- }
- this.linkCache.set(linkKey, this.absolute)
- }
-
- this[HEADER]()
- if (this.stat.size === 0)
- return this.end()
-
- this[OPENFILE]()
- }
-
- [OPENFILE] () {
- fs.open(this.absolute, 'r', (er, fd) => {
- if (er)
- return this.emit('error', er)
- this[ONOPENFILE](fd)
- })
- }
-
- [ONOPENFILE] (fd) {
- const blockLen = 512 * Math.ceil(this.stat.size / 512)
- const bufLen = Math.min(blockLen, this.maxReadSize)
- const buf = Buffer.allocUnsafe(bufLen)
- this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)
- }
-
- [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
- fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
- if (er)
- return this[CLOSE](fd, _ => this.emit('error', er))
- this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
- })
- }
-
- [CLOSE] (fd, cb) {
- fs.close(fd, cb)
- }
-
- [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {
- if (bytesRead <= 0 && remain > 0) {
- const er = new Error('encountered unexpected EOF')
- er.path = this.absolute
- er.syscall = 'read'
- er.code = 'EOF'
- this[CLOSE](fd, _ => _)
- return this.emit('error', er)
- }
-
- if (bytesRead > remain) {
- const er = new Error('did not encounter expected EOF')
- er.path = this.absolute
- er.syscall = 'read'
- er.code = 'EOF'
- this[CLOSE](fd, _ => _)
- return this.emit('error', er)
- }
-
- // null out the rest of the buffer, if we could fit the block padding
- if (bytesRead === remain) {
- for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
- buf[i + offset] = 0
- bytesRead ++
- remain ++
- }
- }
-
- const writeBuf = offset === 0 && bytesRead === buf.length ?
- buf : buf.slice(offset, offset + bytesRead)
- remain -= bytesRead
- blockRemain -= bytesRead
- pos += bytesRead
- offset += bytesRead
-
- this.write(writeBuf)
-
- if (!remain) {
- if (blockRemain)
- this.write(Buffer.alloc(blockRemain))
- this.end()
- this[CLOSE](fd, _ => _)
- return
- }
-
- if (offset >= length) {
- buf = Buffer.allocUnsafe(length)
- offset = 0
- }
- length = buf.length - offset
- this[READ](fd, buf, offset, length, pos, remain, blockRemain)
- }
-})
-
-class WriteEntrySync extends WriteEntry {
- constructor (path, opt) {
- super(path, opt)
- }
-
- [LSTAT] () {
- this[ONLSTAT](fs.lstatSync(this.absolute))
- }
-
- [SYMLINK] () {
- this[ONREADLINK](fs.readlinkSync(this.absolute))
- }
-
- [OPENFILE] () {
- this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
- }
-
- [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
- let threw = true
- try {
- const bytesRead = fs.readSync(fd, buf, offset, length, pos)
- this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
- threw = false
- } finally {
- if (threw)
- try { this[CLOSE](fd) } catch (er) {}
- }
- }
-
- [CLOSE] (fd) {
- fs.closeSync(fd)
- }
-}
-
-const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
- constructor (readEntry, opt) {
- opt = opt || {}
- super(opt)
- this.preservePaths = !!opt.preservePaths
- this.portable = !!opt.portable
- this.strict = !!opt.strict
- this.noPax = !!opt.noPax
- this.noMtime = !!opt.noMtime
-
- this.readEntry = readEntry
- this.type = readEntry.type
- if (this.type === 'Directory' && this.portable)
- this.noMtime = true
-
- this.path = readEntry.path
- this.mode = this[MODE](readEntry.mode)
- this.uid = this.portable ? null : readEntry.uid
- this.gid = this.portable ? null : readEntry.gid
- this.uname = this.portable ? null : readEntry.uname
- this.gname = this.portable ? null : readEntry.gname
- this.size = readEntry.size
- this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
- this.atime = this.portable ? null : readEntry.atime
- this.ctime = this.portable ? null : readEntry.ctime
- this.linkpath = readEntry.linkpath
-
- if (typeof opt.onwarn === 'function')
- this.on('warn', opt.onwarn)
-
- if (path.isAbsolute(this.path) && !this.preservePaths) {
- const parsed = path.parse(this.path)
- this.warn(
- 'stripping ' + parsed.root + ' from absolute path',
- this.path
- )
- this.path = this.path.substr(parsed.root.length)
- }
-
- this.remain = readEntry.size
- this.blockRemain = readEntry.startBlockSize
-
- this.header = new Header({
- path: this.path,
- linkpath: this.linkpath,
- // only the permissions and setuid/setgid/sticky bitflags
- // not the higher-order bits that specify file type
- mode: this.mode,
- uid: this.portable ? null : this.uid,
- gid: this.portable ? null : this.gid,
- size: this.size,
- mtime: this.noMtime ? null : this.mtime,
- type: this.type,
- uname: this.portable ? null : this.uname,
- atime: this.portable ? null : this.atime,
- ctime: this.portable ? null : this.ctime
- })
-
- if (this.header.encode() && !this.noPax)
- super.write(new Pax({
- atime: this.portable ? null : this.atime,
- ctime: this.portable ? null : this.ctime,
- gid: this.portable ? null : this.gid,
- mtime: this.noMtime ? null : this.mtime,
- path: this.path,
- linkpath: this.linkpath,
- size: this.size,
- uid: this.portable ? null : this.uid,
- uname: this.portable ? null : this.uname,
- dev: this.portable ? null : this.readEntry.dev,
- ino: this.portable ? null : this.readEntry.ino,
- nlink: this.portable ? null : this.readEntry.nlink
- }).encode())
-
- super.write(this.header.block)
- readEntry.pipe(this)
- }
-
- [MODE] (mode) {
- return modeFix(mode, this.type === 'Directory')
- }
-
- write (data) {
- const writeLen = data.length
- if (writeLen > this.blockRemain)
- throw new Error('writing more to entry than is appropriate')
- this.blockRemain -= writeLen
- return super.write(data)
- }
-
- end () {
- if (this.blockRemain)
- this.write(Buffer.alloc(this.blockRemain))
- return super.end()
- }
-})
-
-WriteEntry.Sync = WriteEntrySync
-WriteEntry.Tar = WriteEntryTar
-
-const getType = stat =>
- stat.isFile() ? 'File'
- : stat.isDirectory() ? 'Directory'
- : stat.isSymbolicLink() ? 'SymbolicLink'
- : 'Unsupported'
-
-module.exports = WriteEntry
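A sketch of a WriteEntry on its own: given one path it emits the header block(s) followed by the body padded out to 512-byte blocks, which is what Pack pipes onward (the path is illustrative):

```js
'use strict'
const WriteEntry = require('./lib/write-entry.js')   // illustrative path into the removed lib/

const entry = new WriteEntry('package.json', { cwd: process.cwd() })

entry.on('stat', st => console.log('size on disk:', st.size))

const chunks = []
entry.on('data', c => chunks.push(c))
entry.on('end', () => {
  const out = Buffer.concat(chunks)
  console.log('tar bytes:', out.length, '512-aligned:', out.length % 512 === 0)
})
```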
diff --git a/node_modules/libcipm/node_modules/tar/node_modules/yallist/LICENSE b/node_modules/libcipm/node_modules/tar/node_modules/yallist/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/tar/node_modules/yallist/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/tar/node_modules/yallist/README.md b/node_modules/libcipm/node_modules/tar/node_modules/yallist/README.md
deleted file mode 100644
index f58610186..000000000
--- a/node_modules/libcipm/node_modules/tar/node_modules/yallist/README.md
+++ /dev/null
@@ -1,204 +0,0 @@
-# yallist
-
-Yet Another Linked List
-
-There are many doubly-linked list implementations like it, but this
-one is mine.
-
-For when an array would be too big, and a Map can't be iterated in
-reverse order.
-
-
-[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
-
-## basic usage
-
-```javascript
-var yallist = require('yallist')
-var myList = yallist.create([1, 2, 3])
-myList.push('foo')
-myList.unshift('bar')
-// of course pop() and shift() are there, too
-console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
-myList.forEach(function (k) {
- // walk the list head to tail
-})
-myList.forEachReverse(function (k, index, list) {
- // walk the list tail to head
-})
-var myDoubledList = myList.map(function (k) {
- return k + k
-})
-// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
-// mapReverse is also a thing
-var myDoubledListReverse = myList.mapReverse(function (k) {
- return k + k
-}) // ['foofoo', 6, 4, 2, 'barbar']
-
-var reduced = myList.reduce(function (set, entry) {
- set += entry
- return set
-}, 'start')
-console.log(reduced) // 'startfoo123bar'
-```
-
-## api
-
-The whole API is considered "public".
-
-Functions with the same name as an Array method work more or less the
-same way.
-
-There are reverse versions of most things, because that's the point.
-
-### Yallist
-
-Default export, the class that holds and manages a list.
-
-Call it with either a forEach-able (like an array) or a set of
-arguments, to initialize the list.
-
-The Array-ish methods all act like you'd expect. No magic length,
-though, so if you change that it won't automatically prune or add
-empty spots.
-
-### Yallist.create(..)
-
-Alias for Yallist function. Some people like factories.
-
-#### yallist.head
-
-The first node in the list
-
-#### yallist.tail
-
-The last node in the list
-
-#### yallist.length
-
-The number of nodes in the list. (Change this at your peril. It is
-not magic like Array length.)
-
-#### yallist.toArray()
-
-Convert the list to an array.
-
-#### yallist.forEach(fn, [thisp])
-
-Call a function on each item in the list.
-
-#### yallist.forEachReverse(fn, [thisp])
-
-Call a function on each item in the list, in reverse order.
-
-#### yallist.get(n)
-
-Get the data at position `n` in the list. If you use this a lot,
-you're probably better off just using an Array.
-
-#### yallist.getReverse(n)
-
-Get the data at position `n`, counting from the tail.
-
-#### yallist.map(fn, thisp)
-
-Create a new Yallist with the result of calling the function on each
-item.
-
-#### yallist.mapReverse(fn, thisp)
-
-Same as `map`, but in reverse.
-
-#### yallist.pop()
-
-Get the data from the list tail, and remove the tail from the list.
-
-#### yallist.push(item, ...)
-
-Insert one or more items to the tail of the list.
-
-#### yallist.reduce(fn, initialValue)
-
-Like Array.reduce.
-
-#### yallist.reduceReverse
-
-Like Array.reduce, but in reverse.
-
-#### yallist.reverse
-
-Reverse the list in place.
-
-#### yallist.shift()
-
-Get the data from the list head, and remove the head from the list.
-
-#### yallist.slice([from], [to])
-
-Just like Array.slice, but returns a new Yallist.
-
-#### yallist.sliceReverse([from], [to])
-
-Just like yallist.slice, but the result is returned in reverse.
-
-#### yallist.toArray()
-
-Create an array representation of the list.
-
-#### yallist.toArrayReverse()
-
-Create a reversed array representation of the list.
-
-#### yallist.unshift(item, ...)
-
-Insert one or more items to the head of the list.
-
-#### yallist.unshiftNode(node)
-
-Move a Node object to the front of the list. (That is, pull it out of
-wherever it lives, and make it the new head.)
-
-If the node belongs to a different list, then that list will remove it
-first.
-
-#### yallist.pushNode(node)
-
-Move a Node object to the end of the list. (That is, pull it out of
-wherever it lives, and make it the new tail.)
-
-If the node belongs to a list already, then that list will remove it
-first.
-
-#### yallist.removeNode(node)
-
-Remove a node from the list, preserving referential integrity of head
-and tail and other nodes.
-
-Will throw an error if you try to have a list remove a node that
-doesn't belong to it.
-
-### Yallist.Node
-
-The class that holds the data and is actually the list.
-
-Call with `var n = new Node(value, previousNode, nextNode)`
-
-Note that if you do direct operations on Nodes themselves, it's very
-easy to get into weird states where the list is broken. Be careful :)
-
-#### node.next
-
-The next node in the list.
-
-#### node.prev
-
-The previous node in the list.
-
-#### node.value
-
-The data the node contains.
-
-#### node.list
-
-The list to which this node belongs. (Null if it does not belong to
-any list.)
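-
-For example, here's a minimal sketch of moving and removing nodes directly
-(the values are arbitrary; the methods are the ones documented above):
-
-```javascript
-var Yallist = require('yallist')
-var list = Yallist.create(['a', 'b', 'c'])
-
-// grab the tail node and move it to the front of the list
-var node = list.tail
-list.unshiftNode(node)
-console.log(list.toArray()) // ['c', 'a', 'b']
-
-// remove that node entirely; head, tail, and length stay consistent
-list.removeNode(node)
-console.log(list.toArray()) // ['a', 'b']
-```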
diff --git a/node_modules/libcipm/node_modules/tar/node_modules/yallist/iterator.js b/node_modules/libcipm/node_modules/tar/node_modules/yallist/iterator.js
deleted file mode 100644
index d41c97a19..000000000
--- a/node_modules/libcipm/node_modules/tar/node_modules/yallist/iterator.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-module.exports = function (Yallist) {
- Yallist.prototype[Symbol.iterator] = function* () {
- for (let walker = this.head; walker; walker = walker.next) {
- yield walker.value
- }
- }
-}
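-
-// For instance, once this iterator has been installed (yallist.js requires this
-// file automatically when Symbol.iterator is available), a list is consumable
-// with for..of -- a minimal sketch:
-//
-//   var Yallist = require('yallist')
-//   var list = Yallist.create([1, 2, 3])
-//   for (var value of list) {
-//     console.log(value) // logs 1, then 2, then 3
-//   }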
diff --git a/node_modules/libcipm/node_modules/tar/node_modules/yallist/package.json b/node_modules/libcipm/node_modules/tar/node_modules/yallist/package.json
deleted file mode 100644
index 38492ed32..000000000
--- a/node_modules/libcipm/node_modules/tar/node_modules/yallist/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "yallist@^3.0.3",
- "_id": "yallist@3.1.1",
- "_inBundle": false,
- "_integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
- "_location": "/libcipm/tar/yallist",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "yallist@^3.0.3",
- "name": "yallist",
- "escapedName": "yallist",
- "rawSpec": "^3.0.3",
- "saveSpec": null,
- "fetchSpec": "^3.0.3"
- },
- "_requiredBy": [
- "/libcipm/tar"
- ],
- "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
- "_shasum": "dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd",
- "_spec": "yallist@^3.0.3",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/libcipm/node_modules/tar",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/yallist/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Yet Another Linked List",
- "devDependencies": {
- "tap": "^12.1.0"
- },
- "directories": {
- "test": "test"
- },
- "files": [
- "yallist.js",
- "iterator.js"
- ],
- "homepage": "https://github.com/isaacs/yallist#readme",
- "license": "ISC",
- "main": "yallist.js",
- "name": "yallist",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/yallist.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap test/*.js --100"
- },
- "version": "3.1.1"
-}
diff --git a/node_modules/libcipm/node_modules/tar/node_modules/yallist/yallist.js b/node_modules/libcipm/node_modules/tar/node_modules/yallist/yallist.js
deleted file mode 100644
index ed4e7303a..000000000
--- a/node_modules/libcipm/node_modules/tar/node_modules/yallist/yallist.js
+++ /dev/null
@@ -1,426 +0,0 @@
-'use strict'
-module.exports = Yallist
-
-Yallist.Node = Node
-Yallist.create = Yallist
-
-function Yallist (list) {
- var self = this
- if (!(self instanceof Yallist)) {
- self = new Yallist()
- }
-
- self.tail = null
- self.head = null
- self.length = 0
-
- if (list && typeof list.forEach === 'function') {
- list.forEach(function (item) {
- self.push(item)
- })
- } else if (arguments.length > 0) {
- for (var i = 0, l = arguments.length; i < l; i++) {
- self.push(arguments[i])
- }
- }
-
- return self
-}
-
-Yallist.prototype.removeNode = function (node) {
- if (node.list !== this) {
- throw new Error('removing node which does not belong to this list')
- }
-
- var next = node.next
- var prev = node.prev
-
- if (next) {
- next.prev = prev
- }
-
- if (prev) {
- prev.next = next
- }
-
- if (node === this.head) {
- this.head = next
- }
- if (node === this.tail) {
- this.tail = prev
- }
-
- node.list.length--
- node.next = null
- node.prev = null
- node.list = null
-
- return next
-}
-
-Yallist.prototype.unshiftNode = function (node) {
- if (node === this.head) {
- return
- }
-
- if (node.list) {
- node.list.removeNode(node)
- }
-
- var head = this.head
- node.list = this
- node.next = head
- if (head) {
- head.prev = node
- }
-
- this.head = node
- if (!this.tail) {
- this.tail = node
- }
- this.length++
-}
-
-Yallist.prototype.pushNode = function (node) {
- if (node === this.tail) {
- return
- }
-
- if (node.list) {
- node.list.removeNode(node)
- }
-
- var tail = this.tail
- node.list = this
- node.prev = tail
- if (tail) {
- tail.next = node
- }
-
- this.tail = node
- if (!this.head) {
- this.head = node
- }
- this.length++
-}
-
-Yallist.prototype.push = function () {
- for (var i = 0, l = arguments.length; i < l; i++) {
- push(this, arguments[i])
- }
- return this.length
-}
-
-Yallist.prototype.unshift = function () {
- for (var i = 0, l = arguments.length; i < l; i++) {
- unshift(this, arguments[i])
- }
- return this.length
-}
-
-Yallist.prototype.pop = function () {
- if (!this.tail) {
- return undefined
- }
-
- var res = this.tail.value
- this.tail = this.tail.prev
- if (this.tail) {
- this.tail.next = null
- } else {
- this.head = null
- }
- this.length--
- return res
-}
-
-Yallist.prototype.shift = function () {
- if (!this.head) {
- return undefined
- }
-
- var res = this.head.value
- this.head = this.head.next
- if (this.head) {
- this.head.prev = null
- } else {
- this.tail = null
- }
- this.length--
- return res
-}
-
-Yallist.prototype.forEach = function (fn, thisp) {
- thisp = thisp || this
- for (var walker = this.head, i = 0; walker !== null; i++) {
- fn.call(thisp, walker.value, i, this)
- walker = walker.next
- }
-}
-
-Yallist.prototype.forEachReverse = function (fn, thisp) {
- thisp = thisp || this
- for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
- fn.call(thisp, walker.value, i, this)
- walker = walker.prev
- }
-}
-
-Yallist.prototype.get = function (n) {
- for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.next
- }
- if (i === n && walker !== null) {
- return walker.value
- }
-}
-
-Yallist.prototype.getReverse = function (n) {
- for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.prev
- }
- if (i === n && walker !== null) {
- return walker.value
- }
-}
-
-Yallist.prototype.map = function (fn, thisp) {
- thisp = thisp || this
- var res = new Yallist()
- for (var walker = this.head; walker !== null;) {
- res.push(fn.call(thisp, walker.value, this))
- walker = walker.next
- }
- return res
-}
-
-Yallist.prototype.mapReverse = function (fn, thisp) {
- thisp = thisp || this
- var res = new Yallist()
- for (var walker = this.tail; walker !== null;) {
- res.push(fn.call(thisp, walker.value, this))
- walker = walker.prev
- }
- return res
-}
-
-Yallist.prototype.reduce = function (fn, initial) {
- var acc
- var walker = this.head
- if (arguments.length > 1) {
- acc = initial
- } else if (this.head) {
- walker = this.head.next
- acc = this.head.value
- } else {
- throw new TypeError('Reduce of empty list with no initial value')
- }
-
- for (var i = 0; walker !== null; i++) {
- acc = fn(acc, walker.value, i)
- walker = walker.next
- }
-
- return acc
-}
-
-Yallist.prototype.reduceReverse = function (fn, initial) {
- var acc
- var walker = this.tail
- if (arguments.length > 1) {
- acc = initial
- } else if (this.tail) {
- walker = this.tail.prev
- acc = this.tail.value
- } else {
- throw new TypeError('Reduce of empty list with no initial value')
- }
-
- for (var i = this.length - 1; walker !== null; i--) {
- acc = fn(acc, walker.value, i)
- walker = walker.prev
- }
-
- return acc
-}
-
-Yallist.prototype.toArray = function () {
- var arr = new Array(this.length)
- for (var i = 0, walker = this.head; walker !== null; i++) {
- arr[i] = walker.value
- walker = walker.next
- }
- return arr
-}
-
-Yallist.prototype.toArrayReverse = function () {
- var arr = new Array(this.length)
- for (var i = 0, walker = this.tail; walker !== null; i++) {
- arr[i] = walker.value
- walker = walker.prev
- }
- return arr
-}
-
-Yallist.prototype.slice = function (from, to) {
- to = to || this.length
- if (to < 0) {
- to += this.length
- }
- from = from || 0
- if (from < 0) {
- from += this.length
- }
- var ret = new Yallist()
- if (to < from || to < 0) {
- return ret
- }
- if (from < 0) {
- from = 0
- }
- if (to > this.length) {
- to = this.length
- }
- for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
- walker = walker.next
- }
- for (; walker !== null && i < to; i++, walker = walker.next) {
- ret.push(walker.value)
- }
- return ret
-}
-
-Yallist.prototype.sliceReverse = function (from, to) {
- to = to || this.length
- if (to < 0) {
- to += this.length
- }
- from = from || 0
- if (from < 0) {
- from += this.length
- }
- var ret = new Yallist()
- if (to < from || to < 0) {
- return ret
- }
- if (from < 0) {
- from = 0
- }
- if (to > this.length) {
- to = this.length
- }
- for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
- walker = walker.prev
- }
- for (; walker !== null && i > from; i--, walker = walker.prev) {
- ret.push(walker.value)
- }
- return ret
-}
-
-Yallist.prototype.splice = function (start, deleteCount /*, ...nodes */) {
- if (start > this.length) {
- start = this.length - 1
- }
- if (start < 0) {
- start = this.length + start;
- }
-
- for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
- walker = walker.next
- }
-
- var ret = []
- for (var i = 0; walker && i < deleteCount; i++) {
- ret.push(walker.value)
- walker = this.removeNode(walker)
- }
- if (walker === null) {
- walker = this.tail
- }
-
- if (walker !== this.head && walker !== this.tail) {
- walker = walker.prev
- }
-
- for (var i = 2; i < arguments.length; i++) {
- walker = insert(this, walker, arguments[i])
- }
- return ret;
-}
-
-Yallist.prototype.reverse = function () {
- var head = this.head
- var tail = this.tail
- for (var walker = head; walker !== null; walker = walker.prev) {
- var p = walker.prev
- walker.prev = walker.next
- walker.next = p
- }
- this.head = tail
- this.tail = head
- return this
-}
-
-function insert (self, node, value) {
- var inserted = node === self.head ?
- new Node(value, null, node, self) :
- new Node(value, node, node.next, self)
-
- if (inserted.next === null) {
- self.tail = inserted
- }
- if (inserted.prev === null) {
- self.head = inserted
- }
-
- self.length++
-
- return inserted
-}
-
-function push (self, item) {
- self.tail = new Node(item, self.tail, null, self)
- if (!self.head) {
- self.head = self.tail
- }
- self.length++
-}
-
-function unshift (self, item) {
- self.head = new Node(item, null, self.head, self)
- if (!self.tail) {
- self.tail = self.head
- }
- self.length++
-}
-
-function Node (value, prev, next, list) {
- if (!(this instanceof Node)) {
- return new Node(value, prev, next, list)
- }
-
- this.list = list
- this.value = value
-
- if (prev) {
- prev.next = this
- this.prev = prev
- } else {
- this.prev = null
- }
-
- if (next) {
- next.prev = this
- this.next = next
- } else {
- this.next = null
- }
-}
-
-try {
- // add if support for Symbol.iterator is present
- require('./iterator.js')(Yallist)
-} catch (er) {}
diff --git a/node_modules/libcipm/node_modules/tar/package.json b/node_modules/libcipm/node_modules/tar/package.json
deleted file mode 100644
index 7b73405f9..000000000
--- a/node_modules/libcipm/node_modules/tar/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
- "_from": "tar@^4.4.10",
- "_id": "tar@4.4.13",
- "_inBundle": false,
- "_integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
- "_location": "/libcipm/tar",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "tar@^4.4.10",
- "name": "tar",
- "escapedName": "tar",
- "rawSpec": "^4.4.10",
- "saveSpec": null,
- "fetchSpec": "^4.4.10"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
- "_shasum": "43b364bc52888d555298637b10d60790254ab525",
- "_spec": "tar@^4.4.10",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/node-tar/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "chownr": "^1.1.1",
- "fs-minipass": "^1.2.5",
- "minipass": "^2.8.6",
- "minizlib": "^1.2.1",
- "mkdirp": "^0.5.0",
- "safe-buffer": "^5.1.2",
- "yallist": "^3.0.3"
- },
- "deprecated": false,
- "description": "tar for node",
- "devDependencies": {
- "chmodr": "^1.2.0",
- "end-of-stream": "^1.4.1",
- "events-to-array": "^1.1.2",
- "mutate-fs": "^2.1.1",
- "rimraf": "^2.6.3",
- "tap": "^14.6.5",
- "tar-fs": "^1.16.3",
- "tar-stream": "^1.6.2"
- },
- "engines": {
- "node": ">=4.5"
- },
- "files": [
- "index.js",
- "lib/"
- ],
- "homepage": "https://github.com/npm/node-tar#readme",
- "license": "ISC",
- "name": "tar",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-tar.git"
- },
- "scripts": {
- "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done",
- "genparse": "node scripts/generate-parse-fixtures.js",
- "postpublish": "git push origin --follow-tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap"
- },
- "tap": {
- "coverage-map": "map.js",
- "check-coverage": true
- },
- "version": "4.4.13"
-}
diff --git a/node_modules/libcipm/node_modules/which/CHANGELOG.md b/node_modules/libcipm/node_modules/which/CHANGELOG.md
deleted file mode 100644
index 3d83d2694..000000000
--- a/node_modules/libcipm/node_modules/which/CHANGELOG.md
+++ /dev/null
@@ -1,152 +0,0 @@
-# Changes
-
-
-## 1.3.1
-
-* update deps
-* update travis
-
-## v1.3.0
-
-* Add nothrow option to which.sync
-* update tap
-
-## v1.2.14
-
-* appveyor: drop node 5 and 0.x
-* travis-ci: add node 6, drop 0.x
-
-## v1.2.13
-
-* test: Pass missing option to pass on windows
-* update tap
-* update isexe to 2.0.0
-* neveragain.tech pledge request
-
-## v1.2.12
-
-* Removed unused require
-
-## v1.2.11
-
-* Prevent changelog script from being included in package
-
-## v1.2.10
-
-* Use env.PATH only, not env.Path
-
-## v1.2.9
-
-* fix for paths starting with ../
-* Remove unused `is-absolute` module
-
-## v1.2.8
-
-* bullet items in changelog that contain (but don't start with) #
-
-## v1.2.7
-
-* strip 'update changelog' changelog entries out of changelog
-
-## v1.2.6
-
-* make the changelog bulleted
-
-## v1.2.5
-
-* make a changelog, and keep it up to date
-* don't include tests in package
-* Properly handle relative-path executables
-* appveyor
-* Attach error code to Not Found error
-* Make tests pass on Windows
-
-## v1.2.4
-
-* Fix typo
-
-## v1.2.3
-
-* update isexe, fix regression in pathExt handling
-
-## v1.2.2
-
-* update deps, use isexe module, test windows
-
-## v1.2.1
-
-* Sometimes windows PATH entries are quoted
-* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode.
-* doc cli
-
-## v1.2.0
-
-* Add support for opt.all and -as cli flags
-* test the bin
-* update travis
-* Allow checking for multiple programs in bin/which
-* tap 2
-
-## v1.1.2
-
-* travis
-* Refactored and fixed undefined error on Windows
-* Support strict mode
-
-## v1.1.1
-
-* test +g exes against secondary groups, if available
-* Use windows exe semantics on cygwin & msys
-* cwd should be first in path on win32, not last
-* Handle lower-case 'env.Path' on Windows
-* Update docs
-* use single-quotes
-
-## v1.1.0
-
-* Add tests, depend on is-absolute
-
-## v1.0.9
-
-* which.js: root is allowed to execute files owned by anyone
-
-## v1.0.8
-
-* don't use graceful-fs
-
-## v1.0.7
-
-* add license to package.json
-
-## v1.0.6
-
-* isc license
-
-## 1.0.5
-
-* Awful typo
-
-## 1.0.4
-
-* Test for path absoluteness properly
-* win: Allow '' as a pathext if cmd has a . in it
-
-## 1.0.3
-
-* Remove references to execPath
-* Make `which.sync()` work on Windows by honoring the PATHEXT variable.
-* Make `isExe()` always return true on Windows.
-* MIT
-
-## 1.0.2
-
-* Only files can be exes
-
-## 1.0.1
-
-* Respect the PATHEXT env for win32 support
-* should 0755 the bin
-* binary
-* guts
-* package
-* 1st
diff --git a/node_modules/libcipm/node_modules/which/LICENSE b/node_modules/libcipm/node_modules/which/LICENSE
deleted file mode 100644
index 19129e315..000000000
--- a/node_modules/libcipm/node_modules/which/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/libcipm/node_modules/which/README.md b/node_modules/libcipm/node_modules/which/README.md
deleted file mode 100644
index 8c0b0cbf7..000000000
--- a/node_modules/libcipm/node_modules/which/README.md
+++ /dev/null
@@ -1,51 +0,0 @@
-# which
-
-Like the unix `which` utility.
-
-Finds the first instance of a specified executable in the PATH
-environment variable. Does not cache the results, so `hash -r` is not
-needed when the PATH changes.
-
-## USAGE
-
-```javascript
-var which = require('which')
-
-// async usage
-which('node', function (er, resolvedPath) {
- // er is returned if no "node" is found on the PATH
- // if it is found, then the absolute path to the exec is returned
-})
-
-// sync usage
-// throws if not found
-var resolved = which.sync('node')
-
-// if nothrow option is used, returns null if not found
-resolved = which.sync('node', {nothrow: true})
-
-// Pass options to override the PATH and PATHEXT environment vars.
-which('node', { path: someOtherPath }, function (er, resolved) {
- if (er)
- throw er
- console.log('found at %j', resolved)
-})
-```
-
-## CLI USAGE
-
-Same as the BSD `which(1)` binary.
-
-```
-usage: which [-as] program ...
-```
-
-## OPTIONS
-
-You may pass an options object as the second argument.
-
-- `path`: Use instead of the `PATH` environment variable.
-- `pathExt`: Use instead of the `PATHEXT` environment variable.
-- `all`: Return all matches, instead of just the first one. Note that
-  this means the function returns an array of strings instead of a
-  single string. (See the example below.)
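-
-For example, a minimal sketch of the `all` and `nothrow` options (`node`,
-`git`, and the made-up program name below are just placeholders):
-
-```javascript
-var which = require('which')
-
-// all: true resolves to an array of every match found on the PATH
-which('git', { all: true }, function (er, paths) {
-  if (er) throw er
-  console.log(paths) // array of absolute paths
-})
-
-// sync variants: every match, or null instead of a throw when nothing is found
-var allNodes = which.sync('node', { all: true })
-var missing = which.sync('some-missing-program', { nothrow: true }) // null
-```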
diff --git a/node_modules/libcipm/node_modules/which/bin/which b/node_modules/libcipm/node_modules/which/bin/which
deleted file mode 100755
index 7cee3729e..000000000
--- a/node_modules/libcipm/node_modules/which/bin/which
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env node
-var which = require("../")
-if (process.argv.length < 3)
- usage()
-
-function usage () {
- console.error('usage: which [-as] program ...')
- process.exit(1)
-}
-
-var all = false
-var silent = false
-var dashdash = false
-var args = process.argv.slice(2).filter(function (arg) {
- if (dashdash || !/^-/.test(arg))
- return true
-
- if (arg === '--') {
- dashdash = true
- return false
- }
-
- var flags = arg.substr(1).split('')
- for (var f = 0; f < flags.length; f++) {
- var flag = flags[f]
- switch (flag) {
- case 's':
- silent = true
- break
- case 'a':
- all = true
- break
- default:
- console.error('which: illegal option -- ' + flag)
- usage()
- }
- }
- return false
-})
-
-process.exit(args.reduce(function (pv, current) {
- try {
- var f = which.sync(current, { all: all })
- if (all)
- f = f.join('\n')
- if (!silent)
- console.log(f)
- return pv;
- } catch (e) {
- return 1;
- }
-}, 0))
diff --git a/node_modules/libcipm/node_modules/which/package.json b/node_modules/libcipm/node_modules/which/package.json
deleted file mode 100644
index 46e540a3e..000000000
--- a/node_modules/libcipm/node_modules/which/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "which@^1.3.1",
- "_id": "which@1.3.1",
- "_inBundle": false,
- "_integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
- "_location": "/libcipm/which",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "which@^1.3.1",
- "name": "which",
- "escapedName": "which",
- "rawSpec": "^1.3.1",
- "saveSpec": null,
- "fetchSpec": "^1.3.1"
- },
- "_requiredBy": [
- "/libcipm/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
- "_shasum": "a45043d54f5805316da8d62f9f50918d3da70b0a",
- "_spec": "which@^1.3.1",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/libcipm/node_modules/pacote",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me"
- },
- "bin": {
- "which": "bin/which"
- },
- "bugs": {
- "url": "https://github.com/isaacs/node-which/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "deprecated": false,
- "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
- "devDependencies": {
- "mkdirp": "^0.5.0",
- "rimraf": "^2.6.2",
- "tap": "^12.0.1"
- },
- "files": [
- "which.js",
- "bin/which"
- ],
- "homepage": "https://github.com/isaacs/node-which#readme",
- "license": "ISC",
- "main": "which.js",
- "name": "which",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/node-which.git"
- },
- "scripts": {
- "changelog": "bash gen-changelog.sh",
- "postversion": "npm run changelog && git add CHANGELOG.md && git commit -m 'update changelog - '${npm_package_version}",
- "test": "tap test/*.js --cov"
- },
- "version": "1.3.1"
-}
diff --git a/node_modules/libcipm/node_modules/which/which.js b/node_modules/libcipm/node_modules/which/which.js
deleted file mode 100644
index 4347f91a1..000000000
--- a/node_modules/libcipm/node_modules/which/which.js
+++ /dev/null
@@ -1,135 +0,0 @@
-module.exports = which
-which.sync = whichSync
-
-var isWindows = process.platform === 'win32' ||
- process.env.OSTYPE === 'cygwin' ||
- process.env.OSTYPE === 'msys'
-
-var path = require('path')
-var COLON = isWindows ? ';' : ':'
-var isexe = require('isexe')
-
-function getNotFoundError (cmd) {
- var er = new Error('not found: ' + cmd)
- er.code = 'ENOENT'
-
- return er
-}
-
-function getPathInfo (cmd, opt) {
- var colon = opt.colon || COLON
- var pathEnv = opt.path || process.env.PATH || ''
- var pathExt = ['']
-
- pathEnv = pathEnv.split(colon)
-
- var pathExtExe = ''
- if (isWindows) {
- pathEnv.unshift(process.cwd())
- pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
- pathExt = pathExtExe.split(colon)
-
-
- // Always test the cmd itself first. isexe will check to make sure
- // it's found in the pathExt set.
- if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
- pathExt.unshift('')
- }
-
- // If it has a slash, then we don't bother searching the pathenv.
- // just check the file itself, and that's it.
- if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
- pathEnv = ['']
-
- return {
- env: pathEnv,
- ext: pathExt,
- extExe: pathExtExe
- }
-}
-
-function which (cmd, opt, cb) {
- if (typeof opt === 'function') {
- cb = opt
- opt = {}
- }
-
- var info = getPathInfo(cmd, opt)
- var pathEnv = info.env
- var pathExt = info.ext
- var pathExtExe = info.extExe
- var found = []
-
- ;(function F (i, l) {
- if (i === l) {
- if (opt.all && found.length)
- return cb(null, found)
- else
- return cb(getNotFoundError(cmd))
- }
-
- var pathPart = pathEnv[i]
- if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
- pathPart = pathPart.slice(1, -1)
-
- var p = path.join(pathPart, cmd)
- if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
- p = cmd.slice(0, 2) + p
- }
- ;(function E (ii, ll) {
- if (ii === ll) return F(i + 1, l)
- var ext = pathExt[ii]
- isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
- if (!er && is) {
- if (opt.all)
- found.push(p + ext)
- else
- return cb(null, p + ext)
- }
- return E(ii + 1, ll)
- })
- })(0, pathExt.length)
- })(0, pathEnv.length)
-}
-
-function whichSync (cmd, opt) {
- opt = opt || {}
-
- var info = getPathInfo(cmd, opt)
- var pathEnv = info.env
- var pathExt = info.ext
- var pathExtExe = info.extExe
- var found = []
-
- for (var i = 0, l = pathEnv.length; i < l; i ++) {
- var pathPart = pathEnv[i]
- if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
- pathPart = pathPart.slice(1, -1)
-
- var p = path.join(pathPart, cmd)
- if (!pathPart && /^\.[\\\/]/.test(cmd)) {
- p = cmd.slice(0, 2) + p
- }
- for (var j = 0, ll = pathExt.length; j < ll; j ++) {
- var cur = p + pathExt[j]
- var is
- try {
- is = isexe.sync(cur, { pathExt: pathExtExe })
- if (is) {
- if (opt.all)
- found.push(cur)
- else
- return cur
- }
- } catch (ex) {}
- }
- }
-
- if (opt.all && found.length)
- return found
-
- if (opt.nothrow)
- return null
-
- throw getNotFoundError(cmd)
-}
diff --git a/node_modules/libcipm/package.json b/node_modules/libcipm/package.json
deleted file mode 100644
index 1411b4095..000000000
--- a/node_modules/libcipm/package.json
+++ /dev/null
@@ -1,100 +0,0 @@
-{
- "_from": "libcipm@4.0.7",
- "_id": "libcipm@4.0.7",
- "_inBundle": false,
- "_integrity": "sha512-fTq33otU3PNXxxCTCYCYe7V96o59v/o7bvtspmbORXpgFk+wcWrGf5x6tBgui5gCed/45/wtPomBsZBYm5KbIw==",
- "_location": "/libcipm",
- "_phantomChildren": {},
- "_requested": {
- "type": "version",
- "registry": true,
- "raw": "libcipm@4.0.7",
- "name": "libcipm",
- "escapedName": "libcipm",
- "rawSpec": "4.0.7",
- "saveSpec": null,
- "fetchSpec": "4.0.7"
- },
- "_requiredBy": [
- "#USER",
- "/"
- ],
- "_resolved": "https://registry.npmjs.org/libcipm/-/libcipm-4.0.7.tgz",
- "_shasum": "76cd675c98bdaae64db88b782b01b804b6d02c8a",
- "_spec": "libcipm@4.0.7",
- "_where": "/Users/mperrotte/npminc/cli",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/libcipm/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "bin-links": "^1.1.2",
- "bluebird": "^3.5.1",
- "figgy-pudding": "^3.5.1",
- "find-npm-prefix": "^1.0.2",
- "graceful-fs": "^4.1.11",
- "ini": "^1.3.5",
- "lock-verify": "^2.0.2",
- "mkdirp": "^0.5.1",
- "npm-lifecycle": "^3.0.0",
- "npm-logical-tree": "^1.2.1",
- "npm-package-arg": "^6.1.0",
- "pacote": "^9.1.0",
- "read-package-json": "^2.0.13",
- "rimraf": "^2.6.2",
- "worker-farm": "^1.6.0"
- },
- "deprecated": false,
- "description": "programmatic API for cipm: a ci-oriented package installer for npm",
- "devDependencies": {
- "npmlog": "^4.1.2",
- "nyc": "^11.8.0",
- "require-inject": "^1.4.3",
- "standard": "^11.0.1",
- "standard-version": "^4.4.0",
- "tacks": "^1.2.6",
- "tap": "^12.0.1",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/npm/libcipm#readme",
- "keywords": [
- "npm",
- "package manager",
- "caching",
- "downloader"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "libcipm",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/libcipm.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --nyc-arg=--all --coverage test/specs",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "4.0.7"
-}
diff --git a/node_modules/make-fetch-happen/CHANGELOG.md b/node_modules/make-fetch-happen/CHANGELOG.md
deleted file mode 100644
index c73bd4de4..000000000
--- a/node_modules/make-fetch-happen/CHANGELOG.md
+++ /dev/null
@@ -1,587 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="5.0.2"></a>
-## [5.0.2](https://github.com/zkat/make-fetch-happen/compare/v5.0.1...v5.0.2) (2019-11-14)
-
-
-### Bug Fixes
-
-* **streams:** only provide a size and not a boolean to highWaterMark & update travis environments ([a367a14](https://github.com/zkat/make-fetch-happen/commit/a367a14))
-* `highWaterMark` bug @ v5 ([#10](https://github.com/zkat/make-fetch-happen/issues/10)) ([4e4f4e0](https://github.com/zkat/make-fetch-happen/commit/4e4f4e0))
-
-
-
-<a name="5.0.1"></a>
-## [5.0.1](https://github.com/zkat/make-fetch-happen/compare/v5.0.0...v5.0.1) (2019-10-23)
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/make-fetch-happen/compare/v4.0.2...v5.0.0) (2019-07-15)
-
-
-### Features
-
-* cacache@12, no need for uid/gid opts ([fdb956f](https://github.com/zkat/make-fetch-happen/commit/fdb956f))
-
-
-### BREAKING CHANGES
-
-* cache uid and gid are inferred from the cache folder itself,
-not passed in as options.
-
-
-
-<a name="4.0.2"></a>
-## [4.0.2](https://github.com/zkat/make-fetch-happen/compare/v4.0.1...v4.0.2) (2019-07-02)
-
-
-
-<a name="4.0.1"></a>
-## [4.0.1](https://github.com/zkat/make-fetch-happen/compare/v4.0.0...v4.0.1) (2018-04-12)
-
-
-### Bug Fixes
-
-* **integrity:** use new sri.match() for verification ([4f371a0](https://github.com/zkat/make-fetch-happen/commit/4f371a0))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/make-fetch-happen/compare/v3.0.0...v4.0.0) (2018-04-09)
-
-
-### meta
-
-* drop node@4, add node@9 ([7b0191a](https://github.com/zkat/make-fetch-happen/commit/7b0191a))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/make-fetch-happen/compare/v2.6.0...v3.0.0) (2018-03-12)
-
-
-### Bug Fixes
-
-* **license:** switch to ISC ([#49](https://github.com/zkat/make-fetch-happen/issues/49)) ([bf90c6d](https://github.com/zkat/make-fetch-happen/commit/bf90c6d))
-* **standard:** standard@11 update ([ff0aa70](https://github.com/zkat/make-fetch-happen/commit/ff0aa70))
-
-
-### BREAKING CHANGES
-
-* **license:** license changed from CC0 to ISC.
-
-
-
-<a name="2.6.0"></a>
-# [2.6.0](https://github.com/zkat/make-fetch-happen/compare/v2.5.0...v2.6.0) (2017-11-14)
-
-
-### Bug Fixes
-
-* **integrity:** disable node-fetch compress when checking integrity (#42) ([a7cc74c](https://github.com/zkat/make-fetch-happen/commit/a7cc74c))
-
-
-### Features
-
-* **onretry:** Add `options.onRetry` (#48) ([f90ccff](https://github.com/zkat/make-fetch-happen/commit/f90ccff))
-
-
-
-<a name="2.5.0"></a>
-# [2.5.0](https://github.com/zkat/make-fetch-happen/compare/v2.4.13...v2.5.0) (2017-08-24)
-
-
-### Bug Fixes
-
-* **agent:** support timeout durations greater than 30 seconds ([04875ae](https://github.com/zkat/make-fetch-happen/commit/04875ae)), closes [#35](https://github.com/zkat/make-fetch-happen/issues/35)
-
-
-### Features
-
-* **cache:** export cache deletion functionality (#40) ([3da4250](https://github.com/zkat/make-fetch-happen/commit/3da4250))
-
-
-
-<a name="2.4.13"></a>
-## [2.4.13](https://github.com/zkat/make-fetch-happen/compare/v2.4.12...v2.4.13) (2017-06-29)
-
-
-### Bug Fixes
-
-* **deps:** bump other deps for bugfixes ([eab8297](https://github.com/zkat/make-fetch-happen/commit/eab8297))
-* **proxy:** bump proxy deps with bugfixes (#32) ([632f860](https://github.com/zkat/make-fetch-happen/commit/632f860)), closes [#32](https://github.com/zkat/make-fetch-happen/issues/32)
-
-
-
-<a name="2.4.12"></a>
-## [2.4.12](https://github.com/zkat/make-fetch-happen/compare/v2.4.11...v2.4.12) (2017-06-06)
-
-
-### Bug Fixes
-
-* **cache:** encode x-local-cache-etc headers to be header-safe ([dc9fb1b](https://github.com/zkat/make-fetch-happen/commit/dc9fb1b))
-
-
-
-<a name="2.4.11"></a>
-## [2.4.11](https://github.com/zkat/make-fetch-happen/compare/v2.4.10...v2.4.11) (2017-06-05)
-
-
-### Bug Fixes
-
-* **deps:** bump deps with ssri fix ([bef1994](https://github.com/zkat/make-fetch-happen/commit/bef1994))
-
-
-
-<a name="2.4.10"></a>
-## [2.4.10](https://github.com/zkat/make-fetch-happen/compare/v2.4.9...v2.4.10) (2017-05-31)
-
-
-### Bug Fixes
-
-* **deps:** bump dep versions with bugfixes ([0af4003](https://github.com/zkat/make-fetch-happen/commit/0af4003))
-* **proxy:** use auth parameter for proxy authentication (#30) ([c687306](https://github.com/zkat/make-fetch-happen/commit/c687306))
-
-
-
-<a name="2.4.9"></a>
-## [2.4.9](https://github.com/zkat/make-fetch-happen/compare/v2.4.8...v2.4.9) (2017-05-25)
-
-
-### Bug Fixes
-
-* **cache:** use the passed-in promise for resolving cache stuff ([4c46257](https://github.com/zkat/make-fetch-happen/commit/4c46257))
-
-
-
-<a name="2.4.8"></a>
-## [2.4.8](https://github.com/zkat/make-fetch-happen/compare/v2.4.7...v2.4.8) (2017-05-25)
-
-
-### Bug Fixes
-
-* **cache:** pass uid/gid/Promise through to cache ([a847c92](https://github.com/zkat/make-fetch-happen/commit/a847c92))
-
-
-
-<a name="2.4.7"></a>
-## [2.4.7](https://github.com/zkat/make-fetch-happen/compare/v2.4.6...v2.4.7) (2017-05-24)
-
-
-### Bug Fixes
-
-* **deps:** pull in various fixes from deps ([fc2a587](https://github.com/zkat/make-fetch-happen/commit/fc2a587))
-
-
-
-<a name="2.4.6"></a>
-## [2.4.6](https://github.com/zkat/make-fetch-happen/compare/v2.4.5...v2.4.6) (2017-05-24)
-
-
-### Bug Fixes
-
-* **proxy:** choose agent for http(s)-proxy by protocol of destUrl ([ea4832a](https://github.com/zkat/make-fetch-happen/commit/ea4832a))
-* **proxy:** make socks proxy working ([1de810a](https://github.com/zkat/make-fetch-happen/commit/1de810a))
-* **proxy:** revert previous proxy solution ([563b0d8](https://github.com/zkat/make-fetch-happen/commit/563b0d8))
-
-
-
-<a name="2.4.5"></a>
-## [2.4.5](https://github.com/zkat/make-fetch-happen/compare/v2.4.4...v2.4.5) (2017-05-24)
-
-
-### Bug Fixes
-
-* **proxy:** use the destination url when determining agent ([1a714e7](https://github.com/zkat/make-fetch-happen/commit/1a714e7))
-
-
-
-<a name="2.4.4"></a>
-## [2.4.4](https://github.com/zkat/make-fetch-happen/compare/v2.4.3...v2.4.4) (2017-05-23)
-
-
-### Bug Fixes
-
-* **redirect:** handle redirects explicitly (#27) ([4c4af54](https://github.com/zkat/make-fetch-happen/commit/4c4af54))
-
-
-
-<a name="2.4.3"></a>
-## [2.4.3](https://github.com/zkat/make-fetch-happen/compare/v2.4.2...v2.4.3) (2017-05-06)
-
-
-### Bug Fixes
-
-* **redirect:** redirects now delete authorization if hosts fail to match ([c071805](https://github.com/zkat/make-fetch-happen/commit/c071805))
-
-
-
-<a name="2.4.2"></a>
-## [2.4.2](https://github.com/zkat/make-fetch-happen/compare/v2.4.1...v2.4.2) (2017-05-04)
-
-
-### Bug Fixes
-
-* **cache:** reduce race condition window by checking for content ([24544b1](https://github.com/zkat/make-fetch-happen/commit/24544b1))
-* **match:** Rewrite the conditional stream logic (#25) ([66bba4b](https://github.com/zkat/make-fetch-happen/commit/66bba4b))
-
-
-
-<a name="2.4.1"></a>
-## [2.4.1](https://github.com/zkat/make-fetch-happen/compare/v2.4.0...v2.4.1) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoization:** missed spots + allow passthrough of memo objs ([ac0cd12](https://github.com/zkat/make-fetch-happen/commit/ac0cd12))
-
-
-
-<a name="2.4.0"></a>
-# [2.4.0](https://github.com/zkat/make-fetch-happen/compare/v2.3.0...v2.4.0) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoize:** cacache had a broken memoizer ([8a9ed4c](https://github.com/zkat/make-fetch-happen/commit/8a9ed4c))
-
-
-### Features
-
-* **memoization:** only slurp stuff into memory if opts.memoize is not false ([0744adc](https://github.com/zkat/make-fetch-happen/commit/0744adc))
-
-
-
-<a name="2.3.0"></a>
-# [2.3.0](https://github.com/zkat/make-fetch-happen/compare/v2.2.6...v2.3.0) (2017-04-27)
-
-
-### Features
-
-* **agent:** added opts.strictSSL and opts.localAddress ([c35015a](https://github.com/zkat/make-fetch-happen/commit/c35015a))
-* **proxy:** Added opts.noProxy and NO_PROXY support ([f45c915](https://github.com/zkat/make-fetch-happen/commit/f45c915))
-
-
-
-<a name="2.2.6"></a>
-## [2.2.6](https://github.com/zkat/make-fetch-happen/compare/v2.2.5...v2.2.6) (2017-04-26)
-
-
-### Bug Fixes
-
-* **agent:** check uppercase & lowercase proxy env (#24) ([acf2326](https://github.com/zkat/make-fetch-happen/commit/acf2326)), closes [#22](https://github.com/zkat/make-fetch-happen/issues/22)
-* **deps:** switch to node-fetch-npm and stop bundling ([3db603b](https://github.com/zkat/make-fetch-happen/commit/3db603b))
-
-
-
-<a name="2.2.5"></a>
-## [2.2.5](https://github.com/zkat/make-fetch-happen/compare/v2.2.4...v2.2.5) (2017-04-23)
-
-
-### Bug Fixes
-
-* **deps:** bump cacache and use its size feature ([926c1d3](https://github.com/zkat/make-fetch-happen/commit/926c1d3))
-
-
-
-<a name="2.2.4"></a>
-## [2.2.4](https://github.com/zkat/make-fetch-happen/compare/v2.2.3...v2.2.4) (2017-04-18)
-
-
-### Bug Fixes
-
-* **integrity:** hash verification issues fixed ([07f9402](https://github.com/zkat/make-fetch-happen/commit/07f9402))
-
-
-
-<a name="2.2.3"></a>
-## [2.2.3](https://github.com/zkat/make-fetch-happen/compare/v2.2.2...v2.2.3) (2017-04-18)
-
-
-### Bug Fixes
-
-* **staleness:** responses older than 8h were never stale :< ([b54dd75](https://github.com/zkat/make-fetch-happen/commit/b54dd75))
-* **warning:** remove spurious warning, make format more spec-compliant ([2e4f6bb](https://github.com/zkat/make-fetch-happen/commit/2e4f6bb))
-
-
-
-<a name="2.2.2"></a>
-## [2.2.2](https://github.com/zkat/make-fetch-happen/compare/v2.2.1...v2.2.2) (2017-04-12)
-
-
-### Bug Fixes
-
-* **retry:** stop retrying 404s ([6fafd53](https://github.com/zkat/make-fetch-happen/commit/6fafd53))
-
-
-
-<a name="2.2.1"></a>
-## [2.2.1](https://github.com/zkat/make-fetch-happen/compare/v2.2.0...v2.2.1) (2017-04-10)
-
-
-### Bug Fixes
-
-* **deps:** move test-only deps to devDeps ([2daaf80](https://github.com/zkat/make-fetch-happen/commit/2daaf80))
-
-
-
-<a name="2.2.0"></a>
-# [2.2.0](https://github.com/zkat/make-fetch-happen/compare/v2.1.0...v2.2.0) (2017-04-09)
-
-
-### Bug Fixes
-
-* **cache:** treat caches as private ([57b7dc2](https://github.com/zkat/make-fetch-happen/commit/57b7dc2))
-
-
-### Features
-
-* **retry:** accept shorthand retry settings ([dfed69d](https://github.com/zkat/make-fetch-happen/commit/dfed69d))
-
-
-
-<a name="2.1.0"></a>
-# [2.1.0](https://github.com/zkat/make-fetch-happen/compare/v2.0.4...v2.1.0) (2017-04-09)
-
-
-### Features
-
-* **cache:** cache now obeys Age and a variety of other things (#13) ([7b9652d](https://github.com/zkat/make-fetch-happen/commit/7b9652d))
-
-
-
-<a name="2.0.4"></a>
-## [2.0.4](https://github.com/zkat/make-fetch-happen/compare/v2.0.3...v2.0.4) (2017-04-09)
-
-
-### Bug Fixes
-
-* **agent:** accept Request as fetch input, not just strings ([b71669a](https://github.com/zkat/make-fetch-happen/commit/b71669a))
-
-
-
-<a name="2.0.3"></a>
-## [2.0.3](https://github.com/zkat/make-fetch-happen/compare/v2.0.2...v2.0.3) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** seriously ([c29e7e7](https://github.com/zkat/make-fetch-happen/commit/c29e7e7))
-
-
-
-<a name="2.0.2"></a>
-## [2.0.2](https://github.com/zkat/make-fetch-happen/compare/v2.0.1...v2.0.2) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** use bundleDeps instead ([c36ebf0](https://github.com/zkat/make-fetch-happen/commit/c36ebf0))
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/zkat/make-fetch-happen/compare/v2.0.0...v2.0.1) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** make sure node-fetch tarball included in release ([3bf49d1](https://github.com/zkat/make-fetch-happen/commit/3bf49d1))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/make-fetch-happen/compare/v1.7.0...v2.0.0) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** manually pull in newer node-fetch to avoid babel prod dep ([66e5e87](https://github.com/zkat/make-fetch-happen/commit/66e5e87))
-* **retry:** be more specific about when we retry ([a47b782](https://github.com/zkat/make-fetch-happen/commit/a47b782))
-
-
-### Features
-
-* **agent:** add ca/cert/key support to auto-agent (#15) ([57585a7](https://github.com/zkat/make-fetch-happen/commit/57585a7))
-
-
-### BREAKING CHANGES
-
-* **agent:** pac proxies are no longer supported.
-* **retry:** Retry logic has changed.
-
-* 404s, 420s, and 429s all retry now.
-* ENOTFOUND no longer retries.
-* Only ECONNRESET, ECONNREFUSED, EADDRINUSE, ETIMEDOUT, and `request-timeout` errors are retried.
-
-
-
-<a name="1.7.0"></a>
-# [1.7.0](https://github.com/zkat/make-fetch-happen/compare/v1.6.0...v1.7.0) (2017-04-08)
-
-
-### Features
-
-* **cache:** add useful headers to inform users about cached data ([9bd7b00](https://github.com/zkat/make-fetch-happen/commit/9bd7b00))
-
-
-
-<a name="1.6.0"></a>
-# [1.6.0](https://github.com/zkat/make-fetch-happen/compare/v1.5.1...v1.6.0) (2017-04-06)
-
-
-### Features
-
-* **agent:** better, keepalive-supporting, default http agents ([16277f6](https://github.com/zkat/make-fetch-happen/commit/16277f6))
-
-
-
-<a name="1.5.1"></a>
-## [1.5.1](https://github.com/zkat/make-fetch-happen/compare/v1.5.0...v1.5.1) (2017-04-05)
-
-
-### Bug Fixes
-
-* **cache:** bump cacache for its fixed error messages ([2f2b916](https://github.com/zkat/make-fetch-happen/commit/2f2b916))
-* **cache:** fix handling of errors in cache reads ([5729222](https://github.com/zkat/make-fetch-happen/commit/5729222))
-
-
-
-<a name="1.5.0"></a>
-# [1.5.0](https://github.com/zkat/make-fetch-happen/compare/v1.4.0...v1.5.0) (2017-04-04)
-
-
-### Features
-
-* **retry:** retry requests on 408 timeouts, too ([8d8b5bd](https://github.com/zkat/make-fetch-happen/commit/8d8b5bd))
-
-
-
-<a name="1.4.0"></a>
-# [1.4.0](https://github.com/zkat/make-fetch-happen/compare/v1.3.1...v1.4.0) (2017-04-04)
-
-
-### Bug Fixes
-
-* **cache:** stop relying on BB.catch ([2b04494](https://github.com/zkat/make-fetch-happen/commit/2b04494))
-
-
-### Features
-
-* **retry:** report retry attempt number as extra header ([fd50927](https://github.com/zkat/make-fetch-happen/commit/fd50927))
-
-
-
-<a name="1.3.1"></a>
-## [1.3.1](https://github.com/zkat/make-fetch-happen/compare/v1.3.0...v1.3.1) (2017-04-04)
-
-
-### Bug Fixes
-
-* **cache:** pretend cache entry is missing on ENOENT ([9c2bb26](https://github.com/zkat/make-fetch-happen/commit/9c2bb26))
-
-
-
-<a name="1.3.0"></a>
-# [1.3.0](https://github.com/zkat/make-fetch-happen/compare/v1.2.1...v1.3.0) (2017-04-04)
-
-
-### Bug Fixes
-
-* **cache:** if metadata is missing for some odd reason, ignore the entry ([a021a6b](https://github.com/zkat/make-fetch-happen/commit/a021a6b))
-
-
-### Features
-
-* **cache:** add special headers when request was loaded straight from cache ([8a7dbd1](https://github.com/zkat/make-fetch-happen/commit/8a7dbd1))
-* **cache:** allow configuring algorithms to be calculated on insertion ([bf4a0f2](https://github.com/zkat/make-fetch-happen/commit/bf4a0f2))
-
-
-
-<a name="1.2.1"></a>
-## [1.2.1](https://github.com/zkat/make-fetch-happen/compare/v1.2.0...v1.2.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** update cacache and ssri and change EBADCHECKSUM -> EINTEGRITY ([b6cf6f6](https://github.com/zkat/make-fetch-happen/commit/b6cf6f6))
-
-
-
-<a name="1.2.0"></a>
-# [1.2.0](https://github.com/zkat/make-fetch-happen/compare/v1.1.0...v1.2.0) (2017-04-03)
-
-
-### Features
-
-* **integrity:** full Subresource Integrity support (#10) ([a590159](https://github.com/zkat/make-fetch-happen/commit/a590159))
-
-
-
-<a name="1.1.0"></a>
-# [1.1.0](https://github.com/zkat/make-fetch-happen/compare/v1.0.1...v1.1.0) (2017-04-01)
-
-
-### Features
-
-* **opts:** fetch.defaults() for default options ([522a65e](https://github.com/zkat/make-fetch-happen/commit/522a65e))
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/zkat/make-fetch-happen/compare/v1.0.0...v1.0.1) (2017-04-01)
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-04-01)
-
-
-### Bug Fixes
-
-* **cache:** default on cache-control header ([b872a2c](https://github.com/zkat/make-fetch-happen/commit/b872a2c))
-* standard stuff and cache matching ([753f2c2](https://github.com/zkat/make-fetch-happen/commit/753f2c2))
-* **agent:** nudge around things with opts.agent ([ed62b57](https://github.com/zkat/make-fetch-happen/commit/ed62b57))
-* **agent:** {agent: false} has special behavior ([b8cc923](https://github.com/zkat/make-fetch-happen/commit/b8cc923))
-* **cache:** invalidation on non-GET ([fe78fac](https://github.com/zkat/make-fetch-happen/commit/fe78fac))
-* **cache:** make force-cache and only-if-cached work as expected ([f50e9df](https://github.com/zkat/make-fetch-happen/commit/f50e9df))
-* **cache:** more spec compliance ([d5a56db](https://github.com/zkat/make-fetch-happen/commit/d5a56db))
-* **cache:** only cache 200 gets ([0abb25a](https://github.com/zkat/make-fetch-happen/commit/0abb25a))
-* **cache:** only load cache code if cache opt is a string ([250fcd5](https://github.com/zkat/make-fetch-happen/commit/250fcd5))
-* **cache:** oops ([e3fa15a](https://github.com/zkat/make-fetch-happen/commit/e3fa15a))
-* **cache:** refactored warning removal into main file ([5b0a9f9](https://github.com/zkat/make-fetch-happen/commit/5b0a9f9))
-* **cache:** req constructor no longer needed in Cache ([5b74cbc](https://github.com/zkat/make-fetch-happen/commit/5b74cbc))
-* **cache:** standard fetch api calls cacheMode "cache" ([6fba805](https://github.com/zkat/make-fetch-happen/commit/6fba805))
-* **cache:** was using wrong method for non-GET/HEAD cache invalidation ([810763a](https://github.com/zkat/make-fetch-happen/commit/810763a))
-* **caching:** a bunch of cache-related fixes ([8ebda1d](https://github.com/zkat/make-fetch-happen/commit/8ebda1d))
-* **deps:** `cacache[@6](https://github.com/6).3.0` - race condition fixes ([9528442](https://github.com/zkat/make-fetch-happen/commit/9528442))
-* **freshness:** fix regex for cacheControl matching ([070db86](https://github.com/zkat/make-fetch-happen/commit/070db86))
-* **freshness:** fixed default freshness heuristic value ([5d29e88](https://github.com/zkat/make-fetch-happen/commit/5d29e88))
-* **logging:** remove console.log calls ([a1d0a47](https://github.com/zkat/make-fetch-happen/commit/a1d0a47))
-* **method:** node-fetch guarantees uppercase ([a1d68d6](https://github.com/zkat/make-fetch-happen/commit/a1d68d6))
-* **opts:** simplified opts handling ([516fd6e](https://github.com/zkat/make-fetch-happen/commit/516fd6e))
-* **proxy:** pass proxy option directly to ProxyAgent ([3398460](https://github.com/zkat/make-fetch-happen/commit/3398460))
-* **retry:** false -> {retries: 0} ([297fbb6](https://github.com/zkat/make-fetch-happen/commit/297fbb6))
-* **retry:** only retry put if body is not a stream ([a24e599](https://github.com/zkat/make-fetch-happen/commit/a24e599))
-* **retry:** skip retries if body is a stream for ANY method ([780c0f8](https://github.com/zkat/make-fetch-happen/commit/780c0f8))
-
-
-### Features
-
-* **api:** initial implementation -- can make and cache requests ([7d55b49](https://github.com/zkat/make-fetch-happen/commit/7d55b49))
-* **fetch:** injectable cache, and retry support ([87b84bf](https://github.com/zkat/make-fetch-happen/commit/87b84bf))
-
-
-### BREAKING CHANGES
-
-* **cache:** opts.cache -> opts.cacheManager; opts.cacheMode -> opts.cache
-* **fetch:** opts.cache accepts a Cache-like obj or a path. Requests are now retried.
-* **api:** actual api implemented
diff --git a/node_modules/make-fetch-happen/LICENSE b/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/README.md b/node_modules/make-fetch-happen/README.md
deleted file mode 100644
index 4d12d8dae..000000000
--- a/node_modules/make-fetch-happen/README.md
+++ /dev/null
@@ -1,404 +0,0 @@
-# make-fetch-happen [![npm version](https://img.shields.io/npm/v/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![license](https://img.shields.io/npm/l/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![Travis](https://img.shields.io/travis/zkat/make-fetch-happen.svg)](https://travis-ci.org/zkat/make-fetch-happen) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/make-fetch-happen?svg=true)](https://ci.appveyor.com/project/zkat/make-fetch-happen) [![Coverage Status](https://coveralls.io/repos/github/zkat/make-fetch-happen/badge.svg?branch=latest)](https://coveralls.io/github/zkat/make-fetch-happen?branch=latest)
-
-
-[`make-fetch-happen`](https://github.com/zkat/make-fetch-happen) is a Node.js
-library that wraps [`node-fetch-npm`](https://github.com/npm/node-fetch-npm) with additional
-features [`node-fetch`](https://github.com/bitinn/node-fetch) doesn't intend to include, such as HTTP cache support, request
-pooling, proxies, retries, [and more](#features)!
-
-## Install
-
-`$ npm install --save make-fetch-happen`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [`fetch`](#fetch)
- * [`fetch.defaults`](#fetch-defaults)
- * [`node-fetch` options](#node-fetch-options)
- * [`make-fetch-happen` options](#extra-options)
- * [`opts.cacheManager`](#opts-cache-manager)
- * [`opts.cache`](#opts-cache)
- * [`opts.proxy`](#opts-proxy)
- * [`opts.noProxy`](#opts-no-proxy)
- * [`opts.ca, opts.cert, opts.key`](#https-opts)
- * [`opts.maxSockets`](#opts-max-sockets)
- * [`opts.retry`](#opts-retry)
- * [`opts.onRetry`](#opts-onretry)
- * [`opts.integrity`](#opts-integrity)
-* [Message From Our Sponsors](#wow)
-
-### Example
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
- cacheManager: './my-cache' // path where cache will be written (and read)
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen').then(res => {
- return res.json() // download the body as JSON
-}).then(body => {
- console.log(`got ${body.name} from web`)
- return fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'no-cache' // forces a conditional request
- })
-}).then(res => {
- console.log(res.status) // 304! cache validated!
- return res.json().then(body => {
- console.log(`got ${body.name} from cache`)
- })
-})
-```
-
-### Features
-
-* Builds around [`node-fetch`](https://npm.im/node-fetch) for the core [`fetch` API](https://fetch.spec.whatwg.org) implementation
-* Request pooling out of the box
-* Quite fast, really
-* Automatic HTTP-semantics-aware request retries
-* Cache-fallback automatic "offline mode"
-* Proxy support (http, https, socks, socks4, socks5)
-* Built-in request caching following full HTTP caching rules (`Cache-Control`, `ETag`, `304`s, cache fallback on error, etc).
-* Customize cache storage with any [Cache API](https://developer.mozilla.org/en-US/docs/Web/API/Cache)-compliant `Cache` instance. Cache to Redis!
-* Node.js Stream support
-* Transparent gzip and deflate support
-* [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) support
-* Literally punches nazis
-* (PENDING) Range request caching and resuming
-
-### Contributing
-
-The make-fetch-happen team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### <a name="fetch"></a> `> fetch(uriOrRequest, [opts]) -> Promise<Response>`
-
-This function implements most of the [`fetch` API](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch): given a `uri` string or a `Request` instance, it will fire off an http request and return a Promise containing the relevant response.
-
-If `opts` is provided, the [`node-fetch`-specific options](#node-fetch-options) will be passed to that library. There are also [additional options](#extra-options) specific to make-fetch-happen that add various features, such as HTTP caching, integrity verification, proxy support, and more.
-
-##### Example
-
-```javascript
-fetch('https://google.com').then(res => res.buffer())
-```
-
-#### <a name="fetch-defaults"></a> `> fetch.defaults([defaultUrl], [defaultOpts])`
-
-Returns a new `fetch` function that will call `make-fetch-happen` using `defaultUrl` and `defaultOpts` as default values to any calls.
-
-A defaulted `fetch` will also have a `.defaults()` method, so they can be chained.
-
-##### Example
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
- cacheManager: './my-local-cache'
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen') // will always use the cache
-```
-
-#### <a name="node-fetch-options"></a> `> node-fetch options`
-
-The following options for `node-fetch` are used as-is:
-
-* method
-* body
-* redirect
-* follow
-* timeout
-* compress
-* size
-
-These other options are modified or augmented by make-fetch-happen:
-
-* headers - Default `User-Agent` set to make-fetch-happen. `Connection` is set to `keep-alive` or `close` automatically depending on `opts.agent`.
-* agent
- * If agent is null, an http or https Agent will be automatically used. By default, these will be `http.globalAgent` and `https.globalAgent`.
- * If [`opts.proxy`](#opts-proxy) is provided and `opts.agent` is null, the agent will be set to an appropriate proxy-handling agent.
- * If `opts.agent` is an object, it will be used as the request-pooling agent argument for this request.
- * If `opts.agent` is `false`, it will be passed as-is to the underlying request library. This causes a new Agent to be spawned for every request.
-
-For more details, see [the documentation for `node-fetch` itself](https://github.com/bitinn/node-fetch#options).
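-
-As a hedged illustration of the `agent` behavior described above (the agent settings here are arbitrary, not library defaults), you can pass a preconfigured agent and it will be used as the request-pooling agent for that call:
-
-```javascript
-const https = require('https')
-const fetch = require('make-fetch-happen')
-
-// reuse one keep-alive agent so requests made with it share a socket pool
-const agent = new https.Agent({ keepAlive: true, maxSockets: 5 })
-
-fetch('https://registry.npmjs.org/make-fetch-happen', { agent })
-  .then(res => res.json())
-```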
-
-#### <a name="extra-options"></a> `> make-fetch-happen options`
-
-make-fetch-happen augments the `node-fetch` API with additional features available through extra options. The following extra options are available:
-
-* [`opts.cacheManager`](#opts-cache-manager) - Cache target to read/write
-* [`opts.cache`](#opts-cache) - `fetch` cache mode. Controls cache *behavior*.
-* [`opts.proxy`](#opts-proxy) - Proxy agent
-* [`opts.noProxy`](#opts-no-proxy) - Domain segments to disable proxying for.
-* [`opts.ca, opts.cert, opts.key, opts.strictSSL`](#https-opts) - TLS options for outgoing HTTPS requests
-* [`opts.localAddress`](#opts-local-address) - Local interface address to bind to
-* [`opts.maxSockets`](#opts-max-sockets) - Maximum concurrent sockets per agent
-* [`opts.retry`](#opts-retry) - Request retry settings
-* [`opts.onRetry`](#opts-onretry) - a function called whenever a retry is attempted
-* [`opts.integrity`](#opts-integrity) - [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata.
-
-#### <a name="opts-cache-manager"></a> `> opts.cacheManager`
-
-Either a `String` or a `Cache`. If the former, it will be treated as a filesystem path to be used as the cache root for [`cacache`](https://npm.im/cacache).
-
-If an object is provided, it will be assumed to be a compliant [`Cache` instance](https://developer.mozilla.org/en-US/docs/Web/API/Cache). Only `Cache.match()`, `Cache.put()`, and `Cache.delete()` are required. Options objects will not be passed in to `match()` or `delete()`.
-
-By implementing this API, you can customize the storage backend for make-fetch-happen itself -- for example, you could implement a cache that uses `redis` for caching, or simply keeps everything in memory. Most of the caching logic exists entirely on the make-fetch-happen side, so the only thing you need to worry about is reading, writing, and deleting, as well as making sure `fetch.Response` objects are what gets returned.
-
-You can refer to `cache.js` in the make-fetch-happen source code for a reference implementation.
-
-**NOTE**: Requests will not be cached unless their response bodies are consumed. You will need to use one of the `res.json()`, `res.buffer()`, etc methods on the response, or drain the `res.body` stream, in order for it to be written.
-
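-For example (a hedged sketch), the entry for this request is only written once the JSON body has actually been read:
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
-  cacheManager: './my-cache'
-}).then(res => res.json()) // draining the body lets the cache entry be written
-```
-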
-The default cache manager also adds the following headers to cached responses:
-
-* `X-Local-Cache`: Path to the cache the content was found in
-* `X-Local-Cache-Key`: Unique cache entry key for this response
-* `X-Local-Cache-Hash`: Specific integrity hash for the cached entry
-* `X-Local-Cache-Time`: UTCString of the cache insertion time for the entry
-
-Using [`cacache`](https://npm.im/cacache), a call like this may be used to
-manually fetch the cached entry:
-
-```javascript
-const h = response.headers
-cacache.get(h.get('x-local-cache'), h.get('x-local-cache-key'))
-
-// grab content only, directly:
-cacache.get.byDigest(h.get('x-local-cache'), h.get('x-local-cache-hash'))
-```
-
-##### Example
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cacheManager: './my-local-cache'
-}) // -> 200-level response will be written to disk
-
-fetch('https://npm.im/cacache', {
- cacheManager: new MyCustomRedisCache(process.env.PORT)
-}) // -> 200-level response will be written to redis
-```
-
-A possible (minimal) implementation for `MyCustomRedisCache`:
-
-```javascript
-const bluebird = require('bluebird')
-const redis = require('redis')
-bluebird.promisifyAll(redis.RedisClient.prototype)
-class MyCustomRedisCache {
- constructor (opts) {
- this.redis = redis.createClient(opts)
- }
- match (req) {
- return this.redis.getAsync(req.url).then(res => {
- if (res) {
- const parsed = JSON.parse(res)
- return new fetch.Response(parsed.body, {
- url: req.url,
- headers: parsed.headers,
- status: 200
- })
- }
- })
- }
- put (req, res) {
- return res.buffer().then(body => {
- return this.redis.setAsync(req.url, JSON.stringify({
- body: body,
- headers: res.headers.raw()
- }))
- }).then(() => {
- // return the response itself
- return res
- })
- }
- 'delete' (req) {
- return this.redis.unlinkAsync(req.url)
- }
-}
-```
-
-#### <a name="opts-cache"></a> `> opts.cache`
-
-This option follows the standard `fetch` API cache option. This option will do nothing if [`opts.cacheManager`](#opts-cache-manager) is null. The following values are accepted (as strings):
-
-* `default` - Fetch will inspect the HTTP cache on the way to the network. If there is a fresh response, it will be used. If there is a stale response, a conditional request will be created; otherwise, a normal request. It then updates the HTTP cache with the response. If the revalidation request fails (for example, on a 500 or if you're offline), the stale response will be returned.
-* `no-store` - Fetch behaves as if there is no HTTP cache at all.
-* `reload` - Fetch behaves as if there is no HTTP cache on the way to the network. Ergo, it creates a normal request and updates the HTTP cache with the response.
-* `no-cache` - Fetch creates a conditional request if there is a response in the HTTP cache and a normal request otherwise. It then updates the HTTP cache with the response.
-* `force-cache` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it creates a normal request and updates the HTTP cache with the response.
-* `only-if-cached` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it returns a network error. (Can only be used when request’s mode is "same-origin". Any cached redirects will be followed assuming request’s redirect mode is "follow" and the redirects do not violate request’s mode.)
-
-(Note: option descriptions are taken from https://fetch.spec.whatwg.org/#http-network-or-cache-fetch)
-
-##### Example
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
- cacheManager: './my-cache'
-})
-
-// Will error with ENOTCACHED if we haven't already cached this url
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'only-if-cached'
-})
-
-// Will refresh any local content and cache the new response
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'reload'
-})
-
-// Will use any local data, even if stale. Otherwise, will hit network.
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'force-cache'
-})
-```
-
-#### <a name="opts-proxy"></a> `> opts.proxy`
-
-A string or `url.parse`-d URI to proxy through. Different proxy handlers will be
-used depending on the proxy's protocol.
-
-Additionally, `process.env.HTTP_PROXY`, `process.env.HTTPS_PROXY`, and
-`process.env.PROXY` are used if present and no `opts.proxy` value is provided.
-
-(Pending) `process.env.NO_PROXY` may also be configured to skip proxying requests for all, or specific domains.
-
-##### Example
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- proxy: 'https://corporate.yourcompany.proxy:4445'
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- proxy: {
- protocol: 'https:',
- hostname: 'corporate.yourcompany.proxy',
- port: 4445
- }
-})
-```
-
-#### <a name="opts-no-proxy"></a> `> opts.noProxy`
-
-If present, should be a comma-separated string or an array of domain extensions
-that a proxy should _not_ be used for.
-
-This option may also be provided through `process.env.NO_PROXY`.
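-
-##### Example
-
-A hedged sketch -- the proxy URL and domain names below are placeholders:
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
-  proxy: 'https://corporate.yourcompany.proxy:4445',
-  noProxy: ['localhost', 'internal.example'] // requests to these domains bypass the proxy
-})
-```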
-
-#### <a name="https-opts"></a> `> opts.ca, opts.cert, opts.key, opts.strictSSL`
-
-These values are passed in directly to the HTTPS agent and will be used for both
-proxied and unproxied outgoing HTTPS requests. They mostly correspond to the
-same options the `https` module accepts, which will be themselves passed to
-`tls.connect()`. `opts.strictSSL` corresponds to `rejectUnauthorized`.
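-
-##### Example
-
-A hedged sketch -- the certificate paths below are placeholders for your own files:
-
-```javascript
-const fs = require('fs')
-
-fetch('https://registry.internal.example/make-fetch-happen', {
-  ca: fs.readFileSync('/path/to/corporate-ca.pem'),  // custom CA bundle
-  cert: fs.readFileSync('/path/to/client.crt'),      // client certificate
-  key: fs.readFileSync('/path/to/client.key'),       // client private key
-  strictSSL: true                                    // i.e. rejectUnauthorized
-})
-```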
-
-#### <a name="opts-local-address"></a> `> opts.localAddress`
-
-Passed directly to `http` and `https` request calls. Determines the local
-address to bind to.
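-
-##### Example
-
-A hedged sketch -- `192.0.2.10` is a documentation-only address standing in for one of your machine's interfaces:
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
-  localAddress: '192.0.2.10' // bind the outgoing socket to this local interface
-})
-```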
-
-#### <a name="opts-max-sockets"></a> `> opts.maxSockets`
-
-Default: 15
-
-Maximum number of active concurrent sockets to use for the underlying
-Http/Https/Proxy agents. This setting applies once per spawned agent.
-
-15 is probably a _pretty good value_ for most use-cases, and balances speed
-with, uh, not knocking out people's routers. 🤓
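-
-##### Example
-
-A hedged sketch -- the value below is deliberately lower than the default of 15:
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
-  maxSockets: 5 // at most 5 concurrent sockets per spawned agent
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen')
-```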
-
-#### <a name="opts-retry"></a> `> opts.retry`
-
-An object that can be used to tune request retry settings. Retries will only be attempted when the request method is NOT `POST` and at least one of the following holds:
-
-* Request status is one of `408`, `420`, `429`, or any status in the 500-range, OR
-* Request errored with `ECONNRESET`, `ECONNREFUSED`, `EADDRINUSE`, `ETIMEDOUT`, or the `fetch` error `request-timeout`.
-
-The following are worth noting as explicitly not retried:
-
-* `getaddrinfo ENOTFOUND`, which is assumed to mean either an unreachable domain or that the user is offline. If a response is cached, it will be returned immediately.
-* `ECONNRESET` currently has no support for restarting. It will eventually be supported, but requires a bit more juggling due to streaming.
-
-If `opts.retry` is `false`, it is equivalent to `{retries: 0}`.
-
-If `opts.retry` is a number, it is equivalent to `{retries: num}`.
-
-The following retry options are available if you want more control over retry behavior:
-
-* retries
-* factor
-* minTimeout
-* maxTimeout
-* randomize
-
-For details on what each of these do, refer to the [`retry`](https://npm.im/retry) documentation.
-
-##### Example
-
-```javascript
-fetch('https://flaky.site.com', {
- retry: {
- retries: 10,
- randomize: true
- }
-})
-
-fetch('http://reliable.site.com', {
- retry: false
-})
-
-fetch('http://one-more.site.com', {
- retry: 3
-})
-```
-
-#### <a name="opts-onretry"></a> `> opts.onRetry`
-
-A function called whenever a retry is attempted.
-
-##### Example
-
-```javascript
-fetch('https://flaky.site.com', {
- onRetry() {
- console.log('we will retry!')
- }
-})
-```
-
-#### <a name="opts-integrity"></a> `> opts.integrity`
-
-Matches the response body against the given [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. If verification fails, the request will fail with an `EINTEGRITY` error.
-
-`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) `Integrity`-like.
-
-##### Example
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', {
- integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q='
-}) // -> ok
-
-fetch('https://malicious-registry.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', {
- integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q='
-}) // Error: EINTEGRITY
-```
-
-### <a name="wow"></a> Message From Our Sponsors
-
-![](stop.gif)
-
-![](happening.gif)
diff --git a/node_modules/make-fetch-happen/agent.js b/node_modules/make-fetch-happen/agent.js
deleted file mode 100644
index 55675946a..000000000
--- a/node_modules/make-fetch-happen/agent.js
+++ /dev/null
@@ -1,171 +0,0 @@
-'use strict'
-const LRU = require('lru-cache')
-const url = require('url')
-
-let AGENT_CACHE = new LRU({ max: 50 })
-let HttpsAgent
-let HttpAgent
-
-module.exports = getAgent
-
-function getAgent (uri, opts) {
- const parsedUri = url.parse(typeof uri === 'string' ? uri : uri.url)
- const isHttps = parsedUri.protocol === 'https:'
- const pxuri = getProxyUri(uri, opts)
-
- const key = [
- `https:${isHttps}`,
- pxuri
- ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
- : '>no-proxy<',
- `local-address:${opts.localAddress || '>no-local-address<'}`,
- `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`,
- `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
- `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
- `key:${(isHttps && opts.key) || '>no-key<'}`
- ].join(':')
-
- if (opts.agent != null) { // `agent: false` has special behavior!
- return opts.agent
- }
-
- if (AGENT_CACHE.peek(key)) {
- return AGENT_CACHE.get(key)
- }
-
- if (pxuri) {
- const proxy = getProxy(pxuri, opts, isHttps)
- AGENT_CACHE.set(key, proxy)
- return proxy
- }
-
- if (isHttps && !HttpsAgent) {
- HttpsAgent = require('agentkeepalive').HttpsAgent
- } else if (!isHttps && !HttpAgent) {
- HttpAgent = require('agentkeepalive')
- }
-
- // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
- // of zero disables the timeout behavior (OS limits still apply). Else, if
- // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
- // the node-fetch-npm timeout will always fire first, giving us more
- // consistent errors.
- const agentTimeout = opts.timeout === 0 ? 0 : opts.timeout + 1
-
- const agent = isHttps ? new HttpsAgent({
- maxSockets: opts.maxSockets || 15,
- ca: opts.ca,
- cert: opts.cert,
- key: opts.key,
- localAddress: opts.localAddress,
- rejectUnauthorized: opts.strictSSL,
- timeout: agentTimeout
- }) : new HttpAgent({
- maxSockets: opts.maxSockets || 15,
- localAddress: opts.localAddress,
- timeout: agentTimeout
- })
- AGENT_CACHE.set(key, agent)
- return agent
-}
-
-function checkNoProxy (uri, opts) {
- const host = url.parse(uri).hostname.split('.').reverse()
- let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
- if (typeof noproxy === 'string') {
- noproxy = noproxy.split(/\s*,\s*/g)
- }
- return noproxy && noproxy.some(no => {
- const noParts = no.split('.').filter(x => x).reverse()
- if (!noParts.length) { return false }
- for (let i = 0; i < noParts.length; i++) {
- if (host[i] !== noParts[i]) {
- return false
- }
- }
- return true
- })
-}
-
-module.exports.getProcessEnv = getProcessEnv
-
-function getProcessEnv (env) {
- if (!env) { return }
-
- let value
-
- if (Array.isArray(env)) {
- for (let e of env) {
- value = process.env[e] ||
- process.env[e.toUpperCase()] ||
- process.env[e.toLowerCase()]
- if (typeof value !== 'undefined') { break }
- }
- }
-
- if (typeof env === 'string') {
- value = process.env[env] ||
- process.env[env.toUpperCase()] ||
- process.env[env.toLowerCase()]
- }
-
- return value
-}
-
-function getProxyUri (uri, opts) {
- const protocol = url.parse(uri).protocol
-
- const proxy = opts.proxy || (
- protocol === 'https:' && getProcessEnv('https_proxy')
- ) || (
- protocol === 'http:' && getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
- )
- if (!proxy) { return null }
-
- const parsedProxy = (typeof proxy === 'string') ? url.parse(proxy) : proxy
-
- return !checkNoProxy(uri, opts) && parsedProxy
-}
-
-let HttpProxyAgent
-let HttpsProxyAgent
-let SocksProxyAgent
-function getProxy (proxyUrl, opts, isHttps) {
- let popts = {
- host: proxyUrl.hostname,
- port: proxyUrl.port,
- protocol: proxyUrl.protocol,
- path: proxyUrl.path,
- auth: proxyUrl.auth,
- ca: opts.ca,
- cert: opts.cert,
- key: opts.key,
- timeout: opts.timeout === 0 ? 0 : opts.timeout + 1,
- localAddress: opts.localAddress,
- maxSockets: opts.maxSockets || 15,
- rejectUnauthorized: opts.strictSSL
- }
-
- if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
- if (!isHttps) {
- if (!HttpProxyAgent) {
- HttpProxyAgent = require('http-proxy-agent')
- }
-
- return new HttpProxyAgent(popts)
- } else {
- if (!HttpsProxyAgent) {
- HttpsProxyAgent = require('https-proxy-agent')
- }
-
- return new HttpsProxyAgent(popts)
- }
- }
- if (proxyUrl.protocol.startsWith('socks')) {
- if (!SocksProxyAgent) {
- SocksProxyAgent = require('socks-proxy-agent')
- }
-
- return new SocksProxyAgent(popts)
- }
-}
diff --git a/node_modules/make-fetch-happen/cache.js b/node_modules/make-fetch-happen/cache.js
deleted file mode 100644
index f00de14a8..000000000
--- a/node_modules/make-fetch-happen/cache.js
+++ /dev/null
@@ -1,245 +0,0 @@
-'use strict'
-
-const cacache = require('cacache')
-const fetch = require('node-fetch-npm')
-const pipe = require('mississippi').pipe
-const ssri = require('ssri')
-const through = require('mississippi').through
-const to = require('mississippi').to
-const url = require('url')
-const stream = require('stream')
-
-const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB
-
-function cacheKey (req) {
- const parsed = url.parse(req.url)
- return `make-fetch-happen:request-cache:${
- url.format({
- protocol: parsed.protocol,
- slashes: parsed.slashes,
- host: parsed.host,
- hostname: parsed.hostname,
- pathname: parsed.pathname
- })
- }`
-}
-
-// This is a cacache-based implementation of the Cache standard,
-// using node-fetch.
-// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache
-//
-module.exports = class Cache {
- constructor (path, opts) {
- this._path = path
- this.Promise = (opts && opts.Promise) || Promise
- }
-
- // Returns a Promise that resolves to the response associated with the first
- // matching request in the Cache object.
- match (req, opts) {
- opts = opts || {}
- const key = cacheKey(req)
- return cacache.get.info(this._path, key).then(info => {
- return info && cacache.get.hasContent(
- this._path, info.integrity, opts
- ).then(exists => exists && info)
- }).then(info => {
- if (info && info.metadata && matchDetails(req, {
- url: info.metadata.url,
- reqHeaders: new fetch.Headers(info.metadata.reqHeaders),
- resHeaders: new fetch.Headers(info.metadata.resHeaders),
- cacheIntegrity: info.integrity,
- integrity: opts && opts.integrity
- })) {
- const resHeaders = new fetch.Headers(info.metadata.resHeaders)
- addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time)
- if (req.method === 'HEAD') {
- return new fetch.Response(null, {
- url: req.url,
- headers: resHeaders,
- status: 200
- })
- }
- let body
- const cachePath = this._path
- // avoid opening cache file handles until a user actually tries to
- // read from it.
- if (opts.memoize !== false && info.size > MAX_MEM_SIZE) {
- body = new stream.PassThrough()
- const realRead = body._read
- body._read = function (size) {
- body._read = realRead
- pipe(
- cacache.get.stream.byDigest(cachePath, info.integrity, {
- memoize: opts.memoize
- }),
- body,
-            err => err && body.emit('error', err))
- return realRead.call(this, size)
- }
- } else {
- let readOnce = false
- // cacache is much faster at bulk reads
- body = new stream.Readable({
- read () {
- if (readOnce) return this.push(null)
- readOnce = true
- cacache.get.byDigest(cachePath, info.integrity, {
- memoize: opts.memoize
- }).then(data => {
- this.push(data)
- this.push(null)
- }, err => this.emit('error', err))
- }
- })
- }
- return this.Promise.resolve(new fetch.Response(body, {
- url: req.url,
- headers: resHeaders,
- status: 200,
- size: info.size
- }))
- }
- })
- }
-
- // Takes both a request and its response and adds it to the given cache.
- put (req, response, opts) {
- opts = opts || {}
- const size = response.headers.get('content-length')
- const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE
- const ckey = cacheKey(req)
- const cacheOpts = {
- algorithms: opts.algorithms,
- metadata: {
- url: req.url,
- reqHeaders: req.headers.raw(),
- resHeaders: response.headers.raw()
- },
- size,
- memoize: fitInMemory && opts.memoize
- }
- if (req.method === 'HEAD' || response.status === 304) {
- // Update metadata without writing
- return cacache.get.info(this._path, ckey).then(info => {
- // Providing these will bypass content write
- cacheOpts.integrity = info.integrity
- addCacheHeaders(
- response.headers, this._path, ckey, info.integrity, info.time
- )
- return new this.Promise((resolve, reject) => {
- pipe(
- cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts),
- cacache.put.stream(this._path, cacheKey(req), cacheOpts),
- err => err ? reject(err) : resolve(response)
- )
- })
- }).then(() => response)
- }
- let buf = []
- let bufSize = 0
- let cacheTargetStream = false
- const cachePath = this._path
- let cacheStream = to((chunk, enc, cb) => {
- if (!cacheTargetStream) {
- if (fitInMemory) {
- cacheTargetStream =
- to({highWaterMark: MAX_MEM_SIZE}, (chunk, enc, cb) => {
- buf.push(chunk)
- bufSize += chunk.length
- cb()
- }, done => {
- cacache.put(
- cachePath,
- cacheKey(req),
- Buffer.concat(buf, bufSize),
- cacheOpts
- ).then(
- () => done(),
- done
- )
- })
- } else {
- cacheTargetStream =
- cacache.put.stream(cachePath, cacheKey(req), cacheOpts)
- }
- }
- cacheTargetStream.write(chunk, enc, cb)
- }, done => {
- cacheTargetStream ? cacheTargetStream.end(done) : done()
- })
- const oldBody = response.body
- const newBody = through({highWaterMark: MAX_MEM_SIZE})
- response.body = newBody
- oldBody.once('error', err => newBody.emit('error', err))
- newBody.once('error', err => oldBody.emit('error', err))
- cacheStream.once('error', err => newBody.emit('error', err))
- pipe(oldBody, to((chunk, enc, cb) => {
- cacheStream.write(chunk, enc, () => {
- newBody.write(chunk, enc, cb)
- })
- }, done => {
- cacheStream.end(() => {
- newBody.end(() => {
- done()
- })
- })
- }), err => err && newBody.emit('error', err))
- return response
- }
-
- // Finds the Cache entry whose key is the request, and if found, deletes the
- // Cache entry and returns a Promise that resolves to true. If no Cache entry
- // is found, it returns false.
- 'delete' (req, opts) {
- opts = opts || {}
- if (typeof opts.memoize === 'object') {
- if (opts.memoize.reset) {
- opts.memoize.reset()
- } else if (opts.memoize.clear) {
- opts.memoize.clear()
- } else {
- Object.keys(opts.memoize).forEach(k => {
- opts.memoize[k] = null
- })
- }
- }
- return cacache.rm.entry(
- this._path,
- cacheKey(req)
- // TODO - true/false
- ).then(() => false)
- }
-}
-
-function matchDetails (req, cached) {
- const reqUrl = url.parse(req.url)
- const cacheUrl = url.parse(cached.url)
- const vary = cached.resHeaders.get('Vary')
- // https://tools.ietf.org/html/rfc7234#section-4.1
- if (vary) {
- if (vary.match(/\*/)) {
- return false
- } else {
- const fieldsMatch = vary.split(/\s*,\s*/).every(field => {
- return cached.reqHeaders.get(field) === req.headers.get(field)
- })
- if (!fieldsMatch) {
- return false
- }
- }
- }
- if (cached.integrity) {
- return ssri.parse(cached.integrity).match(cached.cacheIntegrity)
- }
- reqUrl.hash = null
- cacheUrl.hash = null
- return url.format(reqUrl) === url.format(cacheUrl)
-}
-
-function addCacheHeaders (resHeaders, path, key, hash, time) {
- resHeaders.set('X-Local-Cache', encodeURIComponent(path))
- resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key))
- resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash))
- resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString())
-}
diff --git a/node_modules/make-fetch-happen/index.js b/node_modules/make-fetch-happen/index.js
deleted file mode 100644
index 0f2c164e1..000000000
--- a/node_modules/make-fetch-happen/index.js
+++ /dev/null
@@ -1,482 +0,0 @@
-'use strict'
-
-let Cache
-const url = require('url')
-const CachePolicy = require('http-cache-semantics')
-const fetch = require('node-fetch-npm')
-const pkg = require('./package.json')
-const retry = require('promise-retry')
-let ssri
-const Stream = require('stream')
-const getAgent = require('./agent')
-const setWarning = require('./warning')
-
-const isURL = /^https?:/
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
- 'ECONNRESET', // remote socket closed on us
- 'ECONNREFUSED', // remote host refused to open connection
- 'EADDRINUSE', // failed to bind to a local port (proxy?)
- 'ETIMEDOUT' // someone in the transaction is WAY TOO SLOW
- // Known codes we do NOT retry on:
- // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-]
-
-const RETRY_TYPES = [
- 'request-timeout'
-]
-
-// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
-module.exports = cachingFetch
-cachingFetch.defaults = function (_uri, _opts) {
- const fetch = this
- if (typeof _uri === 'object') {
- _opts = _uri
- _uri = null
- }
-
- function defaultedFetch (uri, opts) {
- const finalOpts = Object.assign({}, _opts || {}, opts || {})
- return fetch(uri || _uri, finalOpts)
- }
-
- defaultedFetch.defaults = fetch.defaults
- defaultedFetch.delete = fetch.delete
- return defaultedFetch
-}
-
-cachingFetch.delete = cacheDelete
-function cacheDelete (uri, opts) {
- opts = configureOptions(opts)
- if (opts.cacheManager) {
- const req = new fetch.Request(uri, {
- method: opts.method,
- headers: opts.headers
- })
- return opts.cacheManager.delete(req, opts)
- }
-}
-
-function initializeCache (opts) {
- if (typeof opts.cacheManager === 'string') {
- if (!Cache) {
- // Default cacache-based cache
- Cache = require('./cache')
- }
-
- opts.cacheManager = new Cache(opts.cacheManager, opts)
- }
-
- opts.cache = opts.cache || 'default'
-
- if (opts.cache === 'default' && isHeaderConditional(opts.headers)) {
- // If header list contains `If-Modified-Since`, `If-None-Match`,
- // `If-Unmodified-Since`, `If-Match`, or `If-Range`, fetch will set cache
- // mode to "no-store" if it is "default".
- opts.cache = 'no-store'
- }
-}
-
-function configureOptions (_opts) {
- const opts = Object.assign({}, _opts || {})
- opts.method = (opts.method || 'GET').toUpperCase()
-
- if (opts.retry && typeof opts.retry === 'number') {
- opts.retry = { retries: opts.retry }
- }
-
- if (opts.retry === false) {
- opts.retry = { retries: 0 }
- }
-
- if (opts.cacheManager) {
- initializeCache(opts)
- }
-
- return opts
-}
-
-function initializeSsri () {
- if (!ssri) {
- ssri = require('ssri')
- }
-}
-
-function cachingFetch (uri, _opts) {
- const opts = configureOptions(_opts)
-
- if (opts.integrity) {
- initializeSsri()
- // if verifying integrity, node-fetch must not decompress
- opts.compress = false
- }
-
- const isCachable = (opts.method === 'GET' || opts.method === 'HEAD') &&
- opts.cacheManager &&
- opts.cache !== 'no-store' &&
- opts.cache !== 'reload'
-
- if (isCachable) {
- const req = new fetch.Request(uri, {
- method: opts.method,
- headers: opts.headers
- })
-
- return opts.cacheManager.match(req, opts).then(res => {
- if (res) {
- const warningCode = (res.headers.get('Warning') || '').match(/^\d+/)
- if (warningCode && +warningCode >= 100 && +warningCode < 200) {
- // https://tools.ietf.org/html/rfc7234#section-4.3.4
- //
- // If a stored response is selected for update, the cache MUST:
- //
- // * delete any Warning header fields in the stored response with
- // warn-code 1xx (see Section 5.5);
- //
- // * retain any Warning header fields in the stored response with
- // warn-code 2xx;
- //
- res.headers.delete('Warning')
- }
-
- if (opts.cache === 'default' && !isStale(req, res)) {
- return res
- }
-
- if (opts.cache === 'default' || opts.cache === 'no-cache') {
- return conditionalFetch(req, res, opts)
- }
-
- if (opts.cache === 'force-cache' || opts.cache === 'only-if-cached') {
- // 112 Disconnected operation
- // SHOULD be included if the cache is intentionally disconnected from
- // the rest of the network for a period of time.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(res, 112, 'Disconnected operation')
- return res
- }
- }
-
- if (!res && opts.cache === 'only-if-cached') {
- const errorMsg = `request to ${
- uri
- } failed: cache mode is 'only-if-cached' but no cached response available.`
-
- const err = new Error(errorMsg)
- err.code = 'ENOTCACHED'
- throw err
- }
-
- // Missing cache entry, or mode is default (if stale), reload, no-store
- return remoteFetch(req.url, opts)
- })
- }
-
- return remoteFetch(uri, opts)
-}
-
-function iterableToObject (iter) {
- const obj = {}
- for (let k of iter.keys()) {
- obj[k] = iter.get(k)
- }
- return obj
-}
-
-function makePolicy (req, res) {
- const _req = {
- url: req.url,
- method: req.method,
- headers: iterableToObject(req.headers)
- }
- const _res = {
- status: res.status,
- headers: iterableToObject(res.headers)
- }
-
- return new CachePolicy(_req, _res, { shared: false })
-}
-
-// https://tools.ietf.org/html/rfc7234#section-4.2
-function isStale (req, res) {
- if (!res) {
- return null
- }
-
- const _req = {
- url: req.url,
- method: req.method,
- headers: iterableToObject(req.headers)
- }
-
- const policy = makePolicy(req, res)
-
- const responseTime = res.headers.get('x-local-cache-time') ||
- res.headers.get('date') ||
- 0
-
- policy._responseTime = new Date(responseTime)
-
- const bool = !policy.satisfiesWithoutRevalidation(_req)
- return bool
-}
-
-function mustRevalidate (res) {
- return (res.headers.get('cache-control') || '').match(/must-revalidate/i)
-}
-
-function conditionalFetch (req, cachedRes, opts) {
- const _req = {
- url: req.url,
- method: req.method,
- headers: Object.assign({}, opts.headers || {})
- }
-
- const policy = makePolicy(req, cachedRes)
- opts.headers = policy.revalidationHeaders(_req)
-
- return remoteFetch(req.url, opts)
- .then(condRes => {
- const revalidatedPolicy = policy.revalidatedPolicy(_req, {
- status: condRes.status,
- headers: iterableToObject(condRes.headers)
- })
-
- if (condRes.status >= 500 && !mustRevalidate(cachedRes)) {
- // 111 Revalidation failed
- // MUST be included if a cache returns a stale response because an
- // attempt to revalidate the response failed, due to an inability to
- // reach the server.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(cachedRes, 111, 'Revalidation failed')
- return cachedRes
- }
-
- if (condRes.status === 304) { // 304 Not Modified
- condRes.body = cachedRes.body
- return opts.cacheManager.put(req, condRes, opts)
- .then(newRes => {
- newRes.headers = new fetch.Headers(revalidatedPolicy.policy.responseHeaders())
- return newRes
- })
- }
-
- return condRes
- })
- .then(res => res)
- .catch(err => {
- if (mustRevalidate(cachedRes)) {
- throw err
- } else {
- // 111 Revalidation failed
- // MUST be included if a cache returns a stale response because an
- // attempt to revalidate the response failed, due to an inability to
- // reach the server.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(cachedRes, 111, 'Revalidation failed')
- // 199 Miscellaneous warning
- // The warning text MAY include arbitrary information to be presented to
- // a human user, or logged. A system receiving this warning MUST NOT take
- // any automated action, besides presenting the warning to the user.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(
- cachedRes,
- 199,
- `Miscellaneous Warning ${err.code}: ${err.message}`
- )
-
- return cachedRes
- }
- })
-}
-
-function remoteFetchHandleIntegrity (res, integrity) {
- const oldBod = res.body
- const newBod = ssri.integrityStream({
- integrity
- })
- oldBod.pipe(newBod)
- res.body = newBod
- oldBod.once('error', err => {
- newBod.emit('error', err)
- })
- newBod.once('error', err => {
- oldBod.emit('error', err)
- })
-}
-
-function remoteFetch (uri, opts) {
- const agent = getAgent(uri, opts)
- const headers = Object.assign({
- 'connection': agent ? 'keep-alive' : 'close',
- 'user-agent': USER_AGENT
- }, opts.headers || {})
-
- const reqOpts = {
- agent,
- body: opts.body,
- compress: opts.compress,
- follow: opts.follow,
- headers: new fetch.Headers(headers),
- method: opts.method,
- redirect: 'manual',
- size: opts.size,
- counter: opts.counter,
- timeout: opts.timeout
- }
-
- return retry(
- (retryHandler, attemptNum) => {
- const req = new fetch.Request(uri, reqOpts)
- return fetch(req)
- .then(res => {
- res.headers.set('x-fetch-attempts', attemptNum)
-
- if (opts.integrity) {
- remoteFetchHandleIntegrity(res, opts.integrity)
- }
-
- const isStream = req.body instanceof Stream
-
- if (opts.cacheManager) {
- const isMethodGetHead = req.method === 'GET' ||
- req.method === 'HEAD'
-
- const isCachable = opts.cache !== 'no-store' &&
- isMethodGetHead &&
- makePolicy(req, res).storable() &&
- res.status === 200 // No other statuses should be stored!
-
- if (isCachable) {
- return opts.cacheManager.put(req, res, opts)
- }
-
- if (!isMethodGetHead) {
- return opts.cacheManager.delete(req).then(() => {
- if (res.status >= 500 && req.method !== 'POST' && !isStream) {
- if (typeof opts.onRetry === 'function') {
- opts.onRetry(res)
- }
-
- return retryHandler(res)
- }
-
- return res
- })
- }
- }
-
- const isRetriable = req.method !== 'POST' &&
- !isStream && (
- res.status === 408 || // Request Timeout
- res.status === 420 || // Enhance Your Calm (usually Twitter rate-limit)
- res.status === 429 || // Too Many Requests ("standard" rate-limiting)
- res.status >= 500 // Assume server errors are momentary hiccups
- )
-
- if (isRetriable) {
- if (typeof opts.onRetry === 'function') {
- opts.onRetry(res)
- }
-
- return retryHandler(res)
- }
-
- if (!fetch.isRedirect(res.status) || opts.redirect === 'manual') {
- return res
- }
-
- // handle redirects - matches behavior of npm-fetch: https://github.com/bitinn/node-fetch
- if (opts.redirect === 'error') {
- const err = new Error(`redirect mode is set to error: ${uri}`)
- err.code = 'ENOREDIRECT'
- throw err
- }
-
- if (!res.headers.get('location')) {
- const err = new Error(`redirect location header missing at: ${uri}`)
- err.code = 'EINVALIDREDIRECT'
- throw err
- }
-
- if (req.counter >= req.follow) {
- const err = new Error(`maximum redirect reached at: ${uri}`)
- err.code = 'EMAXREDIRECT'
- throw err
- }
-
- const resolvedUrl = url.resolve(req.url, res.headers.get('location'))
- let redirectURL = url.parse(resolvedUrl)
-
- if (isURL.test(res.headers.get('location'))) {
- redirectURL = url.parse(res.headers.get('location'))
- }
-
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of request:
- // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- if (url.parse(req.url).hostname !== redirectURL.hostname) {
- req.headers.delete('authorization')
- }
-
- // for POST request with 301/302 response, or any request with 303 response,
- // use GET when following redirect
- if (res.status === 303 ||
- ((res.status === 301 || res.status === 302) && req.method === 'POST')) {
- opts.method = 'GET'
- opts.body = null
- req.headers.delete('content-length')
- }
-
- opts.headers = {}
- req.headers.forEach((value, name) => {
- opts.headers[name] = value
- })
-
- opts.counter = ++req.counter
- return cachingFetch(resolvedUrl, opts)
- })
- .catch(err => {
- const code = err.code === 'EPROMISERETRY' ? err.retried.code : err.code
-
- const isRetryError = RETRY_ERRORS.indexOf(code) === -1 &&
- RETRY_TYPES.indexOf(err.type) === -1
-
- if (req.method === 'POST' || isRetryError) {
- throw err
- }
-
- if (typeof opts.onRetry === 'function') {
- opts.onRetry(err)
- }
-
- return retryHandler(err)
- })
- },
- opts.retry
- ).catch(err => {
- if (err.status >= 400) {
- return err
- }
-
- throw err
- })
-}
-
-function isHeaderConditional (headers) {
- if (!headers || typeof headers !== 'object') {
- return false
- }
-
- const modifiers = [
- 'if-modified-since',
- 'if-none-match',
- 'if-unmodified-since',
- 'if-match',
- 'if-range'
- ]
-
- return Object.keys(headers)
- .some(h => modifiers.indexOf(h.toLowerCase()) !== -1)
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md b/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md
deleted file mode 100644
index f67fbc8b4..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md
+++ /dev/null
@@ -1,657 +0,0 @@
-# Changelog
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-### [12.0.3](https://github.com/npm/cacache/compare/v12.0.2...v12.0.3) (2019-08-19)
-
-
-### Bug Fixes
-
-* do not chown if not running as root ([2d80af9](https://github.com/npm/cacache/commit/2d80af9))
-
-
-
-### [12.0.2](https://github.com/npm/cacache/compare/v12.0.1...v12.0.2) (2019-07-19)
-
-
-
-### [12.0.1](https://github.com/npm/cacache/compare/v12.0.0...v12.0.1) (2019-07-19)
-
-* **deps** Abstracted out `lib/util/infer-owner.js` to
- [@npmcli/infer-owner](https://www.npmjs.com/package/@npmcli/infer-owner)
- so that it could be more easily used in other parts of the npm CLI.
-
-
-## [12.0.0](https://github.com/npm/cacache/compare/v11.3.3...v12.0.0) (2019-07-15)
-
-
-### Features
-
-* infer uid/gid instead of accepting as options ([ac84d14](https://github.com/npm/cacache/commit/ac84d14))
-* **i18n:** add another error message ([676cb32](https://github.com/npm/cacache/commit/676cb32))
-
-
-### BREAKING CHANGES
-
-* the uid gid options are no longer respected or
-necessary. As of this change, cacache will always match the cache
-contents to the ownership of the cache directory (or its parent
-directory), regardless of what the caller passes in.
-
-Reasoning:
-
-The number one reason to use a uid or gid option was to keep root-owned
-files from causing problems in the cache. In npm's case, this meant
-that CLI's ./lib/command.js had to work out the appropriate uid and gid,
-then pass it to the libnpmcommand module, which had to in turn pass the
-uid and gid to npm-registry-fetch, which then passed it to
-make-fetch-happen, which passed it to cacache. (For package fetching,
-pacote would be in that mix as well.)
-
-Added to that, `cacache.rm()` will actually _write_ a file into the
-cache index, but has no way to accept an option so that its call to
-entry-index.js will write the index with the appropriate uid/gid.
-Little ownership bugs were all over the place, and tricky to trace
-through. (Why should make-fetch-happen even care about accepting or
-passing uids and gids? It's an http library.)
-
-This change allows us to keep the cache from having mixed ownership in
-any situation.
-
-Of course, this _does_ mean that if you have a root-owned but
-user-writable folder (for example, `/tmp`), then the cache will try to
-chown everything to root.
-
-The solution is for the user to create a folder, make it user-owned, and
-use that, rather than relying on cacache to create the root cache folder.
-
-If we decide to restore the uid/gid opts, and use ownership inference
-only when uid/gid are unset, then take care to also make rm take an
-option object, and pass it through to entry-index.js.
-
-
-
-### [11.3.3](https://github.com/npm/cacache/compare/v11.3.2...v11.3.3) (2019-06-17)
-
-
-### Bug Fixes
-
-* **audit:** npm audit fix ([200a6d5](https://github.com/npm/cacache/commit/200a6d5))
-* **config:** Add ssri config 'error' option ([#146](https://github.com/npm/cacache/issues/146)) ([47de8f5](https://github.com/npm/cacache/commit/47de8f5))
-* **deps:** npm audit fix ([481a7dc](https://github.com/npm/cacache/commit/481a7dc))
-* **standard:** standard --fix ([7799149](https://github.com/npm/cacache/commit/7799149))
-* **write:** avoid another cb never called situation ([5156561](https://github.com/npm/cacache/commit/5156561))
-
-
-
-<a name="11.3.2"></a>
-## [11.3.2](https://github.com/npm/cacache/compare/v11.3.1...v11.3.2) (2018-12-21)
-
-
-### Bug Fixes
-
-* **get:** make sure to handle errors in the .then ([b10bcd0](https://github.com/npm/cacache/commit/b10bcd0))
-
-
-
-<a name="11.3.1"></a>
-## [11.3.1](https://github.com/npm/cacache/compare/v11.3.0...v11.3.1) (2018-11-05)
-
-
-### Bug Fixes
-
-* **get:** export hasContent.sync properly ([d76c920](https://github.com/npm/cacache/commit/d76c920))
-
-
-
-<a name="11.3.0"></a>
-# [11.3.0](https://github.com/npm/cacache/compare/v11.2.0...v11.3.0) (2018-11-05)
-
-
-### Features
-
-* **get:** add sync API for reading ([db1e094](https://github.com/npm/cacache/commit/db1e094))
-
-
-
-<a name="11.2.0"></a>
-# [11.2.0](https://github.com/npm/cacache/compare/v11.1.0...v11.2.0) (2018-08-08)
-
-
-### Features
-
-* **read:** add sync support to other internal read.js fns ([fe638b6](https://github.com/npm/cacache/commit/fe638b6))
-
-
-
-<a name="11.1.0"></a>
-# [11.1.0](https://github.com/npm/cacache/compare/v11.0.3...v11.1.0) (2018-08-01)
-
-
-### Features
-
-* **read:** add sync support for low-level content read ([b43af83](https://github.com/npm/cacache/commit/b43af83))
-
-
-
-<a name="11.0.3"></a>
-## [11.0.3](https://github.com/npm/cacache/compare/v11.0.2...v11.0.3) (2018-08-01)
-
-
-### Bug Fixes
-
-* **config:** add ssri config options ([#136](https://github.com/npm/cacache/issues/136)) ([10d5d9a](https://github.com/npm/cacache/commit/10d5d9a))
-* **perf:** refactor content.read to avoid lstats ([c5ac10e](https://github.com/npm/cacache/commit/c5ac10e))
-* **test:** oops when removing safe-buffer ([1950490](https://github.com/npm/cacache/commit/1950490))
-
-
-
-<a name="11.0.2"></a>
-## [11.0.2](https://github.com/npm/cacache/compare/v11.0.1...v11.0.2) (2018-05-07)
-
-
-### Bug Fixes
-
-* **verify:** size param no longer lost in a verify ([#131](https://github.com/npm/cacache/issues/131)) ([c614a19](https://github.com/npm/cacache/commit/c614a19)), closes [#130](https://github.com/npm/cacache/issues/130)
-
-
-
-<a name="11.0.1"></a>
-## [11.0.1](https://github.com/npm/cacache/compare/v11.0.0...v11.0.1) (2018-04-10)
-
-
-
-<a name="11.0.0"></a>
-# [11.0.0](https://github.com/npm/cacache/compare/v10.0.4...v11.0.0) (2018-04-09)
-
-
-### Features
-
-* **opts:** use figgy-pudding for opts ([#128](https://github.com/npm/cacache/issues/128)) ([33d4eed](https://github.com/npm/cacache/commit/33d4eed))
-
-
-### meta
-
-* drop support for node@4 ([529f347](https://github.com/npm/cacache/commit/529f347))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="10.0.4"></a>
-## [10.0.4](https://github.com/npm/cacache/compare/v10.0.3...v10.0.4) (2018-02-16)
-
-
-
-<a name="10.0.3"></a>
-## [10.0.3](https://github.com/npm/cacache/compare/v10.0.2...v10.0.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/npm/cacache/commit/fa918f5))
-
-
-
-<a name="10.0.2"></a>
-## [10.0.2](https://github.com/npm/cacache/compare/v10.0.1...v10.0.2) (2018-01-07)
-
-
-### Bug Fixes
-
-* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/npm/cacache/commit/347dc36))
-
-
-
-<a name="10.0.1"></a>
-## [10.0.1](https://github.com/npm/cacache/compare/v10.0.0...v10.0.1) (2017-11-15)
-
-
-### Bug Fixes
-
-* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/npm/cacache/commit/073fbe1))
-
-
-
-<a name="10.0.0"></a>
-# [10.0.0](https://github.com/npm/cacache/compare/v9.3.0...v10.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/npm/cacache/commit/fdbb4e5))
-
-
-### Performance Improvements
-
-* more copyFile benchmarks ([63787bb](https://github.com/npm/cacache/commit/63787bb))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="9.3.0"></a>
-# [9.3.0](https://github.com/npm/cacache/compare/v9.2.9...v9.3.0) (2017-10-07)
-
-
-### Features
-
-* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/npm/cacache/commit/067b5f6))
-
-
-
-<a name="9.2.9"></a>
-## [9.2.9](https://github.com/npm/cacache/compare/v9.2.8...v9.2.9) (2017-06-17)
-
-
-
-<a name="9.2.8"></a>
-## [9.2.8](https://github.com/npm/cacache/compare/v9.2.7...v9.2.8) (2017-06-05)
-
-
-### Bug Fixes
-
-* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/npm/cacache/commit/c3232ea))
-
-
-
-<a name="9.2.7"></a>
-## [9.2.7](https://github.com/npm/cacache/compare/v9.2.6...v9.2.7) (2017-06-05)
-
-
-### Bug Fixes
-
-* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/npm/cacache/commit/4131196))
-
-
-
-<a name="9.2.6"></a>
-## [9.2.6](https://github.com/npm/cacache/compare/v9.2.5...v9.2.6) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/npm/cacache/commit/5209ffe))
-
-
-
-<a name="9.2.5"></a>
-## [9.2.5](https://github.com/npm/cacache/compare/v9.2.4...v9.2.5) (2017-05-25)
-
-
-### Bug Fixes
-
-* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/npm/cacache/commit/84e1d7e))
-
-
-
-<a name="9.2.4"></a>
-## [9.2.4](https://github.com/npm/cacache/compare/v9.2.3...v9.2.4) (2017-05-24)
-
-
-### Bug Fixes
-
-* **deps:** bumping deps ([bbccb12](https://github.com/npm/cacache/commit/bbccb12))
-
-
-
-<a name="9.2.3"></a>
-## [9.2.3](https://github.com/npm/cacache/compare/v9.2.2...v9.2.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/npm/cacache/commit/ac90bc0))
-
-
-
-<a name="9.2.2"></a>
-## [9.2.2](https://github.com/npm/cacache/compare/v9.2.1...v9.2.2) (2017-05-14)
-
-
-### Bug Fixes
-
-* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/npm/cacache/commit/519b4ee))
-
-
-
-<a name="9.2.1"></a>
-## [9.2.1](https://github.com/npm/cacache/compare/v9.2.0...v9.2.1) (2017-05-14)
-
-
-### Bug Fixes
-
-* **docs:** fixing translation messup ([bb9e4f9](https://github.com/npm/cacache/commit/bb9e4f9))
-
-
-
-<a name="9.2.0"></a>
-# [9.2.0](https://github.com/npm/cacache/compare/v9.1.0...v9.2.0) (2017-05-14)
-
-
-### Features
-
-* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/npm/cacache/commit/531f9a4))
-
-
-
-<a name="9.1.0"></a>
-# [9.1.0](https://github.com/npm/cacache/compare/v9.0.0...v9.1.0) (2017-05-14)
-
-
-### Features
-
-* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/npm/cacache/commit/323b90c))
-
-
-
-<a name="9.0.0"></a>
-# [9.0.0](https://github.com/npm/cacache/compare/v8.0.0...v9.0.0) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoization:** actually use the LRU ([0e55dc9](https://github.com/npm/cacache/commit/0e55dc9))
-
-
-### Features
-
-* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/npm/cacache/commit/e5614c7))
-
-
-### BREAKING CHANGES
-
-* **memoization:** If you were passing an object to opts.memoize, it will now be used as an injected memoization object. If you were only passing booleans and other non-objects through that option, no changes are needed.
-
-
-
-<a name="8.0.0"></a>
-# [8.0.0](https://github.com/npm/cacache/compare/v7.1.0...v8.0.0) (2017-04-22)
-
-
-### Features
-
-* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/npm/cacache/commit/bad6c49)), closes [#87](https://github.com/npm/cacache/issues/87)
-
-
-### BREAKING CHANGES
-
-* **read:** hasContent now returns an object with `{sri, size}` instead of `sri`. Use `result.sri` anywhere that needed the old return value.
-
-
-
-<a name="7.1.0"></a>
-# [7.1.0](https://github.com/npm/cacache/compare/v7.0.5...v7.1.0) (2017-04-20)
-
-
-### Features
-
-* **size:** handle content size info (#49) ([91230af](https://github.com/npm/cacache/commit/91230af))
-
-
-
-<a name="7.0.5"></a>
-## [7.0.5](https://github.com/npm/cacache/compare/v7.0.4...v7.0.5) (2017-04-18)
-
-
-### Bug Fixes
-
-* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/npm/cacache/commit/6d13e8e))
-* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/npm/cacache/commit/3624fc5))
-
-
-
-<a name="7.0.4"></a>
-## [7.0.4](https://github.com/npm/cacache/compare/v7.0.3...v7.0.4) (2017-04-15)
-
-
-### Bug Fixes
-
-* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/npm/cacache/commit/d49bbcd))
-
-
-
-<a name="7.0.3"></a>
-## [7.0.3](https://github.com/npm/cacache/compare/v7.0.2...v7.0.3) (2017-04-05)
-
-
-### Bug Fixes
-
-* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/npm/cacache/commit/9d4f0a5))
-
-
-
-<a name="7.0.2"></a>
-## [7.0.2](https://github.com/npm/cacache/compare/v7.0.1...v7.0.2) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/npm/cacache/commit/8dc2e62))
-
-
-
-<a name="7.0.1"></a>
-## [7.0.1](https://github.com/npm/cacache/compare/v7.0.0...v7.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **docs:** fix header name conflict in readme ([afcd456](https://github.com/npm/cacache/commit/afcd456))
-
-
-
-<a name="7.0.0"></a>
-# [7.0.0](https://github.com/npm/cacache/compare/v6.3.0...v7.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/npm/cacache/commit/d2e9b6a))
-
-
-### Features
-
-* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/npm/cacache/commit/b1e731f))
-
-
-### BREAKING CHANGES
-
-* **integrity:** The entire API has been overhauled to use SRI hashes instead of digest/hashAlgorithm pairs. SRI hashes follow the Subresource Integrity standard and support strings and objects compatible with [`ssri`](https://npm.im/ssri).
-
-* This change bumps the index version, which will invalidate all previous index entries. Content entries will remain intact, and existing caches will automatically reuse any content from before this breaking change.
-
-* `cacache.get.info()`, `cacache.ls()`, and `cacache.ls.stream()` will now return objects that look like this:
-
-```
-{
- key: String,
- integrity: '<algorithm>-<base64hash>',
- path: ContentPath,
- time: Date<ms>,
- metadata: Any
-}
-```
-
-* `opts.digest` and `opts.hashAlgorithm` are obsolete for any API calls that used them.
-
-* Anywhere `opts.digest` was accepted, `opts.integrity` is now an option. Any valid SRI hash is accepted here -- multiple hash entries will be resolved according to the standard: first, the "strongest" hash algorithm will be picked, and then each of the entries for that algorithm will be matched against the content. Content will be validated if *any* of the entries match (so, a single integrity string can be used for multiple "versions" of the same document/data).
-
-* `put.byDigest()`, `put.stream.byDigest`, `get.byDigest()` and `get.stream.byDigest()` now expect an SRI instead of a `digest` + `opts.hashAlgorithm` pairing.
-
-* `get.hasContent()` now expects an integrity hash instead of a digest. If content exists, it will return the specific single integrity hash that was found in the cache.
-
-* `verify()` has learned to handle integrity-based caches, and forgotten how to handle old-style cache indices due to the format change.
-
-* `cacache.rm.content()` now expects an integrity hash instead of a hex digest.
-
-
-
-<a name="6.3.0"></a>
-# [6.3.0](https://github.com/npm/cacache/compare/v6.2.0...v6.3.0) (2017-04-01)
-
-
-### Bug Fixes
-
-* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/npm/cacache/commit/4670e9b))
-* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/npm/cacache/commit/b9d2fa2))
-* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/npm/cacache/commit/d8ac5aa))
-
-
-### Features
-
-* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/npm/cacache/commit/dc6482d))
-
-
-
-<a name="6.2.0"></a>
-# [6.2.0](https://github.com/npm/cacache/compare/v6.1.2...v6.2.0) (2017-03-15)
-
-
-### Bug Fixes
-
-* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/npm/cacache/commit/f8e0f25))
-* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/npm/cacache/commit/6818521))
-
-
-### Features
-
-* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/npm/cacache/commit/c42da71))
-
-
-
-<a name="6.1.2"></a>
-## [6.1.2](https://github.com/npm/cacache/compare/v6.1.1...v6.1.2) (2017-03-13)
-
-
-### Bug Fixes
-
-* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/npm/cacache/commit/d6eb2f0))
-
-
-
-<a name="6.1.1"></a>
-## [6.1.1](https://github.com/npm/cacache/compare/v6.1.0...v6.1.1) (2017-03-13)
-
-
-### Bug Fixes
-
-* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/npm/cacache/commit/0b7faf6))
-* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/npm/cacache/commit/0640bc4))
-
-
-
-<a name="6.1.0"></a>
-# [6.1.0](https://github.com/npm/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
-
-
-### Bug Fixes
-
-* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/npm/cacache/commit/ef4f70a))
-* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/npm/cacache/commit/6ab8132))
-
-
-### Features
-
-* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/npm/cacache/commit/d5d25ba)), closes [#66](https://github.com/npm/cacache/issues/66)
-* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/npm/cacache/commit/45ad77a))
-
-
-
-<a name="6.0.2"></a>
-## [6.0.2](https://github.com/npm/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)
-
-
-### Bug Fixes
-
-* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/npm/cacache/commit/c3644e5))
-
-
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/npm/cacache/compare/v6.0.0...v6.0.1) (2017-03-05)
-
-
-### Bug Fixes
-
-* **docs:** Missed spots in README ([8ffb7fa](https://github.com/npm/cacache/commit/8ffb7fa))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/npm/cacache/compare/v5.0.3...v6.0.0) (2017-03-05)
-
-
-### Bug Fixes
-
-* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/npm/cacache/commit/2f72d0a))
-* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/npm/cacache/commit/fa8f3c3))
-* **deps:** removed `format-number@2.0.2` ([1187791](https://github.com/npm/cacache/commit/1187791))
-* **deps:** removed inflight@1.0.6 ([0d1819c](https://github.com/npm/cacache/commit/0d1819c))
-* **deps:** rimraf@2.6.1 ([9efab6b](https://github.com/npm/cacache/commit/9efab6b))
-* **deps:** standard@9.0.0 ([4202cba](https://github.com/npm/cacache/commit/4202cba))
-* **deps:** tap@10.3.0 ([aa03088](https://github.com/npm/cacache/commit/aa03088))
-* **deps:** weallcontribute@1.0.8 ([ad4f4dc](https://github.com/npm/cacache/commit/ad4f4dc))
-* **docs:** add security note to hashKey ([03f81ba](https://github.com/npm/cacache/commit/03f81ba))
-* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/npm/cacache/commit/ea00ba6))
-* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/npm/cacache/commit/45997d8))
-* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/npm/cacache/commit/fb8cb4d))
-* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/npm/cacache/commit/bbc5fca))
-* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/npm/cacache/commit/b1d3888))
-* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/npm/cacache/commit/d26cdf9))
-* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/npm/cacache/commit/79a8891))
-* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/npm/cacache/commit/65f6632))
-* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/npm/cacache/commit/daf9e08))
-* **put-stream:** use new promise API for moves ([1d36013](https://github.com/npm/cacache/commit/1d36013))
-* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/npm/cacache/commit/c60a2fa))
-* **verify:** tiny typo fix ([db22d05](https://github.com/npm/cacache/commit/db22d05))
-
-
-### Features
-
-* **api:** converted external api ([7bf032f](https://github.com/npm/cacache/commit/7bf032f))
-* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/npm/cacache/commit/8d2c5b6))
-* **cache:** add versioning to content and index ([31bc549](https://github.com/npm/cacache/commit/31bc549))
-* **content:** collate content files into subdirs ([c094d9f](https://github.com/npm/cacache/commit/c094d9f))
-* **deps:** `@npmcorp/move@1.0.0` ([bdd00bf](https://github.com/npm/cacache/commit/bdd00bf))
-* **deps:** `bluebird@3.4.7` ([3a17aff](https://github.com/npm/cacache/commit/3a17aff))
-* **deps:** `promise-inflight@1.0.1` ([a004fe6](https://github.com/npm/cacache/commit/a004fe6))
-* **get:** added memoization support for get ([c77d794](https://github.com/npm/cacache/commit/c77d794))
-* **get:** export hasContent ([2956ec3](https://github.com/npm/cacache/commit/2956ec3))
-* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/npm/cacache/commit/b639746))
-* **index:** collate index files into subdirs ([e8402a5](https://github.com/npm/cacache/commit/e8402a5))
-* **index:** promisify entry index ([cda3335](https://github.com/npm/cacache/commit/cda3335))
-* **memo:** added memoization lib ([da07b92](https://github.com/npm/cacache/commit/da07b92))
-* **memo:** export memoization api ([954b1b3](https://github.com/npm/cacache/commit/954b1b3))
-* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/npm/cacache/commit/5cf4616))
-* **perf:** bulk content write api ([51b536e](https://github.com/npm/cacache/commit/51b536e))
-* **put:** added memoization support to put ([b613a70](https://github.com/npm/cacache/commit/b613a70))
-* **read:** switched to promises ([a869362](https://github.com/npm/cacache/commit/a869362))
-* **rm:** added memoization support to rm ([4205cf0](https://github.com/npm/cacache/commit/4205cf0))
-* **rm:** switched to promises ([a000d24](https://github.com/npm/cacache/commit/a000d24))
-* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/npm/cacache/commit/9517cd7))
-* **util:** use promises for api ([ae204bb](https://github.com/npm/cacache/commit/ae204bb))
-* **verify:** converted to Promises ([f0b3974](https://github.com/npm/cacache/commit/f0b3974))
-
-
-### BREAKING CHANGES
-
-* cache: index/content directories are now versioned. Previous caches are no longer compatible and cannot be migrated.
-* util: fix-owner now uses Promises instead of callbacks
-* index: Previously-generated index entries are no longer compatible and the index must be regenerated.
-* index: The index format has changed and previous caches are no longer compatible. Existing caches will need to be regenerated.
-* hashes: Default hashAlgorithm changed from sha1 to sha512. If you
-rely on the prior setting, pass `opts.hashAlgorithm` in explicitly.
-* content: Previously-generated content directories are no longer compatible
-and must be regenerated.
-* verify: API is now promise-based
-* read: Switches to a Promise-based API and removes callback stuff
-* rm: Switches to a Promise-based API and removes callback stuff
-* index: this changes the API to work off promises instead of callbacks
-* api: this means we are going all in on promises now
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/README.es.md b/node_modules/make-fetch-happen/node_modules/cacache/README.es.md
deleted file mode 100644
index 55007e20d..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/README.es.md
+++ /dev/null
@@ -1,628 +0,0 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
-
-[`cacache`](https://github.com/zkat/cacache) es una librería de Node.js para
-manejar caches locales en disco, con acceso tanto con claves únicas como
-direcciones de contenido (hashes/hacheos). Es súper rápida, excelente con el
-acceso concurrente, y jamás te dará datos incorrectos, aun si se corrompen o
-manipulan directamente los ficheros del caché.
-
-El propósito original era reemplazar el caché local de
-[npm](https://npm.im/npm), pero se puede usar por su propia cuenta.
-
-_Traducciones: [English](README.md)_
-
-## Instalación
-
-`$ npm install --save cacache`
-
-## Índice
-
-* [Ejemplo](#ejemplo)
-* [Características](#características)
-* [Cómo Contribuir](#cómo-contribuir)
-* [API](#api)
- * [Usando el API en español](#localized-api)
- * Leer
- * [`ls`](#ls)
- * [`ls.flujo`](#ls-stream)
- * [`saca`](#get-data)
- * [`saca.flujo`](#get-stream)
- * [`saca.info`](#get-info)
- * [`saca.tieneDatos`](#get-hasContent)
- * Escribir
- * [`mete`](#put-data)
- * [`mete.flujo`](#put-stream)
- * [opciones para `mete*`](#put-options)
- * [`rm.todo`](#rm-all)
- * [`rm.entrada`](#rm-entry)
- * [`rm.datos`](#rm-content)
- * Utilidades
- * [`ponLenguaje`](#set-locale)
- * [`limpiaMemoizado`](#clear-memoized)
- * [`tmp.hazdir`](#tmp-mkdir)
- * [`tmp.conTmp`](#with-tmp)
- * Integridad
- * [Subresource Integrity](#integrity)
- * [`verifica`](#verify)
- * [`verifica.ultimaVez`](#verify-last-run)
-
-### Ejemplo
-
-```javascript
-const cacache = require('cacache/es')
-const fs = require('fs')
-
-const tarbol = '/ruta/a/mi-tar.tgz'
-const rutaCache = '/tmp/my-toy-cache'
-const clave = 'mi-clave-única-1234'
-
-// ¡Añádelo al caché! Usa `rutaCache` como raíz del caché.
-cacache.mete(rutaCache, clave, '10293801983029384').then(integrity => {
- console.log(`Saved content to ${rutaCache}.`)
-})
-
-const destino = '/tmp/mytar.tgz'
-
-// Copia el contenido del caché a otro fichero, pero esta vez con flujos.
-cacache.saca.flujo(
- rutaCache, clave
-).pipe(
- fs.createWriteStream(destino)
-).on('finish', () => {
- console.log('extracción completada')
-})
-
-// La misma cosa, pero accesando el contenido directamente, sin tocar el índice.
-cacache.saca.porHacheo(rutaCache, integridad).then(datos => {
- fs.writeFile(destino, datos, err => {
- console.log('datos del tarbol sacados basado en su sha512, y escrito a otro fichero')
- })
-})
-```
-
-### Características
-
-* Extracción por clave o por dirección de contenido (shasum, etc)
-* Usa el estándar web [Subresource Integrity](#integrity)
-* Compatible con multiples algoritmos - usa sha1, sha512, etc, en el mismo caché sin problema
-* Entradas con contenido idéntico comparten ficheros
-* Tolerancia de fallas (inmune a corrupción, ficheros parciales, carreras de proceso, etc)
-* Verificación completa de datos al escribir y leer
-* Concurrencia rápida, segura y "lockless"
-* Compatible con `stream`s (flujos)
-* Compatible con `Promise`s (promesas)
-* Bastante rápida -- acceso, incluyendo verificación, en microsegundos
-* Almacenaje de metadatos arbitrarios
-* Colección de basura y verificación adicional fuera de banda
-* Cobertura rigurosa de pruebas
-* Probablemente hay un "Bloom filter" por ahí en algún lado. Eso le mola a la gente, ¿verdad? 🤔
-
-### Cómo Contribuir
-
-El equipo de cacache felizmente acepta contribuciones de código y otras maneras de participación. ¡Hay muchas formas diferentes de contribuir! La [Guía de Colaboradores](CONTRIBUTING.md) (en inglés) tiene toda la información que necesitas para cualquier tipo de contribución: todo desde cómo reportar errores hasta cómo someter parches con nuevas características. Con todo y eso, no te preocupes por si lo que haces está exactamente correcto: no hay ningún problema en hacer preguntas si algo no está claro, o si no lo encuentras.
-
-El equipo de cacache tiene miembros hispanohablantes: es completamente aceptable crear `issues` y `pull requests` en español/castellano.
-
-Todos los participantes en este proyecto deben obedecer el [Código de Conducta](CODE_OF_CONDUCT.md) (en inglés), y en general actuar de forma amable y respetuosa mientras participan en esta comunidad.
-
-Por favor refiérase al [Historial de Cambios](CHANGELOG.md) (en inglés) para detalles sobre cambios importantes incluídos en cada versión.
-
-Finalmente, cacache tiene un sistema de localización de lenguaje. Si te interesa añadir lenguajes o mejorar los que existen, mira en el directorio `./locales` para comenzar.
-
-Happy hacking!
-
-### API
-
-#### <a name="localized-api"></a> Usando el API en español
-
-cacache incluye una traducción completa de su API al castellano, con las mismas
-características. Para usar el API tal y como se documenta en este README, usa
-`require('cacache/es')`.
-
-cacache también tiene otros lenguajes: encuéntralos bajo `./locales`, y podrás
-usar el API en ese lenguaje con `require('cacache/<lenguaje>')`
-
-#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
-
-Enumera todas las entradas en el caché, dentro de un solo objeto. Cada entrada
-en el objeto tendrá como clave la clave única usada para el índice, y su valor
-será un objeto de [`saca.info`](#get-info).
-
-##### Ejemplo
-
-```javascript
-cacache.ls(rutaCache).then(console.log)
-// Salida
-{
- 'my-thing': {
- key: 'my-thing',
-    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
- path: '.testcache/content/deadbeef', // unido con `rutaCache`
- time: 12345698490,
- size: 4023948,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
- },
- 'other-thing': {
- key: 'other-thing',
- integrity: 'sha1-ANothER+hasH=',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 111112
- }
-}
-```
-
-#### <a name="ls-stream"></a> `> cacache.ls.flujo(cache) -> Readable`
-
-Enumera todas las entradas en el caché, emitiendo un objeto de
-[`saca.info`](#get-info) por cada evento de `data` en el flujo.
-
-##### Ejemplo
-
-```javascript
-cacache.ls.flujo(rutaCache).on('data', console.log)
-// Salida
-{
- key: 'my-thing',
- integrity: 'sha512-BaSe64HaSh',
- path: '.testcache/content/deadbeef', // unido con `rutaCache`
- time: 12345698490,
- size: 13423,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-
-{
- key: 'other-thing',
- integrity: 'whirlpool-WoWSoMuchSupport',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 498023984029
-}
-
-{
- ...
-}
-```
-
-#### <a name="get-data"></a> `> cacache.saca(cache, clave, [ops]) -> Promise({data, metadata, integrity})`
-
-Devuelve un objeto con los datos, hacheo de integridad y metadatos identificados
-por la `clave`. La propiedad `data` de este objeto será una instancia de
-`Buffer` con los datos almacenados en el caché. Seguramente sabes qué hacer
-con ellos; a cacache no le importa.
-
-`integrity` es un `string` de [Subresource Integrity](#integrity). Dígase, un
-`string` que puede ser usado para verificar a la `data`, que tiene como formato
-`<algoritmo>-<hacheo-integridad-base64>`.
-
-Si no existe ninguna entrada identificada por `clave`, o si los datos
-almacenados localmente fallan la verificación, el `Promise` fallará.
-
-Una sub-función, `saca.porHacheo`, tiene casi el mismo comportamiento, excepto
-que busca entradas usando el hacheo de integridad, sin tocar el índice general.
-Esta versión *sólo* devuelve `data`, sin ningún objeto conteniéndola.
-
-##### Nota
-
-Esta función lee la entrada completa a la memoria antes de devolverla. Si estás
-almacenando datos Muy Grandes, es posible que [`saca.flujo`](#get-stream) sea
-una mejor solución.
-
-##### Ejemplo
-
-```javascript
-// Busca por clave
-cache.saca(rutaCache, 'my-thing').then(console.log)
-// Salida:
-{
- metadata: {
- thingName: 'my'
- },
- integrity: 'sha512-BaSe64HaSh',
- data: Buffer#<deadbeef>,
- size: 9320
-}
-
-// Busca por hacheo
-cache.saca.porHacheo(rutaCache, 'sha512-BaSe64HaSh').then(console.log)
-// Salida:
-Buffer#<deadbeef>
-```
-
-#### <a name="get-stream"></a> `> cacache.saca.flujo(cache, clave, [ops]) -> Readable`
-
-Devuelve un [Readable
-Stream](https://nodejs.org/api/stream.html#stream_readable_streams) de los datos
-almacenados bajo `clave`.
-
-Si no existe ninguna entrada identificada por `clave`, o si los datos
-almacenados localmente fallan la verificación, se emitirá un error.
-
-`metadata` y `integrity` serán emitidos como eventos antes de que el flujo
-cierre.
-
-Una sub-función, `saca.flujo.porHacheo`, tiene casi el mismo comportamiento,
-excepto que busca entradas usando el hacheo de integridad, sin tocar el índice
-general. Esta versión no emite eventos de `metadata` o `integrity`.
-
-##### Ejemplo
-
-```javascript
-// Busca por clave
-cache.saca.flujo(
- rutaCache, 'my-thing'
-).on('metadata', metadata => {
- console.log('metadata:', metadata)
-}).on('integrity', integrity => {
- console.log('integrity:', integrity)
-}).pipe(
- fs.createWriteStream('./x.tgz')
-)
-// Salidas:
-metadata: { ... }
-integrity: 'sha512-SoMeDIGest+64=='
-
-// Busca por hacheo
-cache.saca.flujo.porHacheo(
- rutaCache, 'sha512-SoMeDIGest+64=='
-).pipe(
- fs.createWriteStream('./x.tgz')
-)
-```
-
-#### <a name="get-info"></a> `> cacache.saca.info(cache, clave) -> Promise`
-
-Busca la `clave` en el índice del caché, devolviendo información sobre la
-entrada si existe.
-
-##### Campos
-
-* `key` - Clave de la entrada. Igual al argumento `clave`.
-* `integrity` - [hacheo de Subresource Integrity](#integrity) del contenido al que se refiere esta entrada.
-* `path` - Dirección del fichero de datos almacenados, unida al argumento `cache`.
-* `time` - Hora de creación de la entrada
-* `metadata` - Metadatos asignados a esta entrada por el usuario
-
-##### Ejemplo
-
-```javascript
-cacache.saca.info(rutaCache, 'my-thing').then(console.log)
-
-// Salida
-{
- key: 'my-thing',
-  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
- path: '.testcache/content/deadbeef',
- time: 12345698490,
- size: 849234,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-```
-
-#### <a name="get-hasContent"></a> `> cacache.saca.tieneDatos(cache, integrity) -> Promise`
-
-Busca un [hacheo Subresource Integrity](#integrity) en el caché. Si existe el
-contenido asociado con `integrity`, devuelve un objeto con dos campos: el hacheo
-_específico_ que se usó para la búsqueda, `sri`, y el tamaño total del
-contenido, `size`. Si no existe ningún contenido asociado con `integrity`,
-devuelve `false`.
-
-##### Ejemplo
-
-```javascript
-cacache.saca.tieneDatos(rutaCache, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
-
-// Salida
-{
- sri: {
- source: 'sha256-MUSTVERIFY+ALL/THINGS==',
- algorithm: 'sha256',
- digest: 'MUSTVERIFY+ALL/THINGS==',
- options: []
- },
- size: 9001
-}
-
-cacache.saca.tieneDatos(rutaCache, 'sha521-NOT+IN/CACHE==').then(console.log)
-
-// Salida
-false
-```
-
-#### <a name="put-data"></a> `> cacache.mete(cache, clave, datos, [ops]) -> Promise`
-
-Inserta `datos` en el caché. El `Promise` devuelto se resuelve con un hacheo
-(generado conforme a [`ops.algorithms`](#optsalgorithms)) después que la entrada
-haya sido escrita en completo.
-
-##### Ejemplo
-
-```javascript
-fetch(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).then(datos => {
- return cacache.mete(rutaCache, 'registry.npmjs.org|cacache@1.0.0', datos)
-}).then(integridad => {
- console.log('el hacheo de integridad es', integridad)
-})
-```
-
-#### <a name="put-stream"></a> `> cacache.mete.flujo(cache, clave, [ops]) -> Writable`
-
-Devuelve un [Writable
-Stream](https://nodejs.org/api/stream.html#stream_writable_streams) que inserta
-al caché los datos escritos a él. Emite un evento `integrity` con el hacheo del
-contenido escrito, cuando completa.
-
-##### Ejemplo
-
-```javascript
-request.get(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).pipe(
- cacache.mete.flujo(
- rutaCache, 'registry.npmjs.org|cacache@1.0.0'
- ).on('integrity', d => console.log(`integrity digest is ${d}`))
-)
-```
-
-#### <a name="put-options"></a> `> opciones para cacache.mete`
-
-Las funciones `cacache.mete` tienen un número de opciones en común.
-
-##### `ops.metadata`
-
-Metadatos del usuario que se almacenarán con la entrada.
-
-##### `ops.size`
-
-El tamaño declarado de los datos que se van a insertar. Si es proveído, cacache
-verificará que los datos escritos sean de ese tamaño y, si no, fallará con un
-error con código `EBADSIZE`.
-
-##### `ops.integrity`
-
-El hacheo de integridad de los datos siendo escritos.
-
-Si es proveído, y los datos escritos no le corresponden, la operación fallará
-con un error con código `EINTEGRITY`.
-
-`ops.algorithms` no tiene ningún efecto si esta opción está presente.
-
-##### `ops.algorithms`
-
-Por Defecto: `['sha512']`
-
-Algoritmos que se deben usar cuando se calcule el hacheo de [subresource
-integrity](#integrity) para los datos insertados. Puede usar cualquier algoritmo
-enumerado en `crypto.getHashes()`.
-
-Por el momento, sólo se acepta un algoritmo (dígase, un array con exactamente un
-valor). No tiene ningún efecto si `ops.integrity` también ha sido proveído.
-
-##### `ops.uid`/`ops.gid`
-
-Si están presentes, cacache hará todo lo posible para asegurarse que todos los
-ficheros creados en el proceso de sus operaciones en el caché usen esta
-combinación de `uid`/`gid` en particular.
-
-##### `ops.memoize`
-
-Por Defecto: `null`
-
-Si es verdad, cacache tratará de memoizar los datos de la entrada en memoria. La
-próxima vez que el proceso corriente trate de accesar los datos o entrada,
-cacache buscará en memoria antes de buscar en disco.
-
-Si `ops.memoize` es un objeto regular o un objeto como `Map` (es decir, un
-objeto con métodos `get()` y `set()`), este objeto en sí será usado en vez del
-caché de memoria global. Esto permite tener lógica específica a tu aplicación
-en cuanto al almacenaje en memoria de tus datos.
-
-Si quieres asegurarte que los datos se lean del disco en vez de memoria, usa
-`memoize: false` cuando uses funciones de `cacache.saca`.
-
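-Un pequeño ejemplo ilustrativo de cómo se combinan estas opciones en una sola
-inserción (la ruta, la clave y los metadatos son sólo de ejemplo):
-
-```javascript
-const cacache = require('cacache/es')
-
-const rutaCache = '/tmp/my-toy-cache'
-const datos = Buffer.from('contenido de ejemplo')
-
-cacache.mete(rutaCache, 'registry.npmjs.org|ejemplo@1.0.0', datos, {
-  metadata: { origen: 'https://example.com/ejemplo-1.0.0.tgz' },
-  size: datos.length,     // falla con EBADSIZE si el tamaño no corresponde
-  algorithms: ['sha512'], // sólo un algoritmo a la vez
-  memoize: true           // guarda la entrada también en memoria
-}).then(integridad => {
-  console.log('guardado con hacheo', integridad)
-  // fuerza una lectura desde disco aunque la entrada esté memoizada:
-  return cacache.saca(rutaCache, 'registry.npmjs.org|ejemplo@1.0.0', { memoize: false })
-}).then(res => console.log(res.metadata.origen, res.data.length))
-```
-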
-#### <a name="rm-all"></a> `> cacache.rm.todo(cache) -> Promise`
-
-Borra el caché completo, incluyendo ficheros temporeros, ficheros de datos, y el
-índice del caché.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.todo(rutaCache).then(() => {
- console.log('THE APOCALYPSE IS UPON US 😱')
-})
-```
-
-#### <a name="rm-entry"></a> `> cacache.rm.entrada(cache, clave) -> Promise`
-
-Alias: `cacache.rm`
-
-Borra la entrada `clave` del índice. El contenido asociado con esta entrada
-seguirá siendo accesible por hacheo usando
-[`saca.flujo.porHacheo`](#get-stream).
-
-Para borrar el contenido en sí, usa [`rm.datos`](#rm-content). Si quieres hacer
-esto de manera más segura (pues ficheros de contenido pueden ser usados por
-multiples entradas), usa [`verifica`](#verify) para borrar huérfanos.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.entrada(rutaCache, 'my-thing').then(() => {
- console.log('I did not like it anyway')
-})
-```
-
-#### <a name="rm-content"></a> `> cacache.rm.datos(cache, integrity) -> Promise`
-
-Borra el contenido identificado por `integrity`. Cualquier entrada que se
-refiera a este contenido quedará huérfana y se invalidará si se trata de
-accesar, a menos que contenido idéntico sea añadido bajo `integrity`.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.datos(rutaCache, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
- console.log('los datos para `mi-cosa` se borraron')
-})
-```
-
-#### <a name="set-locale"></a> `> cacache.ponLenguaje(locale)`
-
-Configura el lenguaje usado para mensajes y errores de cacache. La lista de
-lenguajes disponibles está en el directorio `./locales` del proyecto.
-
-_¿Te interesa añadir más lenguajes? ¡[Somete un PR](CONTRIBUTING.md)!_
-
-#### <a name="clear-memoized"></a> `> cacache.limpiaMemoizado()`
-
-Reinicializa completamente el caché de memoria interno. Si estás usando tu
-propio objeto con `ops.memoize`, tendrás que limpiarlo por tu propia cuenta.
-
-#### <a name="tmp-mkdir"></a> `> tmp.hazdir(cache, ops) -> Promise<Path>`
-
-Alias: `tmp.mkdir`
-
-Devuelve un directorio único dentro del directorio `tmp` del caché.
-
-Una vez tengas el directorio, es responsabilidad tuya asegurarte que todos los
-ficheros escritos en él sean creados usando los permisos y `uid`/`gid` concordantes
-con el caché. Si no, puedes pedirle a cacache que lo haga llamando a
-[`cacache.tmp.fix()`](#tmp-fix). Esta función arreglará todos los permisos en el
-directorio tmp.
-
-Si quieres que cacache limpie el directorio automáticamente cuando termines, usa
-[`cacache.tmp.conTmp()`](#with-tmp).
-
-##### Ejemplo
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-})
-```
-
-#### <a name="with-tmp"></a> `> tmp.conTmp(cache, ops, cb) -> Promise`
-
-Crea un directorio temporero con [`tmp.mkdir()`](#tmp-mkdir) y ejecuta `cb` con
-él como primer argumento. El directorio creado será removido automáticamente
-cuando el valor devuelto por `cb()` se resuelva.
-
-Las mismas advertencias aplican en cuanto al manejo de permisos para los ficheros
-dentro del directorio.
-
-##### Ejemplo
-
-```javascript
-cacache.tmp.conTmp(cache, dir => {
- return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-}).then(() => {
- // `dir` no longer exists
-})
-```
-
-#### <a name="integrity"></a> Hacheos de Subresource Integrity
-
-cacache usa strings que siguen la [especificación de Subresource
-Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
-
-Es decir, siempre que cacache espere un argumento o opción `integrity`, ese
-string debería usar el formato `<algoritmo>-<hacheo-base64>`.
-
-Una variación importante sobre la especificación es que cacache acepta el
-nombre de cualquier algoritmo soportado por el proceso de Node.js donde se usa.
-Puedes usar `crypto.getHashes()` para ver cuáles están disponibles.
-
-##### Generando tus propios hacheos
-
-Si tienes un `shasum`, en general va a estar en formato de string hexadecimal
-(es decir, un `sha1` se vería como algo así:
-`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`).
-
-Para ser compatible con cacache, necesitas convertir esto a su equivalente en
-subresource integrity. Por ejemplo, el hacheo correspondiente al ejemplo
-anterior sería: `sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
-
-Puedes usar código así para generarlo por tu cuenta:
-
-```javascript
-const crypto = require('crypto')
-const algoritmo = 'sha512'
-const datos = 'foobarbaz'
-
-const integrity = (
-  algoritmo +
- '-' +
- crypto.createHash(algoritmo).update(datos).digest('base64')
-)
-```
-
-También puedes usar [`ssri`](https://npm.im/ssri) para delegar el trabajo a otra
-librería que garantiza que todo esté correcto, pues maneja probablemente todas
-las operaciones que tendrías que hacer con SRIs, incluyendo la conversión entre
-hexadecimal y el formato SRI.
-
-#### <a name="verify"></a> `> cacache.verifica(cache, ops) -> Promise`
-
-Examina y arregla tu caché:
-
-* Limpia entradas inválidas, huérfanas y corrompidas
-* Te deja filtrar cuales entradas retener, con tu propio filtro
-* Reclama cualquier fichero de contenido sin referencias en el índice
-* Verifica integridad de todos los ficheros de contenido y remueve los malos
-* Arregla permisos del caché
-* Remueve el directorio `tmp` del caché, y todo su contenido.
-
-Cuando termine, devuelve un objeto con varias estadísticas sobre el proceso de
-verificación, por ejemplo la cantidad de espacio de disco reclamado, el número
-de entradas válidas, número de entradas removidas, etc.
-
-##### Opciones
-
-* `ops.uid` - uid para asignarle al caché y su contenido
-* `ops.gid` - gid para asignarle al caché y su contenido
-* `ops.filter` - recibe una entrada como argumento. Devuelve falso para removerla. Nota: es posible que esta función sea invocada con la misma entrada más de una vez.
-
-##### Ejemplo
-
-```sh
-echo somegarbage >> $RUTACACHE/content/deadbeef
-```
-
-```javascript
-cacache.verifica(rutaCache).then(stats => {
- // deadbeef collected, because of invalid checksum.
- console.log('cache is much nicer now! stats:', stats)
-})
-```
-
-#### <a name="verify-last-run"></a> `> cacache.verifica.ultimaVez(cache) -> Promise`
-
-Alias: `últimaVez`
-
-Devuelve un `Date` que representa la última vez que `cacache.verifica` fue
-ejecutada en `cache`.
-
-##### Ejemplo
-
-```javascript
-cacache.verifica(rutaCache).then(() => {
- cacache.verifica.ultimaVez(rutaCache).then(última => {
- console.log('La última vez que se usó cacache.verifica() fue ' + última)
- })
-})
-```
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/README.md b/node_modules/make-fetch-happen/node_modules/cacache/README.md
deleted file mode 100644
index 7f8ec5eec..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/README.md
+++ /dev/null
@@ -1,641 +0,0 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest)
-
-[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing
-local key and content address caches. It's really fast, really good at
-concurrency, and it will never give you corrupted data, even if cache files
-get corrupted or manipulated.
-
-On systems that support user and group settings on files, cacache will
-match the `uid` and `gid` values to the folder where the cache lives, even
-when running as `root`.
-
-It was written to be used as [npm](https://npm.im)'s local cache, but can
-just as easily be used on its own.
-
-_Translations: [español](README.es.md)_
-
-## Install
-
-`$ npm install --save cacache`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [Using localized APIs](#localized-api)
- * Reading
- * [`ls`](#ls)
- * [`ls.stream`](#ls-stream)
- * [`get`](#get-data)
- * [`get.stream`](#get-stream)
- * [`get.info`](#get-info)
- * [`get.hasContent`](#get-hasContent)
- * Writing
- * [`put`](#put-data)
- * [`put.stream`](#put-stream)
- * [`put*` opts](#put-options)
- * [`rm.all`](#rm-all)
- * [`rm.entry`](#rm-entry)
- * [`rm.content`](#rm-content)
- * Utilities
- * [`setLocale`](#set-locale)
- * [`clearMemoized`](#clear-memoized)
- * [`tmp.mkdir`](#tmp-mkdir)
- * [`tmp.withTmp`](#with-tmp)
- * Integrity
- * [Subresource Integrity](#integrity)
- * [`verify`](#verify)
- * [`verify.lastRun`](#verify-last-run)
-
-### Example
-
-```javascript
-const cacache = require('cacache/en')
-const fs = require('fs')
-
-const tarball = '/path/to/mytar.tgz'
-const cachePath = '/tmp/my-toy-cache'
-const key = 'my-unique-key-1234'
-
-// Cache it! Use `cachePath` as the root of the content cache
-cacache.put(cachePath, key, '10293801983029384').then(integrity => {
- console.log(`Saved content to ${cachePath}.`)
-})
-
-const destination = '/tmp/mytar.tgz'
-
-// Copy the contents out of the cache and into their destination!
-// But this time, use stream instead!
-cacache.get.stream(
- cachePath, key
-).pipe(
- fs.createWriteStream(destination)
-).on('finish', () => {
- console.log('done extracting!')
-})
-
-// The same thing, but skip the key index.
-cacache.get.byDigest(cachePath, integrityHash).then(data => {
- fs.writeFile(destination, data, err => {
- console.log('tarball data fetched based on its sha512sum and written out!')
- })
-})
-```
-
-### Features
-
-* Extraction by key or by content address (shasum, etc)
-* [Subresource Integrity](#integrity) web standard support
-* Multi-hash support - safely host sha1, sha512, etc, in a single cache
-* Automatic content deduplication
-* Fault tolerance (immune to corruption, partial writes, process races, etc)
-* Consistency guarantees on read and write (full data verification)
-* Lockless, high-concurrency cache access
-* Streaming support
-* Promise support
-* Pretty darn fast -- sub-millisecond reads and writes including verification
-* Arbitrary metadata storage
-* Garbage collection and additional offline verification
-* Thorough test coverage
-* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔
-
-### Contributing
-
-The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### <a name="localized-api"></a> Using localized APIs
-
-cacache includes a complete API in English, with the same features as other
-translations. To use the English API as documented in this README, use
-`require('cacache/en')`. This is also currently the default if you do
-`require('cacache')`, but may change in the future.
-
-cacache also supports other languages! You can find the list of currently
-supported ones by looking in `./locales` in the source directory. You can use
-the API in that language with `require('cacache/<lang>')`.
-
-Want to add support for a new language? Please go ahead! You should be able to
-copy `./locales/en.js` and `./locales/en.json` and fill them in. Translating the
-`README.md` is a bit more work, but also appreciated if you get around to it. 👍🏼
-
-#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
-
-Lists info for all entries currently in the cache as a single large object. Each
-entry in the object will be keyed by the unique index key, with corresponding
-[`get.info`](#get-info) objects as the values.
-
-##### Example
-
-```javascript
-cacache.ls(cachePath).then(console.log)
-// Output
-{
- 'my-thing': {
- key: 'my-thing',
-    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
- path: '.testcache/content/deadbeef', // joined with `cachePath`
- time: 12345698490,
- size: 4023948,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
- },
- 'other-thing': {
- key: 'other-thing',
- integrity: 'sha1-ANothER+hasH=',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 111112
- }
-}
-```
-
-#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`
-
-Lists info for all entries currently in the cache, one entry at a time.
-
-This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
-returned as `'data'` events on the returned stream.
-
-##### Example
-
-```javascript
-cacache.ls.stream(cachePath).on('data', console.log)
-// Output
-{
- key: 'my-thing',
- integrity: 'sha512-BaSe64HaSh',
- path: '.testcache/content/deadbeef', // joined with `cachePath`
- time: 12345698490,
- size: 13423,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-
-{
- key: 'other-thing',
- integrity: 'whirlpool-WoWSoMuchSupport',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 498023984029
-}
-
-{
- ...
-}
-```
-
-#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
-
-Returns an object with the cached data, digest, and metadata identified by
-`key`. The `data` property of this object will be a `Buffer` instance that
-presumably holds some data that means something to you. I'm sure you know what
-to do with it! cacache just won't care.
-
-`integrity` is a [Subresource
-Integrity](#integrity)
-string. That is, a string that can be used to verify `data`, which looks like
-`<hash-algorithm>-<base64-integrity-hash>`.
-
-If there is no content identified by `key`, or if the locally-stored data does
-not pass the validity checksum, the promise will be rejected.
-
-A sub-function, `get.byDigest` may be used for identical behavior, except lookup
-will happen by integrity hash, bypassing the index entirely. This version of the
-function *only* returns `data` itself, without any wrapper.
-
-##### Note
-
-This function loads the entire cache entry into memory before returning it. If
-you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
-instead.
-
-##### Example
-
-```javascript
-// Look up by key
-cache.get(cachePath, 'my-thing').then(console.log)
-// Output:
-{
- metadata: {
- thingName: 'my'
- },
- integrity: 'sha512-BaSe64HaSh',
- data: Buffer#<deadbeef>,
- size: 9320
-}
-
-// Look up by digest
-cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
-// Output:
-Buffer#<deadbeef>
-```
-
-#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
-
-Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
-
-If there is no content identified by `key`, or if the locally-stored data does
-not pass the validity checksum, an error will be emitted.
-
-`metadata` and `integrity` events will be emitted before the stream closes, if
-you need to collect that extra data about the cached entry.
-
-A sub-function, `get.stream.byDigest` may be used for identical behavior,
-except lookup will happen by integrity hash, bypassing the index entirely. This
-version does not emit the `metadata` and `integrity` events at all.
-
-##### Example
-
-```javascript
-// Look up by key
-cache.get.stream(
- cachePath, 'my-thing'
-).on('metadata', metadata => {
- console.log('metadata:', metadata)
-}).on('integrity', integrity => {
- console.log('integrity:', integrity)
-}).pipe(
- fs.createWriteStream('./x.tgz')
-)
-// Outputs:
-metadata: { ... }
-integrity: 'sha512-SoMeDIGest+64=='
-
-// Look up by digest
-cache.get.stream.byDigest(
- cachePath, 'sha512-SoMeDIGest+64=='
-).pipe(
- fs.createWriteStream('./x.tgz')
-)
-```
-
-#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
-
-Looks up `key` in the cache index, returning information about the entry if
-one exists.
-
-##### Fields
-
-* `key` - Key the entry was looked up under. Matches the `key` argument.
-* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
-* `path` - Filesystem path where content is stored, joined with `cache` argument.
-* `time` - Timestamp the entry was first added on.
-* `metadata` - User-assigned metadata associated with the entry/content.
-
-##### Example
-
-```javascript
-cacache.get.info(cachePath, 'my-thing').then(console.log)
-
-// Output
-{
- key: 'my-thing',
-  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
- path: '.testcache/content/deadbeef',
- time: 12345698490,
- size: 849234,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-```
-
-#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
-
-Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
-exists for this `integrity`, it will return an object with the specific single
-integrity hash that was found (under the `sri` key) and the size of the found
-content (as `size`). If no content exists for this integrity, it will return
-`false`.
-
-##### Example
-
-```javascript
-cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
-
-// Output
-{
- sri: {
- source: 'sha256-MUSTVERIFY+ALL/THINGS==',
- algorithm: 'sha256',
- digest: 'MUSTVERIFY+ALL/THINGS==',
- options: []
- },
- size: 9001
-}
-
-cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log)
-
-// Output
-false
-```
-
-#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
-
-Inserts data passed to it into the cache. The returned Promise resolves with a
-digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
-cache entry has been successfully written.
-
-##### Example
-
-```javascript
-fetch(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).then(data => {
- return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
-}).then(integrity => {
- console.log('integrity hash is', integrity)
-})
-```
-
-#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
-
-Returns a [Writable
-Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
-data written to it into the cache. Emits an `integrity` event with the digest of
-written contents when it succeeds.
-
-##### Example
-
-```javascript
-request.get(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).pipe(
- cacache.put.stream(
- cachePath, 'registry.npmjs.org|cacache@1.0.0'
- ).on('integrity', d => console.log(`integrity digest is ${d}`))
-)
-```
-
-#### <a name="put-options"></a> `> cacache.put options`
-
-`cacache.put` functions have a number of options in common.
-
-##### `opts.metadata`
-
-Arbitrary metadata to be attached to the inserted key.
-
-##### `opts.size`
-
-If provided, the data stream will be verified to check that enough data was
-passed through. If there's more or less data than expected, insertion will fail
-with an `EBADSIZE` error.
-
-##### `opts.integrity`
-
-If present, the pre-calculated digest for the inserted content. If this option
-is provided and does not match the post-insertion digest, insertion will fail
-with an `EINTEGRITY` error.
-
-`algorithms` has no effect if this option is present.
-
-##### `opts.algorithms`
-
-Default: ['sha512']
-
-Hashing algorithms to use when calculating the [subresource integrity
-digest](#integrity)
-for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
-`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
-may also use any anagram of `'modnar'` to use this feature.
-
-Currently only supports one algorithm at a time (i.e., an array length of
-exactly `1`). Has no effect if `opts.integrity` is present.
-
-##### `opts.memoize`
-
-Default: null
-
-If provided, cacache will memoize the given cache insertion in memory, bypassing
-any filesystem checks for that key or digest in future cache fetches. Nothing
-will be written to the in-memory cache unless this option is explicitly truthy.
-
-If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
-and `set` methods), it will be written to instead of the global memoization
-cache.
-
-Reading from disk can be forced by explicitly passing `memoize: false` to
-the reader functions, but their default will be to read from memory.
-
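-Taken together, a minimal sketch of how these options combine on a single
-insertion; the cache path, key, and metadata values below are illustrative,
-not anything cacache itself defines:
-
-```javascript
-const cacache = require('cacache/en')
-
-const cachePath = '/tmp/my-toy-cache'
-const data = Buffer.from('some tarball bytes')
-
-cacache.put(cachePath, 'registry.npmjs.org|example@1.0.0', data, {
-  metadata: { fetchedFrom: 'https://example.com/example-1.0.0.tgz' },
-  size: data.length,      // insertion fails with EBADSIZE if the byte count differs
-  algorithms: ['sha512'], // exactly one algorithm at a time
-  memoize: true           // also keep the entry in the global in-memory cache
-}).then(integrity => {
-  console.log('stored with digest', integrity)
-  // Force a disk read even though the entry was memoized:
-  return cacache.get(cachePath, 'registry.npmjs.org|example@1.0.0', { memoize: false })
-}).then(res => console.log(res.metadata.fetchedFrom, res.data.length))
-```
-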
-#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`
-
-Clears the entire cache. Mainly by blowing away the cache directory itself.
-
-##### Example
-
-```javascript
-cacache.rm.all(cachePath).then(() => {
- console.log('THE APOCALYPSE IS UPON US 😱')
-})
-```
-
-#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise`
-
-Alias: `cacache.rm`
-
-Removes the index entry for `key`. Content will still be accessible if
-requested directly by content address ([`get.stream.byDigest`](#get-stream)).
-
-To remove the content itself (which might still be used by other entries), use
-[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
-[`verify`](#verify).
-
-##### Example
-
-```javascript
-cacache.rm.entry(cachePath, 'my-thing').then(() => {
- console.log('I did not like it anyway')
-})
-```
-
-#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`
-
-Removes the content identified by `integrity`. Any index entries referring to it
-will not be usable again until the content is re-added to the cache with an
-identical digest.
-
-##### Example
-
-```javascript
-cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
- console.log('data for my-thing is gone!')
-})
-```
-
-#### <a name="set-locale"></a> `> cacache.setLocale(locale)`
-
-Configure the language/locale used for messages and errors coming from cacache.
-The list of available locales is in the `./locales` directory in the project
-root.
-
-_Interested in contributing more languages? [Submit a PR](CONTRIBUTING.md)!_
-
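-A one-line sketch, assuming the `es` locale shipped in `./locales`:
-
-```javascript
-const cacache = require('cacache/en')
-
-// Messages and errors from cacache now come out in Spanish.
-cacache.setLocale('es')
-```
-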
-#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
-
-Completely resets the in-memory entry cache.
-
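-For instance, a sketch of dropping memoized entries so the next read has to go
-back to the disk index (path and key here are illustrative):
-
-```javascript
-const cacache = require('cacache/en')
-
-cacache.put('/tmp/my-toy-cache', 'my-key', Buffer.from('hi'), { memoize: true }).then(() => {
-  cacache.clearMemoized()
-  // No longer served from memory; this read goes back to disk.
-  return cacache.get('/tmp/my-toy-cache', 'my-key')
-}).then(res => console.log(res.data.toString()))
-```
-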
-#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
-
-Returns a unique temporary directory inside the cache's `tmp` dir. This
-directory will use the same safe user assignment that the rest of the cache uses.
-
-Once the directory is made, it's the user's responsibility that all files
-within are given the appropriate `gid`/`uid` ownership settings to match
-the rest of the cache. If not, you can ask cacache to do it for you by
-calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
-permissions.
-
-If you want automatic cleanup of this directory, use
-[`tmp.withTmp()`](#with-tmp).
-
-##### Example
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-})
-```
-
-#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
-
-Sets the `uid` and `gid` properties on all files and folders within the tmp
-folder to match the rest of the cache.
-
-Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
-[`tmp.withTmp`](#with-tmp).
-
-##### Example
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- writeFile(path.join(dir, 'file'), someData).then(() => {
- // make sure we didn't just put a root-owned file in the cache
- cacache.tmp.fix().then(() => {
- // all uids and gids match now
- })
- })
-})
-```
-
-#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
-
-Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
-with it. The created temporary directory will be removed when the return value
-of `cb()` resolves -- that is, if you return a Promise from `cb()`, the tmp
-directory will be automatically deleted once that promise completes.
-
-The same caveats apply when it comes to managing permissions for the tmp dir's
-contents.
-
-##### Example
-
-```javascript
-cacache.tmp.withTmp(cache, dir => {
- return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-}).then(() => {
- // `dir` no longer exists
-})
-```
-
-#### <a name="integrity"></a> Subresource Integrity Digests
-
-For content verification and addressing, cacache uses strings following the
-[Subresource
-Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
-That is, any time cacache expects an `integrity` argument or option, it
-should be in the format `<hashAlgorithm>-<base64-hash>`.
-
-One deviation from the current spec is that cacache will support any hash
-algorithms supported by the underlying Node.js process. You can use
-`crypto.getHashes()` to see which ones you can use.
-
-##### Generating Digests Yourself
-
-If you have an existing content shasum, they are generally formatted as a
-hexadecimal string (that is, a sha1 would look like:
-`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
-cacache, you'll need to convert this to an equivalent subresource integrity
-string. For this example, the corresponding hash would be:
-`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
-
-If you want to generate an integrity string yourself for existing data, you can
-use something like this:
-
-```javascript
-const crypto = require('crypto')
-const hashAlgorithm = 'sha512'
-const data = 'foobarbaz'
-
-const integrity = (
- hashAlgorithm +
- '-' +
- crypto.createHash(hashAlgorithm).update(data).digest('base64')
-)
-```
-
-You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
-around SRI strings, including generation, parsing, and translating from existing
-hex-formatted strings.
-
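-For instance, a sketch using ssri's `fromHex` and `fromData` helpers (as
-documented on the ssri package page) rather than hand-rolling the string:
-
-```javascript
-const ssri = require('ssri')
-
-// Convert an existing hex-formatted sha1 shasum into an SRI string:
-const converted = ssri.fromHex('5f5513f8822fdbe5145af33b64d8d970dcf95c6e', 'sha1').toString()
-// -> 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4='
-
-// Or compute one directly from the data itself:
-const fresh = ssri.fromData(Buffer.from('foobarbaz'), { algorithms: ['sha512'] }).toString()
-```
-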
-#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
-
-Checks out and fixes up your cache:
-
-* Cleans up corrupted or invalid index entries.
-* Custom entry filtering options.
-* Garbage collects any content entries not referenced by the index.
-* Checks integrity for all content entries and removes invalid content.
-* Fixes cache ownership.
-* Removes the `tmp` directory in the cache and all its contents.
-
-When it's done, it'll return an object with various stats about the verification
-process, including amount of storage reclaimed, number of valid entries, number
-of entries removed, etc.
-
-##### Options
-
-* `opts.filter` - receives a formatted entry. Return false to remove it.
- Note: might be called more than once on the same entry.
-
-##### Example
-
-```sh
-echo somegarbage >> $CACHEPATH/content/deadbeef
-```
-
-```javascript
-cacache.verify(cachePath).then(stats => {
- // deadbeef collected, because of invalid checksum.
- console.log('cache is much nicer now! stats:', stats)
-})
-```
-
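-A sketch of the `opts.filter` hook described above; the key prefix used here is
-purely illustrative:
-
-```javascript
-const cacache = require('cacache/en')
-const cachePath = '/tmp/my-toy-cache'
-
-// Keep only registry entries; everything else is removed and its
-// now-unreferenced content is garbage-collected.
-cacache.verify(cachePath, {
-  filter: entry => entry.key.startsWith('registry.npmjs.org|')
-}).then(stats => console.log('verification stats:', stats))
-```
-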
-#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
-
-Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
-
-##### Example
-
-```javascript
-cacache.verify(cachePath).then(() => {
- cacache.verify.lastRun(cachePath).then(lastTime => {
-    console.log('cacache.verify was last called on ' + lastTime)
- })
-})
-```
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/en.js b/node_modules/make-fetch-happen/node_modules/cacache/en.js
deleted file mode 100644
index a3db581c9..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/en.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/en.js')
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/es.js b/node_modules/make-fetch-happen/node_modules/cacache/es.js
deleted file mode 100644
index 6282363c3..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/es.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/es.js')
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/get.js b/node_modules/make-fetch-happen/node_modules/cacache/get.js
deleted file mode 100644
index 008cb83a9..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/get.js
+++ /dev/null
@@ -1,247 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const figgyPudding = require('figgy-pudding')
-const fs = require('fs')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const pipe = require('mississippi').pipe
-const pipeline = require('mississippi').pipeline
-const read = require('./lib/content/read')
-const through = require('mississippi').through
-
-const GetOpts = figgyPudding({
- integrity: {},
- memoize: {},
- size: {}
-})
-
-module.exports = function get (cache, key, opts) {
- return getData(false, cache, key, opts)
-}
-module.exports.byDigest = function getByDigest (cache, digest, opts) {
- return getData(true, cache, digest, opts)
-}
-function getData (byDigest, cache, key, opts) {
- opts = GetOpts(opts)
- const memoized = (
- byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
- )
- if (memoized && opts.memoize !== false) {
- return BB.resolve(byDigest ? memoized : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size
- })
- }
- return (
- byDigest ? BB.resolve(null) : index.find(cache, key, opts)
- ).then(entry => {
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- return read(cache, byDigest ? key : entry.integrity, {
- integrity: opts.integrity,
- size: opts.size
- }).then(data => byDigest ? data : {
- metadata: entry.metadata,
- data: data,
- size: entry.size,
- integrity: entry.integrity
- }).then(res => {
- if (opts.memoize && byDigest) {
- memo.put.byDigest(cache, key, res, opts)
- } else if (opts.memoize) {
- memo.put(cache, entry, res.data, opts)
- }
- return res
- })
- })
-}
-
-module.exports.sync = function get (cache, key, opts) {
- return getDataSync(false, cache, key, opts)
-}
-module.exports.sync.byDigest = function getByDigest (cache, digest, opts) {
- return getDataSync(true, cache, digest, opts)
-}
-function getDataSync (byDigest, cache, key, opts) {
- opts = GetOpts(opts)
- const memoized = (
- byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
- )
- if (memoized && opts.memoize !== false) {
- return byDigest ? memoized : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size
- }
- }
- const entry = !byDigest && index.find.sync(cache, key, opts)
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- const data = read.sync(
- cache,
- byDigest ? key : entry.integrity,
- {
- integrity: opts.integrity,
- size: opts.size
- }
- )
- const res = byDigest
- ? data
- : {
- metadata: entry.metadata,
- data: data,
- size: entry.size,
- integrity: entry.integrity
- }
- if (opts.memoize && byDigest) {
- memo.put.byDigest(cache, key, res, opts)
- } else if (opts.memoize) {
- memo.put(cache, entry, res.data, opts)
- }
- return res
-}
-
-module.exports.stream = getStream
-function getStream (cache, key, opts) {
- opts = GetOpts(opts)
- let stream = through()
- const memoized = memo.get(cache, key, opts)
- if (memoized && opts.memoize !== false) {
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(memoized.entry.metadata)
- ev === 'integrity' && cb(memoized.entry.integrity)
- ev === 'size' && cb(memoized.entry.size)
- })
- stream.write(memoized.data, () => stream.end())
- return stream
- }
- index.find(cache, key).then(entry => {
- if (!entry) {
- return stream.emit(
- 'error', new index.NotFoundError(cache, key)
- )
- }
- let memoStream
- if (opts.memoize) {
- let memoData = []
- let memoLength = 0
- memoStream = through((c, en, cb) => {
- memoData && memoData.push(c)
- memoLength += c.length
- cb(null, c, en)
- }, cb => {
- memoData && memo.put(cache, entry, Buffer.concat(memoData, memoLength), opts)
- cb()
- })
- } else {
- memoStream = through()
- }
- stream.emit('metadata', entry.metadata)
- stream.emit('integrity', entry.integrity)
- stream.emit('size', entry.size)
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(entry.metadata)
- ev === 'integrity' && cb(entry.integrity)
- ev === 'size' && cb(entry.size)
- })
- pipe(
- read.readStream(cache, entry.integrity, opts.concat({
- size: opts.size == null ? entry.size : opts.size
- })),
- memoStream,
- stream
- )
- }).catch(err => stream.emit('error', err))
- return stream
-}
-
-module.exports.stream.byDigest = getStreamDigest
-function getStreamDigest (cache, integrity, opts) {
- opts = GetOpts(opts)
- const memoized = memo.get.byDigest(cache, integrity, opts)
- if (memoized && opts.memoize !== false) {
- const stream = through()
- stream.write(memoized, () => stream.end())
- return stream
- } else {
- let stream = read.readStream(cache, integrity, opts)
- if (opts.memoize) {
- let memoData = []
- let memoLength = 0
- const memoStream = through((c, en, cb) => {
- memoData && memoData.push(c)
- memoLength += c.length
- cb(null, c, en)
- }, cb => {
- memoData && memo.put.byDigest(
- cache,
- integrity,
- Buffer.concat(memoData, memoLength),
- opts
- )
- cb()
- })
- stream = pipeline(stream, memoStream)
- }
- return stream
- }
-}
-
-module.exports.info = info
-function info (cache, key, opts) {
- opts = GetOpts(opts)
- const memoized = memo.get(cache, key, opts)
- if (memoized && opts.memoize !== false) {
- return BB.resolve(memoized.entry)
- } else {
- return index.find(cache, key)
- }
-}
-
-module.exports.hasContent = read.hasContent
-
-module.exports.copy = function cp (cache, key, dest, opts) {
- return copy(false, cache, key, dest, opts)
-}
-module.exports.copy.byDigest = function cpDigest (cache, digest, dest, opts) {
- return copy(true, cache, digest, dest, opts)
-}
-function copy (byDigest, cache, key, dest, opts) {
- opts = GetOpts(opts)
- if (read.copy) {
- return (
- byDigest ? BB.resolve(null) : index.find(cache, key, opts)
- ).then(entry => {
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- return read.copy(
- cache, byDigest ? key : entry.integrity, dest, opts
- ).then(() => byDigest ? key : {
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity
- })
- })
- } else {
- return getData(byDigest, cache, key, opts).then(res => {
- return fs.writeFileAsync(dest, byDigest ? res : res.data)
- .then(() => byDigest ? key : {
- metadata: res.metadata,
- size: res.size,
- integrity: res.integrity
- })
- })
- }
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/index.js b/node_modules/make-fetch-happen/node_modules/cacache/index.js
deleted file mode 100644
index a3db581c9..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/index.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/en.js')
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index c67c28061..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-function contentPath (cache, integrity) {
- const sri = ssri.parse(integrity, { single: true })
- // contentPath is the *strongest* algo given
- return path.join.apply(path, [
- contentDir(cache),
- sri.algorithm
- ].concat(hashToSegments(sri.hexDigest())))
-}
-
-module.exports._contentDir = contentDir
-function contentDir (cache) {
- return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index 7929524f8..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,195 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const figgyPudding = require('figgy-pudding')
-const fs = require('graceful-fs')
-const PassThrough = require('stream').PassThrough
-const pipe = BB.promisify(require('mississippi').pipe)
-const ssri = require('ssri')
-const Y = require('../util/y.js')
-
-const lstatAsync = BB.promisify(fs.lstat)
-const readFileAsync = BB.promisify(fs.readFile)
-
-const ReadOpts = figgyPudding({
- size: {}
-})
-
-module.exports = read
-function read (cache, integrity, opts) {
- opts = ReadOpts(opts)
- return withContentSri(cache, integrity, (cpath, sri) => {
- return readFileAsync(cpath, null).then(data => {
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
- }
- })
- })
-}
-
-module.exports.sync = readSync
-function readSync (cache, integrity, opts) {
- opts = ReadOpts(opts)
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- const data = fs.readFileSync(cpath)
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
- }
- })
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-function readStream (cache, integrity, opts) {
- opts = ReadOpts(opts)
- const stream = new PassThrough()
- withContentSri(cache, integrity, (cpath, sri) => {
- return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
- }).then(({ cpath, sri, stat }) => {
- return pipe(
- fs.createReadStream(cpath),
- ssri.integrityStream({
- integrity: sri,
- size: opts.size
- }),
- stream
- )
- }).catch(err => {
- stream.emit('error', err)
- })
- return stream
-}
-
-let copyFileAsync
-if (fs.copyFile) {
- module.exports.copy = copy
- module.exports.copy.sync = copySync
- copyFileAsync = BB.promisify(fs.copyFile)
-}
-
-function copy (cache, integrity, dest, opts) {
- opts = ReadOpts(opts)
- return withContentSri(cache, integrity, (cpath, sri) => {
- return copyFileAsync(cpath, dest)
- })
-}
-
-function copySync (cache, integrity, dest, opts) {
- opts = ReadOpts(opts)
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- return fs.copyFileSync(cpath, dest)
- })
-}
-
-module.exports.hasContent = hasContent
-function hasContent (cache, integrity) {
- if (!integrity) { return BB.resolve(false) }
- return withContentSri(cache, integrity, (cpath, sri) => {
- return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
- }).catch(err => {
- if (err.code === 'ENOENT') { return false }
- if (err.code === 'EPERM') {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }
- })
-}
-
-module.exports.hasContent.sync = hasContentSync
-function hasContentSync (cache, integrity) {
- if (!integrity) { return false }
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- try {
- const stat = fs.lstatSync(cpath)
- return { size: stat.size, sri, stat }
- } catch (err) {
- if (err.code === 'ENOENT') { return false }
- if (err.code === 'EPERM') {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }
- }
- })
-}
-
-function withContentSri (cache, integrity, fn) {
- return BB.try(() => {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- return BB.any(sri[sri.pickAlgorithm()].map(meta => {
- return withContentSri(cache, meta, fn)
- }, { concurrency: 1 }))
- .catch(err => {
- if ([].some.call(err, e => e.code === 'ENOENT')) {
- throw Object.assign(
- new Error('No matching content found for ' + sri.toString()),
- { code: 'ENOENT' }
- )
- } else {
- throw err[0]
- }
- })
- }
- })
-}
-
-function withContentSriSync (cache, integrity, fn) {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- let lastErr = null
- for (const meta of sri[sri.pickAlgorithm()]) {
- try {
- return withContentSriSync(cache, meta, fn)
- } catch (err) {
- lastErr = err
- }
- }
- if (lastErr) { throw lastErr }
- }
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function integrityError (sri, path) {
- var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
- err.code = 'EINTEGRITY'
- err.sri = sri
- err.path = path
- return err
-}
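`withContentSri` above is the interesting piece: when an SRI string carries several digests for its strongest algorithm, each digest is tried in turn and the first one with readable local content wins (the async variant collects candidates with `BB.any`, the sync variant loops). A plain-promise sketch of that first-success fallback, where `fn` stands in for whatever per-digest operation the caller needs:

```javascript
const ssri = require('ssri')

async function firstAvailable (integrity, fn) {
  const sri = ssri.parse(integrity)
  const digests = sri[sri.pickAlgorithm()]
  let lastErr = new Error('no digests to try')
  for (const hash of digests) {
    try {
      return await fn(hash) // fn resolves only if local content exists for this digest
    } catch (err) {
      lastErr = err
    }
  }
  throw lastErr
}
```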
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index 12cf15823..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,21 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const hasContent = require('./read').hasContent
-const rimraf = BB.promisify(require('rimraf'))
-
-module.exports = rm
-function rm (cache, integrity) {
- return hasContent(cache, integrity).then(content => {
- if (content) {
- const sri = content.sri
- if (sri) {
- return rimraf(contentPath(cache, sri)).then(() => true)
- }
- } else {
- return false
- }
- })
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index 4d96a3cff..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,164 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const fixOwner = require('../util/fix-owner')
-const fs = require('graceful-fs')
-const moveFile = require('../util/move-file')
-const PassThrough = require('stream').PassThrough
-const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
-const rimraf = BB.promisify(require('rimraf'))
-const ssri = require('ssri')
-const to = require('mississippi').to
-const uniqueFilename = require('unique-filename')
-const Y = require('../util/y.js')
-
-const writeFileAsync = BB.promisify(fs.writeFile)
-
-module.exports = write
-function write (cache, data, opts) {
- opts = opts || {}
- if (opts.algorithms && opts.algorithms.length > 1) {
- throw new Error(
- Y`opts.algorithms only supports a single algorithm for now`
- )
- }
- if (typeof opts.size === 'number' && data.length !== opts.size) {
- return BB.reject(sizeError(opts.size, data.length))
- }
- const sri = ssri.fromData(data, {
- algorithms: opts.algorithms
- })
- if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
- return BB.reject(checksumError(opts.integrity, sri))
- }
- return BB.using(makeTmp(cache, opts), tmp => (
- writeFileAsync(
- tmp.target, data, { flag: 'wx' }
- ).then(() => (
- moveToDestination(tmp, cache, sri, opts)
- ))
- )).then(() => ({ integrity: sri, size: data.length }))
-}
-
-module.exports.stream = writeStream
-function writeStream (cache, opts) {
- opts = opts || {}
- const inputStream = new PassThrough()
- let inputErr = false
- function errCheck () {
- if (inputErr) { throw inputErr }
- }
-
- let allDone
- const ret = to((c, n, cb) => {
- if (!allDone) {
- allDone = handleContent(inputStream, cache, opts, errCheck)
- }
- inputStream.write(c, n, cb)
- }, cb => {
- inputStream.end(() => {
- if (!allDone) {
- const e = new Error(Y`Cache input stream was empty`)
- e.code = 'ENODATA'
- return ret.emit('error', e)
- }
- allDone.then(res => {
- res.integrity && ret.emit('integrity', res.integrity)
- res.size !== null && ret.emit('size', res.size)
- cb()
- }, e => {
- ret.emit('error', e)
- })
- })
- })
- ret.once('error', e => {
- inputErr = e
- })
- return ret
-}
-
-function handleContent (inputStream, cache, opts, errCheck) {
- return BB.using(makeTmp(cache, opts), tmp => {
- errCheck()
- return pipeToTmp(
- inputStream, cache, tmp.target, opts, errCheck
- ).then(res => {
- return moveToDestination(
- tmp, cache, res.integrity, opts, errCheck
- ).then(() => res)
- })
- })
-}
-
-function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
- return BB.resolve().then(() => {
- let integrity
- let size
- const hashStream = ssri.integrityStream({
- integrity: opts.integrity,
- algorithms: opts.algorithms,
- size: opts.size
- }).on('integrity', s => {
- integrity = s
- }).on('size', s => {
- size = s
- })
- const outStream = fs.createWriteStream(tmpTarget, {
- flags: 'wx'
- })
- errCheck()
- return pipe(inputStream, hashStream, outStream).then(() => {
- return { integrity, size }
- }).catch(err => {
- return rimraf(tmpTarget).then(() => { throw err })
- })
- })
-}
-
-function makeTmp (cache, opts) {
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(
- cache, path.dirname(tmpTarget)
- ).then(() => ({
- target: tmpTarget,
- moved: false
- })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
-}
-
-function moveToDestination (tmp, cache, sri, opts, errCheck) {
- errCheck && errCheck()
- const destination = contentPath(cache, sri)
- const destDir = path.dirname(destination)
-
- return fixOwner.mkdirfix(
- cache, destDir
- ).then(() => {
- errCheck && errCheck()
- return moveFile(tmp.target, destination)
- }).then(() => {
- errCheck && errCheck()
- tmp.moved = true
- return fixOwner.chownr(cache, destination)
- })
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function checksumError (expected, found) {
- var err = new Error(Y`Integrity check failed:
- Wanted: ${expected}
- Found: ${found}`)
- err.code = 'EINTEGRITY'
- err.expected = expected
- err.found = found
- return err
-}
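The write path above follows a write-then-move discipline: data is hashed, written to a unique temp file with the exclusive `wx` flag, and only then moved into its content-addressed location, so a crash never leaves a partially written file under `content-v2`. A simplified `fs.promises` sketch; the destination layout is flattened here, whereas the real code fans the digest out into subdirectories and hard-links rather than renaming:

```javascript
const fs = require('fs').promises
const path = require('path')
const ssri = require('ssri')

async function sketchWrite (cache, data) {
  const sri = ssri.fromData(data, { algorithms: ['sha512'] })
  const tmp = path.join(cache, 'tmp', `write-${process.pid}-${Date.now()}`)
  await fs.mkdir(path.dirname(tmp), { recursive: true })
  await fs.writeFile(tmp, data, { flag: 'wx' }) // fail rather than clobber
  const dest = path.join(cache, 'content-v2', 'sha512', sri.hexDigest())
  await fs.mkdir(path.dirname(dest), { recursive: true })
  await fs.rename(tmp, dest)
  return { integrity: sri.toString(), size: data.length }
}
```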
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index dee1824b1..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,288 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./content/path')
-const crypto = require('crypto')
-const figgyPudding = require('figgy-pudding')
-const fixOwner = require('./util/fix-owner')
-const fs = require('graceful-fs')
-const hashToSegments = require('./util/hash-to-segments')
-const ms = require('mississippi')
-const path = require('path')
-const ssri = require('ssri')
-const Y = require('./util/y.js')
-
-const indexV = require('../package.json')['cache-version'].index
-
-const appendFileAsync = BB.promisify(fs.appendFile)
-const readFileAsync = BB.promisify(fs.readFile)
-const readdirAsync = BB.promisify(fs.readdir)
-const concat = ms.concat
-const from = ms.from
-
-module.exports.NotFoundError = class NotFoundError extends Error {
- constructor (cache, key) {
- super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
- this.code = 'ENOENT'
- this.cache = cache
- this.key = key
- }
-}
-
-const IndexOpts = figgyPudding({
- metadata: {},
- size: {}
-})
-
-module.exports.insert = insert
-function insert (cache, key, integrity, opts) {
- opts = IndexOpts(opts)
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: Date.now(),
- size: opts.size,
- metadata: opts.metadata
- }
- return fixOwner.mkdirfix(
- cache, path.dirname(bucket)
- ).then(() => {
- const stringified = JSON.stringify(entry)
- // NOTE - Cleverness ahoy!
- //
- // This works because it's tremendously unlikely for an entry to corrupt
- // another while still preserving the string length of the JSON in
- // question. So, we just slap the length in there and verify it on read.
- //
- // Thanks to @isaacs for the whiteboarding session that ended up with this.
- return appendFileAsync(
- bucket, `\n${hashEntry(stringified)}\t${stringified}`
- )
- }).then(
- () => fixOwner.chownr(cache, bucket)
- ).catch({ code: 'ENOENT' }, () => {
- // There's a class of race conditions that happen when things get deleted
- // during fixOwner, or between the two mkdirfix/chownr calls.
- //
- // It's perfectly fine to just not bother in those cases and lie
- // that the index entry was written. Because it's a cache.
- }).then(() => {
- return formatEntry(cache, entry)
- })
-}
-
-module.exports.insert.sync = insertSync
-function insertSync (cache, key, integrity, opts) {
- opts = IndexOpts(opts)
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: Date.now(),
- size: opts.size,
- metadata: opts.metadata
- }
- fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
- const stringified = JSON.stringify(entry)
- fs.appendFileSync(
- bucket, `\n${hashEntry(stringified)}\t${stringified}`
- )
- try {
- fixOwner.chownr.sync(cache, bucket)
- } catch (err) {
- if (err.code !== 'ENOENT') {
- throw err
- }
- }
- return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-function find (cache, key) {
- const bucket = bucketPath(cache, key)
- return bucketEntries(bucket).then(entries => {
- return entries.reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- }).catch(err => {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- })
-}
-
-module.exports.find.sync = findSync
-function findSync (cache, key) {
- const bucket = bucketPath(cache, key)
- try {
- return bucketEntriesSync(bucket).reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- } catch (err) {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- }
-}
-
-module.exports.delete = del
-function del (cache, key, opts) {
- return insert(cache, key, null, opts)
-}
-
-module.exports.delete.sync = delSync
-function delSync (cache, key, opts) {
- return insertSync(cache, key, null, opts)
-}
-
-module.exports.lsStream = lsStream
-function lsStream (cache) {
- const indexDir = bucketDir(cache)
- const stream = from.obj()
-
- // "/cachename/*"
- readdirOrEmpty(indexDir).map(bucket => {
- const bucketPath = path.join(indexDir, bucket)
-
- // "/cachename/<bucket 0xFF>/*"
- return readdirOrEmpty(bucketPath).map(subbucket => {
- const subbucketPath = path.join(bucketPath, subbucket)
-
- // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
- return readdirOrEmpty(subbucketPath).map(entry => {
- const getKeyToEntry = bucketEntries(
- path.join(subbucketPath, entry)
- ).reduce((acc, entry) => {
- acc.set(entry.key, entry)
- return acc
- }, new Map())
-
- return getKeyToEntry.then(reduced => {
- for (let entry of reduced.values()) {
- const formatted = formatEntry(cache, entry)
- formatted && stream.push(formatted)
- }
- }).catch({ code: 'ENOENT' }, nop)
- })
- })
- }).then(() => {
- stream.push(null)
- }, err => {
- stream.emit('error', err)
- })
-
- return stream
-}
-
-module.exports.ls = ls
-function ls (cache) {
- return BB.fromNode(cb => {
- lsStream(cache).on('error', cb).pipe(concat(entries => {
- cb(null, entries.reduce((acc, xs) => {
- acc[xs.key] = xs
- return acc
- }, {}))
- }))
- })
-}
-
-function bucketEntries (bucket, filter) {
- return readFileAsync(
- bucket, 'utf8'
- ).then(data => _bucketEntries(data, filter))
-}
-
-function bucketEntriesSync (bucket, filter) {
- const data = fs.readFileSync(bucket, 'utf8')
- return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data, filter) {
- let entries = []
- data.split('\n').forEach(entry => {
- if (!entry) { return }
- const pieces = entry.split('\t')
- if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
- // Hash is no good! Corruption or malice? Doesn't matter!
- // EJECT EJECT
- return
- }
- let obj
- try {
- obj = JSON.parse(pieces[1])
- } catch (e) {
- // Entry is corrupted!
- return
- }
- if (obj) {
- entries.push(obj)
- }
- })
- return entries
-}
-
-module.exports._bucketDir = bucketDir
-function bucketDir (cache) {
- return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports._bucketPath = bucketPath
-function bucketPath (cache, key) {
- const hashed = hashKey(key)
- return path.join.apply(path, [bucketDir(cache)].concat(
- hashToSegments(hashed)
- ))
-}
-
-module.exports._hashKey = hashKey
-function hashKey (key) {
- return hash(key, 'sha256')
-}
-
-module.exports._hashEntry = hashEntry
-function hashEntry (str) {
- return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
- return crypto
- .createHash(digest)
- .update(str)
- .digest('hex')
-}
-
-function formatEntry (cache, entry) {
- // Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity) { return null }
- return {
- key: entry.key,
- integrity: entry.integrity,
- path: contentPath(cache, entry.integrity),
- size: entry.size,
- time: entry.time,
- metadata: entry.metadata
- }
-}
-
-function readdirOrEmpty (dir) {
- return readdirAsync(dir)
- .catch({ code: 'ENOENT' }, () => [])
- .catch({ code: 'ENOTDIR' }, () => [])
-}
-
-function nop () {
-}
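The index above is append-only: every insert (including deletions, written as null-integrity entries) appends `"\n" + sha1(json) + "\t" + json` to a bucket file, and reads simply drop any line whose hash no longer matches its JSON, which is how torn or concurrent appends are tolerated. A small sketch of that line format:

```javascript
const crypto = require('crypto')

const sha1 = str => crypto.createHash('sha1').update(str).digest('hex')

function encodeEntry (entry) {
  const json = JSON.stringify(entry)
  return `\n${sha1(json)}\t${json}`
}

function decodeLine (line) {
  const [hash, json] = line.split('\t')
  if (!json || sha1(json) !== hash) return null // corrupted or torn append
  try { return JSON.parse(json) } catch (_) { return null }
}

const line = encodeEntry({ key: 'my-key', integrity: 'sha512-abc', time: Date.now() })
console.log(decodeLine(line.trim()))
```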
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 92179c7ac..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,69 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MAX_SIZE = 50 * 1024 * 1024 // 50MB
-const MAX_AGE = 3 * 60 * 1000
-
-let MEMOIZED = new LRU({
- max: MAX_SIZE,
- maxAge: MAX_AGE,
- length: (entry, key) => {
- if (key.startsWith('key:')) {
- return entry.data.length
- } else if (key.startsWith('digest:')) {
- return entry.length
- }
- }
-})
-
-module.exports.clearMemoized = clearMemoized
-function clearMemoized () {
- const old = {}
- MEMOIZED.forEach((v, k) => {
- old[k] = v
- })
- MEMOIZED.reset()
- return old
-}
-
-module.exports.put = put
-function put (cache, entry, data, opts) {
- pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
- putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-function putDigest (cache, integrity, data, opts) {
- pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-function get (cache, key, opts) {
- return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-function getDigest (cache, integrity, opts) {
- return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
- constructor (obj) {
- this.obj = obj
- }
- get (key) { return this.obj[key] }
- set (key, val) { this.obj[key] = val }
-}
-
-function pickMem (opts) {
- if (!opts || !opts.memoize) {
- return MEMOIZED
- } else if (opts.memoize.get && opts.memoize.set) {
- return opts.memoize
- } else if (typeof opts.memoize === 'object') {
- return new ObjProxy(opts.memoize)
- } else {
- return MEMOIZED
- }
-}
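Memoization keeps two namespaces in a single LRU: `key:<cache>:<key>` entries hold `{ entry, data }` and are sized by their data, while `digest:<cache>:<integrity>` entries hold the raw buffer. A sketch of the same arrangement; the cache path, key, and digest are hypothetical:

```javascript
const LRU = require('lru-cache')

const memo = new LRU({
  max: 50 * 1024 * 1024,   // total budget in bytes, via the length function
  maxAge: 3 * 60 * 1000,   // entries expire after three minutes
  length: (val, key) => key.startsWith('key:') ? val.data.length : val.length
})

const data = Buffer.from('cached payload')
memo.set('key:/tmp/my-cache:my-key', { entry: { key: 'my-key' }, data })
memo.set('digest:/tmp/my-cache:sha512-abc', data)

console.log(memo.get('digest:/tmp/my-cache:sha512-abc').toString())
```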
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js
deleted file mode 100644
index f5c33db5f..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js
+++ /dev/null
@@ -1,128 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const chownr = BB.promisify(require('chownr'))
-const mkdirp = BB.promisify(require('mkdirp'))
-const inflight = require('promise-inflight')
-const inferOwner = require('infer-owner')
-
-// Memoize getuid()/getgid() calls.
-// patch process.setuid/setgid to invalidate cached value on change
-const self = { uid: null, gid: null }
-const getSelf = () => {
- if (typeof self.uid !== 'number') {
- self.uid = process.getuid()
- const setuid = process.setuid
- process.setuid = (uid) => {
- self.uid = null
- process.setuid = setuid
- return process.setuid(uid)
- }
- }
- if (typeof self.gid !== 'number') {
- self.gid = process.getgid()
- const setgid = process.setgid
- process.setgid = (gid) => {
- self.gid = null
- process.setgid = setgid
- return process.setgid(gid)
- }
- }
-}
-
-module.exports.chownr = fixOwner
-function fixOwner (cache, filepath) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return BB.resolve()
- }
-
- getSelf()
- if (self.uid !== 0) {
- // almost certainly can't chown anyway
- return BB.resolve()
- }
-
- return BB.resolve(inferOwner(cache)).then(owner => {
- const { uid, gid } = owner
-
- // No need to override if it's already what we used.
- if (self.uid === uid && self.gid === gid) {
- return
- }
-
- return inflight(
- 'fixOwner: fixing ownership on ' + filepath,
- () => chownr(
- filepath,
- typeof uid === 'number' ? uid : self.uid,
- typeof gid === 'number' ? gid : self.gid
- ).catch({ code: 'ENOENT' }, () => null)
- )
- })
-}
-
-module.exports.chownr.sync = fixOwnerSync
-function fixOwnerSync (cache, filepath) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return
- }
- const { uid, gid } = inferOwner.sync(cache)
- getSelf()
- if (self.uid === uid && self.gid === gid) {
- // No need to override if it's already what we used.
- return
- }
- try {
- chownr.sync(
- filepath,
- typeof uid === 'number' ? uid : self.uid,
- typeof gid === 'number' ? gid : self.gid
- )
- } catch (err) {
- // only catch ENOENT, any other error is a problem.
- if (err.code === 'ENOENT') {
- return null
- }
- throw err
- }
-}
-
-module.exports.mkdirfix = mkdirfix
-function mkdirfix (cache, p, cb) {
- // we have to infer the owner _before_ making the directory, even though
- // we aren't going to use the results, since the cache itself might not
- // exist yet. If we mkdirp it, then our current uid/gid will be assumed
- // to be correct if it creates the cache folder in the process.
- return BB.resolve(inferOwner(cache)).then(() => {
- return mkdirp(p).then(made => {
- if (made) {
- return fixOwner(cache, made).then(() => made)
- }
- }).catch({ code: 'EEXIST' }, () => {
- // There's a race in mkdirp!
- return fixOwner(cache, p).then(() => null)
- })
- })
-}
-
-module.exports.mkdirfix.sync = mkdirfixSync
-function mkdirfixSync (cache, p) {
- try {
- inferOwner.sync(cache)
- const made = mkdirp.sync(p)
- if (made) {
- fixOwnerSync(cache, made)
- return made
- }
- } catch (err) {
- if (err.code === 'EEXIST') {
- fixOwnerSync(cache, p)
- return null
- } else {
- throw err
- }
- }
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 192be2a6d..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
- return [
- hash.slice(0, 2),
- hash.slice(2, 4),
- hash.slice(4)
- ]
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js
deleted file mode 100644
index b43744b3d..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs')
-const BB = require('bluebird')
-const chmod = BB.promisify(fs.chmod)
-const unlink = BB.promisify(fs.unlink)
-let move
-let pinflight
-
-module.exports = moveFile
-function moveFile (src, dest) {
- // This isn't quite an fs.rename -- the assumption is that
- // if `dest` already exists, and we get certain errors while
- // trying to move it, we should just not bother.
- //
- // In the case of cache corruption, users will receive an
- // EINTEGRITY error elsewhere, and can remove the offending
- // content their own way.
- //
- // Note that, as the name suggests, this strictly only supports file moves.
- return BB.fromNode(cb => {
- fs.link(src, dest, err => {
- if (err) {
- if (err.code === 'EEXIST' || err.code === 'EBUSY') {
- // file already exists, so whatever
- } else if (err.code === 'EPERM' && process.platform === 'win32') {
- // file handle stayed open even past graceful-fs limits
- } else {
- return cb(err)
- }
- }
- return cb()
- })
- }).then(() => {
- // content should never change for any reason, so make it read-only
- return BB.join(unlink(src), process.platform !== 'win32' && chmod(dest, '0444'))
- }).catch(() => {
- if (!pinflight) { pinflight = require('promise-inflight') }
- return pinflight('cacache-move-file:' + dest, () => {
- return BB.promisify(fs.stat)(dest).catch(err => {
- if (err.code !== 'ENOENT') {
- // Something else is wrong here. Bail bail bail
- throw err
- }
- // file doesn't already exist! let's try a rename -> copy fallback
- if (!move) { move = require('move-concurrently') }
- return move(src, dest, { BB, fs })
- })
- })
- })
-}
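`moveFile` above deliberately avoids a plain rename: it hard-links the temp file onto its destination (so a concurrent writer landing identical content first is harmless), unlinks the source, and marks the destination read-only; only if that fails does it fall back to a stat check and `move-concurrently`. A compact `fs.promises` sketch of the happy path only:

```javascript
const fs = require('fs').promises

async function sketchMoveFile (src, dest) {
  try {
    await fs.link(src, dest)
  } catch (err) {
    // EEXIST/EBUSY: identical content already landed there, which is fine
    if (err.code !== 'EEXIST' && err.code !== 'EBUSY') throw err
  }
  await fs.unlink(src)
  if (process.platform !== 'win32') {
    await fs.chmod(dest, 0o444) // cached content never changes, so keep it read-only
  }
}
```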
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 78494b8ea..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,37 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const figgyPudding = require('figgy-pudding')
-const fixOwner = require('./fix-owner')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const uniqueFilename = require('unique-filename')
-
-const TmpOpts = figgyPudding({
- tmpPrefix: {}
-})
-
-module.exports.mkdir = mktmpdir
-function mktmpdir (cache, opts) {
- opts = TmpOpts(opts)
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(cache, tmpTarget).then(() => {
- return tmpTarget
- })
-}
-
-module.exports.withTmp = withTmp
-function withTmp (cache, opts, cb) {
- if (!cb) {
- cb = opts
- opts = null
- }
- opts = TmpOpts(opts)
- return BB.using(mktmpdir(cache, opts).disposer(rimraf), cb)
-}
-
-module.exports.fix = fixtmpdir
-function fixtmpdir (cache) {
- return fixOwner(cache, path.join(cache, 'tmp'))
-}
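`withTmp` pairs a fresh temp directory with a bluebird disposer, so the directory is rimraf'd no matter how the callback's promise settles. A hypothetical usage sketch through the public `cacache.tmp` facade:

```javascript
const cacache = require('cacache')
const fs = require('fs')
const path = require('path')

cacache.tmp.withTmp('/tmp/my-cache', { tmpPrefix: 'unpack' }, dir => {
  // everything written here disappears once the returned promise settles
  fs.writeFileSync(path.join(dir, 'scratch.txt'), 'work in progress')
})
```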
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js
deleted file mode 100644
index d62bedacb..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict'
-
-const path = require('path')
-const y18n = require('y18n')({
- directory: path.join(__dirname, '../../locales'),
- locale: 'en',
- updateFiles: process.env.CACACHE_UPDATE_LOCALE_FILES === 'true'
-})
-
-module.exports = yTag
-function yTag (parts) {
- let str = ''
- parts.forEach((part, i) => {
- const arg = arguments[i + 1]
- str += part
- if (arg) {
- str += '%s'
- }
- })
- return y18n.__.apply(null, [str].concat([].slice.call(arguments, 1)))
-}
-
-module.exports.setLocale = locale => {
- y18n.setLocale(locale)
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 617d38db1..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./content/path')
-const figgyPudding = require('figgy-pudding')
-const finished = BB.promisify(require('mississippi').finished)
-const fixOwner = require('./util/fix-owner')
-const fs = require('graceful-fs')
-const glob = BB.promisify(require('glob'))
-const index = require('./entry-index')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const ssri = require('ssri')
-
-BB.promisifyAll(fs)
-
-const VerifyOpts = figgyPudding({
- concurrency: {
- default: 20
- },
- filter: {},
- log: {
- default: { silly () {} }
- }
-})
-
-module.exports = verify
-function verify (cache, opts) {
- opts = VerifyOpts(opts)
- opts.log.silly('verify', 'verifying cache at', cache)
- return BB.reduce([
- markStartTime,
- fixPerms,
- garbageCollect,
- rebuildIndex,
- cleanTmp,
- writeVerifile,
- markEndTime
- ], (stats, step, i) => {
- const label = step.name || `step #${i}`
- const start = new Date()
- return BB.resolve(step(cache, opts)).then(s => {
- s && Object.keys(s).forEach(k => {
- stats[k] = s[k]
- })
- const end = new Date()
- if (!stats.runTime) { stats.runTime = {} }
- stats.runTime[label] = end - start
- return stats
- })
- }, {}).tap(stats => {
- stats.runTime.total = stats.endTime - stats.startTime
- opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
- })
-}
-
-function markStartTime (cache, opts) {
- return { startTime: new Date() }
-}
-
-function markEndTime (cache, opts) {
- return { endTime: new Date() }
-}
-
-function fixPerms (cache, opts) {
- opts.log.silly('verify', 'fixing cache permissions')
- return fixOwner.mkdirfix(cache, cache).then(() => {
- // TODO - fix file permissions too
- return fixOwner.chownr(cache, cache)
- }).then(() => null)
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rimraf it.
-//
-function garbageCollect (cache, opts) {
- opts.log.silly('verify', 'garbage collecting content')
- const indexStream = index.lsStream(cache)
- const liveContent = new Set()
- indexStream.on('data', entry => {
- if (opts.filter && !opts.filter(entry)) { return }
- liveContent.add(entry.integrity.toString())
- })
- return finished(indexStream).then(() => {
- const contentDir = contentPath._contentDir(cache)
- return glob(path.join(contentDir, '**'), {
- follow: false,
- nodir: true,
- nosort: true
- }).then(files => {
- return BB.resolve({
- verifiedContent: 0,
- reclaimedCount: 0,
- reclaimedSize: 0,
- badContentCount: 0,
- keptSize: 0
- }).tap((stats) => BB.map(files, (f) => {
- const split = f.split(/[/\\]/)
- const digest = split.slice(split.length - 3).join('')
- const algo = split[split.length - 4]
- const integrity = ssri.fromHex(digest, algo)
- if (liveContent.has(integrity.toString())) {
- return verifyContent(f, integrity).then(info => {
- if (!info.valid) {
- stats.reclaimedCount++
- stats.badContentCount++
- stats.reclaimedSize += info.size
- } else {
- stats.verifiedContent++
- stats.keptSize += info.size
- }
- return stats
- })
- } else {
- // No entries refer to this content. We can delete.
- stats.reclaimedCount++
- return fs.statAsync(f).then(s => {
- return rimraf(f).then(() => {
- stats.reclaimedSize += s.size
- return stats
- })
- })
- }
- }, { concurrency: opts.concurrency }))
- })
- })
-}
-
-function verifyContent (filepath, sri) {
- return fs.statAsync(filepath).then(stat => {
- const contentInfo = {
- size: stat.size,
- valid: true
- }
- return ssri.checkStream(
- fs.createReadStream(filepath),
- sri
- ).catch(err => {
- if (err.code !== 'EINTEGRITY') { throw err }
- return rimraf(filepath).then(() => {
- contentInfo.valid = false
- })
- }).then(() => contentInfo)
- }).catch({ code: 'ENOENT' }, () => ({ size: 0, valid: false }))
-}
-
-function rebuildIndex (cache, opts) {
- opts.log.silly('verify', 'rebuilding index')
- return index.ls(cache).then(entries => {
- const stats = {
- missingContent: 0,
- rejectedEntries: 0,
- totalEntries: 0
- }
- const buckets = {}
- for (let k in entries) {
- if (entries.hasOwnProperty(k)) {
- const hashed = index._hashKey(k)
- const entry = entries[k]
- const excluded = opts.filter && !opts.filter(entry)
- excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
- buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
- // skip
- } else if (excluded) {
- buckets[hashed] = []
- buckets[hashed]._path = index._bucketPath(cache, k)
- } else {
- buckets[hashed] = [entry]
- buckets[hashed]._path = index._bucketPath(cache, k)
- }
- }
- }
- return BB.map(Object.keys(buckets), key => {
- return rebuildBucket(cache, buckets[key], stats, opts)
- }, { concurrency: opts.concurrency }).then(() => stats)
- })
-}
-
-function rebuildBucket (cache, bucket, stats, opts) {
- return fs.truncateAsync(bucket._path).then(() => {
- // This needs to be serialized because cacache explicitly
- // lets very racy bucket conflicts clobber each other.
- return BB.mapSeries(bucket, entry => {
- const content = contentPath(cache, entry.integrity)
- return fs.statAsync(content).then(() => {
- return index.insert(cache, entry.key, entry.integrity, {
- metadata: entry.metadata,
- size: entry.size
- }).then(() => { stats.totalEntries++ })
- }).catch({ code: 'ENOENT' }, () => {
- stats.rejectedEntries++
- stats.missingContent++
- })
- })
- })
-}
-
-function cleanTmp (cache, opts) {
- opts.log.silly('verify', 'cleaning tmp directory')
- return rimraf(path.join(cache, 'tmp'))
-}
-
-function writeVerifile (cache, opts) {
- const verifile = path.join(cache, '_lastverified')
- opts.log.silly('verify', 'writing verifile to ' + verifile)
- try {
- return fs.writeFileAsync(verifile, '' + (+(new Date())))
- } finally {
- fixOwner.chownr.sync(cache, verifile)
- }
-}
-
-module.exports.lastRun = lastRun
-function lastRun (cache) {
- return fs.readFileAsync(
- path.join(cache, '_lastverified'), 'utf8'
- ).then(data => new Date(+data))
-}
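Verification above runs a fixed pipeline (permission fixing, mark-and-sweep garbage collection, index rebuild, tmp cleanup, verifile write) and folds each step's results into one stats object. A hypothetical usage sketch that reads some of those stats back:

```javascript
const cacache = require('cacache')

cacache.verify('/tmp/my-cache', { log: { silly: console.log } })
  .then(stats => {
    console.log('kept', stats.verifiedContent, 'entries,',
      'reclaimed', stats.reclaimedSize, 'bytes in', stats.runTime.total, 'ms')
  })
```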
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js b/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js
deleted file mode 100644
index 1715fdb53..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js
+++ /dev/null
@@ -1,47 +0,0 @@
-'use strict'
-
-const ls = require('../ls.js')
-const get = require('../get.js')
-const put = require('../put.js')
-const rm = require('../rm.js')
-const verify = require('../verify.js')
-const setLocale = require('../lib/util/y.js').setLocale
-const clearMemoized = require('../lib/memoization.js').clearMemoized
-const tmp = require('../lib/util/tmp.js')
-
-setLocale('en')
-
-const x = module.exports
-
-x.ls = cache => ls(cache)
-x.ls.stream = cache => ls.stream(cache)
-
-x.get = (cache, key, opts) => get(cache, key, opts)
-x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
-x.get.sync = (cache, key, opts) => get.sync(cache, key, opts)
-x.get.sync.byDigest = (cache, key, opts) => get.sync.byDigest(cache, key, opts)
-x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
-x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
-x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
-x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
-x.get.info = (cache, key) => get.info(cache, key)
-x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)
-x.get.hasContent.sync = (cache, hash) => get.hasContent.sync(cache, hash)
-
-x.put = (cache, key, data, opts) => put(cache, key, data, opts)
-x.put.stream = (cache, key, opts) => put.stream(cache, key, opts)
-
-x.rm = (cache, key) => rm.entry(cache, key)
-x.rm.all = cache => rm.all(cache)
-x.rm.entry = x.rm
-x.rm.content = (cache, hash) => rm.content(cache, hash)
-
-x.setLocale = lang => setLocale(lang)
-x.clearMemoized = () => clearMemoized()
-
-x.tmp = {}
-x.tmp.mkdir = (cache, opts) => tmp.mkdir(cache, opts)
-x.tmp.withTmp = (cache, opts, cb) => tmp.withTmp(cache, opts, cb)
-
-x.verify = (cache, opts) => verify(cache, opts)
-x.verify.lastRun = cache => verify.lastRun(cache)
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json b/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json
deleted file mode 100644
index 4f1452884..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "No cache entry for `%s` found in `%s`": "No cache entry for %s found in %s",
- "Integrity verification failed for %s (%s)": "Integrity verification failed for %s (%s)",
- "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Bad data size: expected inserted data to be %s bytes, but got %s instead",
- "Cache input stream was empty": "Cache input stream was empty",
- "Integrity check failed:\n Wanted: %s\n Found: %s": "Integrity check failed:\n Wanted: %s\n Found: %s"
-} \ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js b/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js
deleted file mode 100644
index ac4e4cfe7..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js
+++ /dev/null
@@ -1,49 +0,0 @@
-'use strict'
-
-const ls = require('../ls.js')
-const get = require('../get.js')
-const put = require('../put.js')
-const rm = require('../rm.js')
-const verify = require('../verify.js')
-const setLocale = require('../lib/util/y.js').setLocale
-const clearMemoized = require('../lib/memoization.js').clearMemoized
-const tmp = require('../lib/util/tmp.js')
-
-setLocale('es')
-
-const x = module.exports
-
-x.ls = cache => ls(cache)
-x.ls.flujo = cache => ls.stream(cache)
-
-x.saca = (cache, clave, ops) => get(cache, clave, ops)
-x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops)
-x.saca.sinc = (cache, clave, ops) => get.sync(cache, clave, ops)
-x.saca.sinc.porHacheo = (cache, hacheo, ops) => get.sync.byDigest(cache, hacheo, ops)
-x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops)
-x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops)
-x.saca.copia = (cache, clave, destino, opts) => get.copy(cache, clave, destino, opts)
-x.saca.copia.porHacheo = (cache, hacheo, destino, opts) => get.copy.byDigest(cache, hacheo, destino, opts)
-x.saca.info = (cache, clave) => get.info(cache, clave)
-x.saca.tieneDatos = (cache, hacheo) => get.hasContent(cache, hacheo)
-x.saca.tieneDatos.sinc = (cache, hacheo) => get.hasContent.sync(cache, hacheo)
-
-x.mete = (cache, clave, datos, ops) => put(cache, clave, datos, ops)
-x.mete.flujo = (cache, clave, ops) => put.stream(cache, clave, ops)
-
-x.rm = (cache, clave) => rm.entry(cache, clave)
-x.rm.todo = cache => rm.all(cache)
-x.rm.entrada = x.rm
-x.rm.datos = (cache, hacheo) => rm.content(cache, hacheo)
-
-x.ponLenguaje = lang => setLocale(lang)
-x.limpiaMemoizado = () => clearMemoized()
-
-x.tmp = {}
-x.tmp.mkdir = (cache, ops) => tmp.mkdir(cache, ops)
-x.tmp.hazdir = x.tmp.mkdir
-x.tmp.conTmp = (cache, ops, cb) => tmp.withTmp(cache, ops, cb)
-
-x.verifica = (cache, ops) => verify(cache, ops)
-x.verifica.ultimaVez = cache => verify.lastRun(cache)
-x.verifica.últimaVez = x.verifica.ultimaVez
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json b/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json
deleted file mode 100644
index a91d76225..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "No cache entry for `%s` found in `%s`": "No existe ninguna entrada para «%s» en «%s»",
- "Integrity verification failed for %s (%s)": "Verificación de integridad falló para «%s» (%s)",
- "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Tamaño incorrecto de datos: los datos insertados debieron haber sido %s octetos, pero fueron %s",
- "Cache input stream was empty": "El stream de entrada al caché estaba vacío"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/ls.js b/node_modules/make-fetch-happen/node_modules/cacache/ls.js
deleted file mode 100644
index 9f49b388a..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/ls.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-
-var index = require('./lib/entry-index')
-
-module.exports = index.ls
-module.exports.stream = index.lsStream
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json
deleted file mode 100644
index 4da8d17fd..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/package.json
+++ /dev/null
@@ -1,126 +0,0 @@
-{
- "_from": "cacache@^12.0.0",
- "_id": "cacache@12.0.3",
- "_inBundle": false,
- "_integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==",
- "_location": "/make-fetch-happen/cacache",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "cacache@^12.0.0",
- "name": "cacache",
- "escapedName": "cacache",
- "rawSpec": "^12.0.0",
- "saveSpec": null,
- "fetchSpec": "^12.0.0"
- },
- "_requiredBy": [
- "/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz",
- "_shasum": "be99abba4e1bf5df461cd5a2c1071fc432573390",
- "_spec": "cacache@^12.0.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/make-fetch-happen",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/cacache/issues"
- },
- "bundleDependencies": false,
- "cache-version": {
- "content": "2",
- "index": "5"
- },
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "contributors": [
- {
- "name": "Charlotte Spencer",
- "email": "charlottelaspencer@gmail.com"
- },
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org"
- }
- ],
- "dependencies": {
- "bluebird": "^3.5.5",
- "chownr": "^1.1.1",
- "figgy-pudding": "^3.5.1",
- "glob": "^7.1.4",
- "graceful-fs": "^4.1.15",
- "infer-owner": "^1.0.3",
- "lru-cache": "^5.1.1",
- "mississippi": "^3.0.0",
- "mkdirp": "^0.5.1",
- "move-concurrently": "^1.0.1",
- "promise-inflight": "^1.0.1",
- "rimraf": "^2.6.3",
- "ssri": "^6.0.1",
- "unique-filename": "^1.1.1",
- "y18n": "^4.0.0"
- },
- "deprecated": false,
- "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
- "devDependencies": {
- "benchmark": "^2.1.4",
- "chalk": "^2.4.2",
- "cross-env": "^5.1.4",
- "require-inject": "^1.4.4",
- "standard": "^12.0.1",
- "standard-version": "^6.0.1",
- "tacks": "^1.3.0",
- "tap": "^12.7.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.9"
- },
- "files": [
- "*.js",
- "lib",
- "locales"
- ],
- "homepage": "https://github.com/npm/cacache#readme",
- "keywords": [
- "cache",
- "caching",
- "content-addressable",
- "sri",
- "sri hash",
- "subresource integrity",
- "cache",
- "storage",
- "store",
- "file store",
- "filesystem",
- "disk cache",
- "disk storage"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "cacache",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/cacache.git"
- },
- "scripts": {
- "benchmarks": "node test/benchmarks",
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "cross-env CACACHE_UPDATE_LOCALE_FILES=true tap --coverage --nyc-arg=--all -J test/*.js",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "12.0.3"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/put.js b/node_modules/make-fetch-happen/node_modules/cacache/put.js
deleted file mode 100644
index a40063930..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/put.js
+++ /dev/null
@@ -1,86 +0,0 @@
-'use strict'
-
-const figgyPudding = require('figgy-pudding')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const write = require('./lib/content/write')
-const to = require('mississippi').to
-
-const PutOpts = figgyPudding({
- algorithms: {
- default: ['sha512']
- },
- integrity: {},
- memoize: {},
- metadata: {},
- pickAlgorithm: {},
- size: {},
- tmpPrefix: {},
- single: {},
- sep: {},
- error: {},
- strict: {}
-})
-
-module.exports = putData
-function putData (cache, key, data, opts) {
- opts = PutOpts(opts)
- return write(cache, data, opts).then(res => {
- return index.insert(
- cache, key, res.integrity, opts.concat({ size: res.size })
- ).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, data, opts)
- }
- return res.integrity
- })
- })
-}
-
-module.exports.stream = putStream
-function putStream (cache, key, opts) {
- opts = PutOpts(opts)
- let integrity
- let size
- const contentStream = write.stream(
- cache, opts
- ).on('integrity', int => {
- integrity = int
- }).on('size', s => {
- size = s
- })
- let memoData
- let memoTotal = 0
- const stream = to((chunk, enc, cb) => {
- contentStream.write(chunk, enc, () => {
- if (opts.memoize) {
- if (!memoData) { memoData = [] }
- memoData.push(chunk)
- memoTotal += chunk.length
- }
- cb()
- })
- }, cb => {
- contentStream.end(() => {
- index.insert(cache, key, integrity, opts.concat({ size })).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
- }
- stream.emit('integrity', integrity)
- cb()
- })
- })
- })
- let erred = false
- stream.once('error', err => {
- if (erred) { return }
- erred = true
- contentStream.emit('error', err)
- })
- contentStream.once('error', err => {
- if (erred) { return }
- erred = true
- stream.emit('error', err)
- })
- return stream
-}
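`putStream` above tees incoming chunks into a content write stream (optionally buffering them for memoization) and re-emits the `integrity` computed by the content layer. A hypothetical usage sketch; the cache path, key, and source file are illustrative:

```javascript
const cacache = require('cacache')
const fs = require('fs')

const sink = cacache.put.stream('/tmp/my-cache', 'my-key')
  .on('integrity', sri => console.log('stored as', sri.toString()))
  .on('error', err => console.error('put failed:', err))

fs.createReadStream('/tmp/some-file').pipe(sink)
```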
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/rm.js
deleted file mode 100644
index e71a1d27b..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/rm.js
+++ /dev/null
@@ -1,28 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const rmContent = require('./lib/content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-function entry (cache, key) {
- memo.clearMemoized()
- return index.delete(cache, key)
-}
-
-module.exports.content = content
-function content (cache, integrity) {
- memo.clearMemoized()
- return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-function all (cache) {
- memo.clearMemoized()
- return rimraf(path.join(cache, '*(content-*|index-*)'))
-}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/verify.js
deleted file mode 100644
index db7763d7a..000000000
--- a/node_modules/make-fetch-happen/node_modules/cacache/verify.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./lib/verify')
diff --git a/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md b/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md
deleted file mode 100644
index d4c589790..000000000
--- a/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md
+++ /dev/null
@@ -1,286 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/zkat/ssri/compare/v6.0.0...v6.0.1) (2018-08-27)
-
-
-### Bug Fixes
-
-* **opts:** use figgy-pudding to specify consumed opts ([cf86553](https://github.com/zkat/ssri/commit/cf86553))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/zkat/ssri/compare/v5.3.0...v6.0.0) (2018-04-09)
-
-
-### Bug Fixes
-
-* **docs:** minor typo ([b71ef17](https://github.com/zkat/ssri/commit/b71ef17))
-
-
-### meta
-
-* drop support for node@4 ([d9bf359](https://github.com/zkat/ssri/commit/d9bf359))
-
-
-### BREAKING CHANGES
-
-* node@4 is no longer supported
-
-
-
-<a name="5.3.0"></a>
-# [5.3.0](https://github.com/zkat/ssri/compare/v5.2.4...v5.3.0) (2018-03-13)
-
-
-### Features
-
-* **checkData:** optionally throw when checkData fails ([bf26b84](https://github.com/zkat/ssri/commit/bf26b84))
-
-
-
-<a name="5.2.4"></a>
-## [5.2.4](https://github.com/zkat/ssri/compare/v5.2.3...v5.2.4) (2018-02-16)
-
-
-
-<a name="5.2.3"></a>
-## [5.2.3](https://github.com/zkat/ssri/compare/v5.2.2...v5.2.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **hashes:** filter hash priority list by available hashes ([2fa30b8](https://github.com/zkat/ssri/commit/2fa30b8))
-* **integrityStream:** dedupe algorithms to generate ([d56c654](https://github.com/zkat/ssri/commit/d56c654))
-
-
-
-<a name="5.2.2"></a>
-## [5.2.2](https://github.com/zkat/ssri/compare/v5.2.1...v5.2.2) (2018-02-14)
-
-
-### Bug Fixes
-
-* **security:** tweak strict SRI regex ([#10](https://github.com/zkat/ssri/issues/10)) ([d0ebcdc](https://github.com/zkat/ssri/commit/d0ebcdc))
-
-
-
-<a name="5.2.1"></a>
-## [5.2.1](https://github.com/zkat/ssri/compare/v5.2.0...v5.2.1) (2018-02-06)
-
-
-
-<a name="5.2.0"></a>
-# [5.2.0](https://github.com/zkat/ssri/compare/v5.1.0...v5.2.0) (2018-02-06)
-
-
-### Features
-
-* **match:** add integrity.match() ([3c49cc4](https://github.com/zkat/ssri/commit/3c49cc4))
-
-
-
-<a name="5.1.0"></a>
-# [5.1.0](https://github.com/zkat/ssri/compare/v5.0.0...v5.1.0) (2018-01-18)
-
-
-### Bug Fixes
-
-* **checkStream:** integrityStream now takes opts.integrity algos into account ([d262910](https://github.com/zkat/ssri/commit/d262910))
-
-
-### Features
-
-* **sha3:** do some guesswork about upcoming sha3 ([7fdd9df](https://github.com/zkat/ssri/commit/7fdd9df))
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/ssri/compare/v4.1.6...v5.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#9) ([c82983a](https://github.com/zkat/ssri/commit/c82983a))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="4.1.6"></a>
-## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07)
-
-
-### Bug Fixes
-
-* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe))
-
-
-
-<a name="4.1.5"></a>
-## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05)
-
-
-### Bug Fixes
-
-* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e))
-
-
-
-<a name="4.1.4"></a>
-## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e))
-
-
-
-<a name="4.1.3"></a>
-## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b))
-
-
-
-<a name="4.1.2"></a>
-## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18)
-
-
-### Bug Fixes
-
-* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805))
-
-
-
-<a name="4.1.1"></a>
-## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12)
-
-
-### Bug Fixes
-
-* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e))
-
-
-
-<a name="4.1.0"></a>
-# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07)
-
-
-### Features
-
-* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** should have changed the error code before. oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa))
-
-
-### BREAKING CHANGES
-
-* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors
-
-
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03)
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f))
-
-
-### Features
-
-* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74))
-* **fromHex:** ssri.fromHex to make it easier to generate them from hex values ([049b89e](https://github.com/zkat/ssri/commit/049b89e))
-* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c))
-* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba))
-* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f))
-* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b))
-* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48))
-* **pickAlgorithm:** Integrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796))
-* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad))
-
-
-### BREAKING CHANGES
-
-* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`.
-* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream.
-
-To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same.
-* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match.
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24)
-
-
-### Bug Fixes
-
-* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c))
-
-
-### Features
-
-* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421))
-* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7))
-* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7))
-* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c))
-* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30))
-* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658))
-
-
-### BREAKING CHANGES
-
-* **pickAlgo:** ssri will prioritize specific hashes now
-* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead.
-* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`.
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-03-23)
-
-
-### Features
-
-* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b))
-
-
-### BREAKING CHANGES
-
-* **api:** Initial API established.
diff --git a/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md b/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/ssri/README.md b/node_modules/make-fetch-happen/node_modules/ssri/README.md
deleted file mode 100644
index c250961bd..000000000
--- a/node_modules/make-fetch-happen/node_modules/ssri/README.md
+++ /dev/null
@@ -1,488 +0,0 @@
-# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest)
-
-[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource
-Integrity, is a Node.js utility for parsing, manipulating, serializing,
-generating, and verifying [Subresource
-Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes.
-
-## Install
-
-`$ npm install --save ssri`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * Parsing & Serializing
- * [`parse`](#parse)
- * [`stringify`](#stringify)
- * [`Integrity#concat`](#integrity-concat)
- * [`Integrity#toString`](#integrity-to-string)
- * [`Integrity#toJSON`](#integrity-to-json)
- * [`Integrity#match`](#integrity-match)
- * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm)
- * [`Integrity#hexDigest`](#integrity-hex-digest)
- * Integrity Generation
- * [`fromHex`](#from-hex)
- * [`fromData`](#from-data)
- * [`fromStream`](#from-stream)
- * [`create`](#create)
- * Integrity Verification
- * [`checkData`](#check-data)
- * [`checkStream`](#check-stream)
- * [`integrityStream`](#integrity-stream)
-
-### Example
-
-```javascript
-const ssri = require('ssri')
-
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Parsing and serializing
-const parsed = ssri.parse(integrity)
-ssri.stringify(parsed) // === integrity (works on non-Integrity objects)
-parsed.toString() // === integrity
-
-// Async stream functions
-ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...)
-ssri.fromStream(fs.createReadStream('./my-file')).then(sri => {
- sri.toString() === integrity
-})
-fs.createReadStream('./my-file').pipe(ssri.integrityStream({ integrity }))
-
-// Sync data functions
-ssri.fromData(fs.readFileSync('./my-file')) // === parsed
-ssri.checkData(fs.readFileSync('./my-file'), integrity) // -> matching Hash (truthy) or false
-```
-
-### Features
-
-* Parses and stringifies SRI strings.
-* Generates SRI strings from raw data or Streams.
-* Strict standard compliance.
-* `?foo` metadata option support.
-* Multiple entries for the same algorithm.
-* Object-based integrity hash manipulation.
-* Small footprint: no dependencies, concise implementation.
-* Full test coverage.
-* Customizable algorithm picker.
-
-### Contributing
-
-The ssri team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity`
-
-Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
-string, a `Hash`-like object with `digest` and `algorithm` fields and an optional
-`options` field, or an `Integrity`-like object. The resulting object will be an
-`Integrity` instance that has this shape:
-
-```javascript
-{
- 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
- 'sha512': [
- {algorithm: 'sha512', digest: 'c0ffee', options: []},
- {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
- ],
-}
-```
-
-If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
-single object that looks like `{algorithm, digest, options}`, as opposed to a
-larger object containing one or more such entries keyed by algorithm.
-
-If `opts.strict` is truthy, the resulting object will be filtered such that
-it strictly follows the Subresource Integrity spec, throwing away any entries
-with any invalid components. This also means a restricted set of algorithms
-will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
-
-Strict mode is recommended if the integrity strings are intended for use in
-browsers, or in other situations where strict adherence to the spec is needed.
-
-##### Example
-
-```javascript
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
-```
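-
-A sketch of the two parse options described above (the digests here are abbreviated placeholders, so treat the outputs as approximate):
-
-```javascript
-// opts.single: get a single Hash back instead of a keyed Integrity object
-ssri.parse('sha512-c0ffee', {single: true})
-// -> Hash {algorithm: 'sha512', digest: 'c0ffee', options: []}
-
-// opts.strict: entries that don't follow the SRI spec (like the md5 one here,
-// or any digest that isn't valid full-length base64) are silently dropped
-ssri.parse('md5-deadbeef sha512-c0ffee', {strict: true})
-```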
-
-#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String`
-
-This function is identical to [`Integrity#toString()`](#integrity-to-string),
-except it can be used on _any_ object that [`parse`](#parse) can handle -- that
-is, a string, a `Hash`-like object, or an `Integrity`-like object.
-
-The `opts.sep` option defines the string to use when joining multiple entries
-together. To be spec-compliant, this _must_ be whitespace. The default is a
-single space (`' '`).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// Useful for cleaning up input SRI strings:
-ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
-// -> 'sha512-foo sha384-bar'
-
-// Hash-like: only a single entry.
-ssri.stringify({
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
-ssri.stringify({
- 'sha512': [
- {
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
- }
- ]
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-```
-
-#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
-
-Concatenates an `Integrity` object with another `Integrity`-like object or an
-integrity string.
-
-This is functionally equivalent to concatenating the string format of both
-integrity arguments, and calling [`ssri.parse`](#parse) on the new string.
-
-If `opts.strict` is true, the new `Integrity` will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// This will combine the integrity checks for two different versions of
-// your index.js file so you can use a single integrity string and serve
-// either of these to clients, from a single `<script>` tag.
-const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js'))
-const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js'))
-
-// Note that browsers (and ssri) will succeed as long as ONE of the entries
-// for the *prioritized* algorithm succeeds. That is, in order for this fallback
-// to work, both desktop and mobile *must* use the same `algorithm` values.
-desktopIntegrity.concat(mobileIntegrity)
-```
-
-#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be joined in the string with `opts.sep`, which defaults to `' '`.
-
-If you want to serialize an object that didn't come from an `ssri` function,
-use [`ssri.stringify()`](#stringify).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-ssri.parse(integrity).toString() === integrity
-```
-
-#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be joined in the string with `' '`.
-
-This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`.
-For more info check out [toJSON() behavior on mdn](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior).
-
-##### Example
-
-```javascript
-const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"'
-
-JSON.stringify(ssri.parse(integrity)) === integrity
-```
-
-#### <a name="integrity-match"></a> `> Integrity#match(sri, [opts]) -> Hash | false`
-
-Returns the matching (truthy) `Hash` if the `Integrity` object matches the argument passed as
-`sri`, which can be anything that [`parse`](#parse) will accept. `opts` will be
-passed through to `parse` and [`pickAlgorithm()`](#integrity-pick-algorithm).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-
-ssri.parse(integrity).match(integrity)
-// Hash {
-// digest: '9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-// algorithm: 'sha512'
-// }
-
-ssri.parse(integrity).match('sha1-deadbeef')
-// false
-```
-
-#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String`
-
-Returns the "best" algorithm from those available in the integrity object.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512
-```
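-
-A sketch of a custom picker (the preference below is only an illustration, not something ssri ships with):
-
-```javascript
-// The callback is applied pairwise and must return one of its two arguments.
-const preferSha384 = (a, b) => (b === 'sha384' ? b : a)
-
-ssri.parse('sha256-deadbeef sha384-c0ffee')
-  .pickAlgorithm({pickAlgorithm: preferSha384}) // -> 'sha384'
-```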
-
-#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String`
-
-`Integrity` is assumed to be either a single-hash `Integrity` instance, or a
-`Hash` instance. Returns its `digest`, converted to a hex representation of the
-base64 data.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f'
-```
-
-#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity`
-
-Creates an `Integrity` object with a single entry, based on a hex-formatted
-hash. This is a utility function to help convert existing shasums to the
-Integrity format, and is roughly equivalent to something like:
-
-```javascript
-algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64')
-```
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to the generated integrity hash. This is a
-loosely-specified feature of SRIs, and currently has no
-specified semantics besides being `?`-separated. Use at your own risk, and
-probably avoid if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-If `opts.single` is true, a single `Hash` object will be returned.
-
-##### Example
-
-```javascript
-ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef'
-```
-
-#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity`
-
-Creates an `Integrity` object from either string or `Buffer` data, calculating
-all the requested hashes and adding any specified options to the object.
-
-`opts.algorithms` determines which algorithms to generate hashes for. All
-results will be included in a single `Integrity` object. The default value for
-`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed
-in `crypto.getHashes()` for the host Node.js platform.
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to all integrity hashes generated by
-`fromData`. This is a loosely-specified feature of SRIs, and currently has no
-specified semantics besides being `?`-separated. Use at your own risk, and
-probably avoid if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData('foobarbaz', {
- algorithms: ['sha256', 'sha384', 'sha512']
-})
-integrity.toString({ sep: '\n' })
-// ->
-// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=
-// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
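-
-And a sketch of `opts.options` (the option strings here are arbitrary examples with no defined semantics):
-
-```javascript
-ssri.fromData('foobarbaz', {
-  algorithms: ['sha256'],
-  options: ['foo', 'bar']
-}).toString()
-// -> 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=?foo?bar'
-```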
-
-#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>`
-
-Returns a Promise of an Integrity object calculated by reading data from
-a given `stream`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-Additionally, `opts.Promise` may be passed in to inject a Promise library of
-choice. By default, ssri will use Node's built-in Promises.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-ssri.fromStream(fs.createReadStream('index.js'), {
- algorithms: ['sha1', 'sha512']
-}).then(integrity => {
- return ssri.checkStream(fs.createReadStream('index.js'), integrity)
-}) // succeeds
-```
-
-#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>`
-
-Returns a Hash-like object with `update(<Buffer or string>[, enc])` and `digest()` methods.
-
-
-The returned object mirrors the interface of [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash),
-except that `digest()` accepts no arguments and returns an `Integrity` object
-calculated from the data passed to `update()`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = ssri.create().update('foobarbaz').digest()
-integrity.toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
-
-#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false`
-
-Verifies `data` integrity against an `sri` argument. `data` may be either a
-`String` or a `Buffer`, and `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-If verification succeeds, `checkData` will return the matching `Hash` object
-(a truthy value). Otherwise, it will return `false`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-If `opts.error` is true, and verification fails, `checkData` will throw either
-an `EBADSIZE` or an `EINTEGRITY` error, instead of just returning false.
-
-##### Example
-
-```javascript
-const data = fs.readFileSync('index.js')
-ssri.checkData(data, ssri.fromData(data)) // -> sha512 Hash entry (truthy)
-ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0')
-ssri.checkData(data, 'sha1-BaDDigEST') // -> false
-ssri.checkData(data, 'sha1-BaDDigEST', {error: true}) // -> Error! EINTEGRITY
-```
-
-#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>`
-
-Verifies the contents of `stream` against an `sri` argument. `stream` will be
-consumed in its entirety by this process. `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-`checkStream` returns a Promise that resolves to the `Hash` that
-succeeded verification; if verification fails or an error occurs
-on `stream`, the Promise is rejected.
-
-If the Promise is rejected because verification failed, the returned error will
-have `err.code` as `EINTEGRITY`.
-
-If `opts.size` is given, it will be matched against the stream size. If the
-expected and actual sizes fail to match, the Promise will be rejected with an
-error whose `err.code` is `EBADSIZE`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- integrity
-)
-// ->
-// Promise<{
-// algorithm: 'sha512',
-// digest: 'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
-// }>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0'
-) // -> Promise<Hash>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha1-BaDDigEST'
-) // -> Promise<Error<{code: 'EINTEGRITY'}>>
-```
-
-#### <a name="integrity-stream"></a> `> integrityStream([opts]) -> IntegrityStream`
-
-Returns a `Transform` stream that data can be piped through in order to generate
-and optionally check data integrity for piped data. When the stream completes
-successfully, it emits `size` and `integrity` events, containing the total
-number of bytes processed and a calculated `Integrity` instance based on stream
-data, respectively.
-
-If `opts.algorithms` is passed in, the listed algorithms will be calculated when
-generating the final `Integrity` instance. The default is `['sha512']`.
-
-If `opts.single` is passed in, a single `Hash` instance will be returned.
-
-If `opts.integrity` is passed in, it should be an `integrity` value understood
-by [`parse`](#parse) that the stream will check the data against. If
-verification succeeds, the integrity stream will emit a `verified` event whose
-value is a single `Hash` object that is the one that succeeded verification. If
-verification fails, the stream will error with an `EINTEGRITY` error code.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be emitted by the stream if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-fs.createReadStream('index.js')
-.pipe(ssri.integrityStream({integrity}))
-```
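-
-To consume the emitted events, a sketch along these lines should work (the copy destination is just for illustration; `integrity` is the value computed above):
-
-```javascript
-fs.createReadStream('index.js')
-  .pipe(ssri.integrityStream({integrity}))
-  .on('size', bytes => console.log(`${bytes} bytes hashed`))
-  .on('integrity', sri => console.log(`calculated: ${sri}`))
-  .on('verified', hash => console.log(`verified via ${hash.algorithm}`))
-  .on('error', err => console.error(err.code)) // EINTEGRITY or EBADSIZE
-  .pipe(fs.createWriteStream('index.copy.js'))
-```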
diff --git a/node_modules/make-fetch-happen/node_modules/ssri/index.js b/node_modules/make-fetch-happen/node_modules/ssri/index.js
deleted file mode 100644
index e102892b0..000000000
--- a/node_modules/make-fetch-happen/node_modules/ssri/index.js
+++ /dev/null
@@ -1,395 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const figgyPudding = require('figgy-pudding')
-const Transform = require('stream').Transform
-
-const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
-
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([^-]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([^-]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)*$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const SsriOpts = figgyPudding({
- algorithms: {default: ['sha512']},
- error: {default: false},
- integrity: {},
- options: {default: []},
- pickAlgorithm: {default: () => getPrioritizedHash},
- Promise: {default: () => Promise},
- sep: {default: ' '},
- single: {default: false},
- size: {},
- strict: {default: false}
-})
-
-class Hash {
- get isHash () { return true }
- constructor (hash, opts) {
- opts = SsriOpts(opts)
- const strict = !!opts.strict
- this.source = hash.trim()
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) { return }
- if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- this.options = rawOpts ? rawOpts.slice(1).split('?') : []
- }
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = SsriOpts(opts)
- if (opts.strict) {
-      // Strict mode enforces the standard as closely to the letter of the
-      // spec as it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
-        // leave it as-is, since there are multiple standards, and the
-        // one specified is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- (this.options || []).every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- const options = this.options && this.options.length
- ? `?${this.options.join('?')}`
- : ''
- return `${this.algorithm}-${this.digest}${options}`
- }
-}
-
-class Integrity {
- get isIntegrity () { return true }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = SsriOpts(opts)
- let sep = opts.sep || ' '
- if (opts.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
- }
- return Object.keys(this).map(k => {
- return this[k].map(hash => {
- return Hash.prototype.toString.call(hash, opts)
- }).filter(x => x.length).join(sep)
- }).filter(x => x.length).join(sep)
- }
- concat (integrity, opts) {
- opts = SsriOpts(opts)
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
- hexDigest () {
- return parse(this, {single: true}).hexDigest()
- }
- match (integrity, opts) {
- opts = SsriOpts(opts)
- const other = parse(integrity, opts)
- const algo = other.pickAlgorithm(opts)
- return (
- this[algo] &&
- other[algo] &&
- this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest
- )
- )
- ) || false
- }
- pickAlgorithm (opts) {
- opts = SsriOpts(opts)
- const pickAlgorithm = opts.pickAlgorithm
- const keys = Object.keys(this)
- if (!keys.length) {
- throw new Error(`No algorithms available for ${
- JSON.stringify(this.toString())
- }`)
- }
- return keys.reduce((acc, algo) => {
- return pickAlgorithm(acc, algo) || acc
- })
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- opts = SsriOpts(opts)
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts.single) {
- return new Hash(integrity, opts)
- }
- return integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- opts = SsriOpts(opts)
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- opts = SsriOpts(opts)
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- opts = SsriOpts(opts)
- const algorithms = opts.algorithms
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- opts = SsriOpts(opts)
- const P = opts.Promise || Promise
- const istream = integrityStream(opts)
- return new P((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => { sri = s })
- istream.on('end', () => resolve(sri))
- istream.on('data', () => {})
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- opts = SsriOpts(opts)
- sri = parse(sri, opts)
- if (!Object.keys(sri).length) {
- if (opts.error) {
- throw Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY'
- }
- )
- } else {
- return false
- }
- }
- const algorithm = sri.pickAlgorithm(opts)
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- const newSri = parse({algorithm, digest})
- const match = newSri.match(sri, opts)
- if (match || !opts.error) {
- return match
- } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
- const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
- err.code = 'EBADSIZE'
- err.found = data.length
- err.expected = opts.size
- err.sri = sri
- throw err
- } else {
- const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = sri
- err.algorithm = algorithm
- err.sri = sri
- throw err
- }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = SsriOpts(opts)
- const P = opts.Promise || Promise
- const checker = integrityStream(opts.concat({
- integrity: sri
- }))
- return new P((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let sri
- checker.on('verified', s => { sri = s })
- checker.on('end', () => resolve(sri))
- checker.on('data', () => {})
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts) {
- opts = SsriOpts(opts)
- // For verification
- const sri = opts.integrity && parse(opts.integrity, opts)
- const goodSri = sri && Object.keys(sri).length
- const algorithm = goodSri && sri.pickAlgorithm(opts)
- const digests = goodSri && sri[algorithm]
- // Calculating stream
- const algorithms = Array.from(
- new Set(opts.algorithms.concat(algorithm ? [algorithm] : []))
- )
- const hashes = algorithms.map(crypto.createHash)
- let streamSize = 0
- const stream = new Transform({
- transform (chunk, enc, cb) {
- streamSize += chunk.length
- hashes.forEach(h => h.update(chunk, enc))
- cb(null, chunk, enc)
- }
- }).on('end', () => {
- const optString = (opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- const newSri = parse(hashes.map((h, i) => {
- return `${algorithms[i]}-${h.digest('base64')}${optString}`
- }).join(' '), opts)
- // Integrity verification mode
- const match = goodSri && newSri.match(sri, opts)
- if (typeof opts.size === 'number' && streamSize !== opts.size) {
- const err = new Error(`stream size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${streamSize}`)
- err.code = 'EBADSIZE'
- err.found = streamSize
- err.expected = opts.size
- err.sri = sri
- stream.emit('error', err)
- } else if (opts.integrity && !match) {
- const err = new Error(`${sri} integrity checksum failed when using ${algorithm}: wanted ${digests} but got ${newSri}. (${streamSize} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = digests
- err.algorithm = algorithm
- err.sri = sri
- stream.emit('error', err)
- } else {
- stream.emit('size', streamSize)
- stream.emit('integrity', newSri)
- match && stream.emit('verified', match)
- }
- })
- return stream
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- opts = SsriOpts(opts)
- const algorithms = opts.algorithms
- const optString = opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function (enc) {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- }
- }
-}
-
-const NODE_HASHES = new Set(crypto.getHashes())
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
- // TODO - it's unclear _which_ of these Node will actually use as its name
- // for the algorithm, so we guesswork it based on the OpenSSL names.
- 'sha3',
- 'sha3-256', 'sha3-384', 'sha3-512',
- 'sha3_256', 'sha3_384', 'sha3_512'
-].filter(algo => NODE_HASHES.has(algo))
-
-function getPrioritizedHash (algo1, algo2) {
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
diff --git a/node_modules/make-fetch-happen/node_modules/ssri/package.json b/node_modules/make-fetch-happen/node_modules/ssri/package.json
deleted file mode 100644
index 1bcb8ae14..000000000
--- a/node_modules/make-fetch-happen/node_modules/ssri/package.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
- "_from": "ssri@^6.0.0",
- "_id": "ssri@6.0.1",
- "_inBundle": false,
- "_integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==",
- "_location": "/make-fetch-happen/ssri",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "ssri@^6.0.0",
- "name": "ssri",
- "escapedName": "ssri",
- "rawSpec": "^6.0.0",
- "saveSpec": null,
- "fetchSpec": "^6.0.0"
- },
- "_requiredBy": [
- "/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz",
- "_shasum": "2a3c41b28dd45b62b63676ecb74001265ae9edd8",
- "_spec": "ssri@^6.0.0",
- "_where": "/Users/claudiahdz/npm/cli/node_modules/make-fetch-happen",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/ssri/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "figgy-pudding": "^3.5.1"
- },
- "deprecated": false,
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "devDependencies": {
- "nyc": "^11.4.1",
- "standard": "^10.0.3",
- "standard-version": "^4.3.0",
- "tap": "^11.1.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/zkat/ssri#readme",
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "ssri",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/ssri.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "6.0.1"
-}
diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index a2c7397bc..000000000
--- a/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
- "_from": "make-fetch-happen@5.0.2",
- "_id": "make-fetch-happen@5.0.2",
- "_inBundle": false,
- "_integrity": "sha512-07JHC0r1ykIoruKO8ifMXu+xEU8qOXDFETylktdug6vJDACnP+HKevOu3PXyNPzFyTSlz8vrBYlBO1JZRe8Cag==",
- "_location": "/make-fetch-happen",
- "_phantomChildren": {},
- "_requested": {
- "type": "version",
- "registry": true,
- "raw": "make-fetch-happen@5.0.2",
- "name": "make-fetch-happen",
- "escapedName": "make-fetch-happen",
- "rawSpec": "5.0.2",
- "saveSpec": null,
- "fetchSpec": "5.0.2"
- },
- "_requiredBy": [
- "#USER",
- "/",
- "/npm-registry-fetch",
- "/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-5.0.2.tgz",
- "_shasum": "aa8387104f2687edca01c8687ee45013d02d19bd",
- "_spec": "make-fetch-happen@5.0.2",
- "_where": "/Users/claudiahdz/npm/cli",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@zkat.tech"
- },
- "bugs": {
- "url": "https://github.com/zkat/make-fetch-happen/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "agentkeepalive": "^3.4.1",
- "cacache": "^12.0.0",
- "http-cache-semantics": "^3.8.1",
- "http-proxy-agent": "^2.1.0",
- "https-proxy-agent": "^2.2.3",
- "lru-cache": "^5.1.1",
- "mississippi": "^3.0.0",
- "node-fetch-npm": "^2.0.2",
- "promise-retry": "^1.1.1",
- "socks-proxy-agent": "^4.0.0",
- "ssri": "^6.0.0"
- },
- "deprecated": false,
- "description": "Opinionated, caching, retrying fetch client",
- "devDependencies": {
- "bluebird": "^3.5.1",
- "mkdirp": "^0.5.1",
- "nock": "^9.2.3",
- "npmlog": "^4.1.2",
- "require-inject": "^1.4.2",
- "rimraf": "^2.6.2",
- "safe-buffer": "^5.1.1",
- "standard": "^11.0.1",
- "standard-version": "^4.3.0",
- "tacks": "^1.2.6",
- "tap": "^12.7.0",
- "weallbehave": "^1.0.0",
- "weallcontribute": "^1.0.7"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/zkat/make-fetch-happen#readme",
- "keywords": [
- "http",
- "request",
- "fetch",
- "mean girls",
- "caching",
- "cache",
- "subresource integrity"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "make-fetch-happen",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/make-fetch-happen.git"
- },
- "scripts": {
- "postrelease": "npm publish --tag=legacy && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap --coverage --nyc-arg=--all --timeout=35 -J test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "5.0.2"
-}
diff --git a/node_modules/make-fetch-happen/warning.js b/node_modules/make-fetch-happen/warning.js
deleted file mode 100644
index b8f13cf83..000000000
--- a/node_modules/make-fetch-happen/warning.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const url = require('url')
-
-module.exports = setWarning
-
-function setWarning (reqOrRes, code, message, replace) {
- // Warning = "Warning" ":" 1#warning-value
- // warning-value = warn-code SP warn-agent SP warn-text [SP warn-date]
- // warn-code = 3DIGIT
- // warn-agent = ( host [ ":" port ] ) | pseudonym
- // ; the name or pseudonym of the server adding
- // ; the Warning header, for use in debugging
- // warn-text = quoted-string
- // warn-date = <"> HTTP-date <">
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- const host = url.parse(reqOrRes.url).host
- const jsonMessage = JSON.stringify(message)
- const jsonDate = JSON.stringify(new Date().toUTCString())
- const header = replace ? 'set' : 'append'
-
- reqOrRes.headers[header](
- 'Warning',
- `${code} ${host} ${jsonMessage} ${jsonDate}`
- )
-}
diff --git a/node_modules/node-fetch-npm/CHANGELOG.md b/node_modules/node-fetch-npm/CHANGELOG.md
deleted file mode 100644
index a0cfe7f73..000000000
--- a/node_modules/node-fetch-npm/CHANGELOG.md
+++ /dev/null
@@ -1,252 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="2.0.2"></a>
-## [2.0.2](https://github.com/npm/node-fetch-npm/compare/v2.0.1...v2.0.2) (2017-08-15)
-
-
-### Bug Fixes
-
-* **json:** replace jju with a custom syntax error reporter ([#6](https://github.com/npm/node-fetch-npm/issues/6)) ([84d169c](https://github.com/npm/node-fetch-npm/commit/84d169c))
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/npm/node-fetch-npm/compare/v2.0.0...v2.0.1) (2017-05-24)
-
-
-### Bug Fixes
-
-* **json:** improve JSON parse error reporting ([1c810df](https://github.com/npm/node-fetch-npm/commit/1c810df))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/npm/node-fetch-npm/compare/v1.0.1...v2.0.0) (2017-05-06)
-
-
-### Features
-
-* **version:** force bump to 2.0 ([39c5d50](https://github.com/npm/node-fetch-npm/commit/39c5d50))
-
-
-### BREAKING CHANGES
-
-* **version:** tooling got confused, so hitting 2.0.0 for reals now
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/npm/node-fetch-npm/compare/v1.0.0...v1.0.1) (2017-05-06)
-
-
-### Bug Fixes
-
-* **redirect:** Remove authorization header on redirect to different host ([#2](https://github.com/npm/node-fetch-npm/issues/2)) ([273260e](https://github.com/npm/node-fetch-npm/commit/273260e))
-
-
-
-<a name="1.0.0"></a>
-# [1.0.0](https://github.com/npm/node-fetch-npm/compare/v2.0.0-alpha.3...v1.0.0) (2017-04-26)
-
-
-### Bug Fixes
-
-* **proj:** initial fork changes ([0a190a8](https://github.com/npm/node-fetch-npm/commit/0a190a8))
-
-
-### BREAKING CHANGES
-
-* **proj:** this renames the library altogether
-
-
-
-
-Changelog
-=========
-
-
-# 2.x release
-
-## v2.0.0
-
-This is a major release. Check [our upgrade guide](https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md) for an overview on some key differences between v1 and v2.
-
-### General changes
-
-- Major: Node.js 0.10.x and 0.12.x support is dropped
-- Major: `require('node-fetch/lib/response')` etc. is now unsupported; use `require('node-fetch').Response` or ES6 module imports
-- Enhance: start testing on Node.js 4, 6, 7
-- Enhance: use Rollup to produce a distributed bundle (less memory overhead and faster startup)
-- Enhance: make `Object.prototype.toString()` on Headers, Requests, and Responses return correct class strings
-- Other: rewrite in ES2015 using Babel
-- Other: use Codecov for code coverage tracking
-
-### HTTP requests
-
-- Major: overwrite user's `Content-Length` if we can be sure our information is correct (per spec)
-- Fix: support WHATWG URL objects, created by `whatwg-url` package or `require('url').URL` in Node.js 7+
-
-### Response and Request classes
-
-- Major: `response.text()` no longer attempts to detect encoding, instead always opting for UTF-8 (per spec); use `response.textConverted()` for the v1 behavior
-- Major: make `response.json()` throw an error instead of returning an empty object on a 204 no-content response (per spec; reverts behavior changed in v1.6.2)
-- Major: internal methods are no longer exposed
-- Major: throw error when a `GET` or `HEAD` Request is constructed with a non-null body (per spec)
-- Enhance: add `response.arrayBuffer()` (also applies to Requests)
-- Enhance: add experimental `response.blob()` (also applies to Requests)
-- Fix: fix Request and Response with `null` body
-
-### Headers class
-
-- Major: remove `headers.getAll()`; make `get()` return all headers delimited by commas (per spec)
-- Enhance: make Headers iterable
-- Enhance: make Headers constructor accept an array of tuples
-- Enhance: make sure header names and values are valid in HTTP
-- Fix: coerce Headers prototype function parameters to strings, where applicable
-
-### Documentation
-
-- Enhance: more comprehensive API docs
-- Enhance: add a list of default headers in README
-
-
-# 1.x release
-
-## v1.6.3
-
-- Enhance: error handling document to explain `FetchError` design
-- Fix: support `form-data` 2.x releases (requires `form-data` >= 2.1.0)
-
-## v1.6.2
-
-- Enhance: minor document update
-- Fix: response.json() returns empty object on 204 no-content response instead of throwing a syntax error
-
-## v1.6.1
-
-- Fix: if `res.body` is a non-stream non-formdata object, we will call `body.toString` and send it as a string
-- Fix: `counter` value is incorrectly set to `follow` value when wrapping Request instance
-- Fix: documentation update
-
-## v1.6.0
-
-- Enhance: added `res.buffer()` api for convenience, it returns body as a Node.js buffer
-- Enhance: better old server support by handling raw deflate response
-- Enhance: skip encoding detection for non-HTML/XML response
-- Enhance: minor document update
-- Fix: HEAD request doesn't need decompression, as body is empty
-- Fix: `req.body` now accepts a Node.js buffer
-
-## v1.5.3
-
-- Fix: handle 204 and 304 responses when body is empty but content-encoding is gzip/deflate
-- Fix: allow resolving response and cloned response in any order
-- Fix: avoid setting `content-length` when `form-data` body use streams
-- Fix: send DELETE request with content-length when body is present
-- Fix: allow any url when calling new Request, but still reject non-http(s) url in fetch
-
-## v1.5.2
-
-- Fix: allow node.js core to handle keep-alive connection pool when passing a custom agent
-
-## v1.5.1
-
-- Fix: redirect mode `manual` should work even when there is no redirection or broken redirection
-
-## v1.5.0
-
-- Enhance: rejected promise now use custom `Error` (thx to @pekeler)
-- Enhance: `FetchError` contains `err.type` and `err.code`, allows for better error handling (thx to @pekeler)
-- Enhance: basic support for redirect mode `manual` and `error`, allows for location header extraction (thx to @jimmywarting for the initial PR)
-
-## v1.4.1
-
-- Fix: wrapping Request instance with FormData body again should preserve the body as-is
-
-## v1.4.0
-
-- Enhance: Request and Response now have `clone` method (thx to @kirill-konshin for the initial PR)
-- Enhance: Request and Response now have proper string and buffer body support (thx to @kirill-konshin)
-- Enhance: Body constructor has been refactored out (thx to @kirill-konshin)
-- Enhance: Headers now has `forEach` method (thx to @tricoder42)
-- Enhance: back to 100% code coverage
-- Fix: better form-data support (thx to @item4)
-- Fix: better character encoding detection under chunked encoding (thx to @dsuket for the initial PR)
-
-## v1.3.3
-
-- Fix: make sure `Content-Length` header is set when body is string for POST/PUT/PATCH requests
-- Fix: handle body stream error, for cases such as incorrect `Content-Encoding` header
-- Fix: when following certain redirects, use `GET` on subsequent request per Fetch Spec
-- Fix: `Request` and `Response` constructors now parse headers input using `Headers`
-
-## v1.3.2
-
-- Enhance: allow auto detect of form-data input (no `FormData` spec on node.js, this is form-data specific feature)
-
-## v1.3.1
-
-- Enhance: allow custom host header to be set (server-side only feature, as it's a forbidden header on client-side)
-
-## v1.3.0
-
-- Enhance: now `fetch.Request` is exposed as well
-
-## v1.2.1
-
-- Enhance: `Headers` now normalizes `Number` values to `String`, preventing common mistakes
-
-## v1.2.0
-
-- Enhance: now fetch.Headers and fetch.Response are exposed, making testing easier
-
-## v1.1.2
-
-- Fix: `Headers` should only support `String` and `Array` properties, and ignore others
-
-## v1.1.1
-
-- Enhance: now req.headers accept both plain object and `Headers` instance
-
-## v1.1.0
-
-- Enhance: timeout now also applies to response body (in case of slow response)
-- Fix: timeout is now cleared properly when fetch is done/has failed
-
-## v1.0.6
-
-- Fix: less greedy content-type charset matching
-
-## v1.0.5
-
-- Fix: when `follow = 0`, fetch should not follow redirect
-- Enhance: update tests for better coverage
-- Enhance: code formatting
-- Enhance: clean up doc
-
-## v1.0.4
-
-- Enhance: test iojs support
-- Enhance: timeout attached to socket event only fire once per redirect
-
-## v1.0.3
-
-- Fix: response size limit should reject large chunk
-- Enhance: added character encoding detection for xml, such as rss/atom feed (encoding in DTD)
-
-## v1.0.2
-
-- Fix: added res.ok per spec change
-
-## v1.0.0
-
-- Enhance: better test coverage and doc
-
-
-# 0.x release
-
-## v0.1
-
-- Major: initial public release
diff --git a/node_modules/node-fetch-npm/LICENSE.md b/node_modules/node-fetch-npm/LICENSE.md
deleted file mode 100644
index 660ffecb5..000000000
--- a/node_modules/node-fetch-npm/LICENSE.md
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2016 David Frank
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
diff --git a/node_modules/node-fetch-npm/README.md b/node_modules/node-fetch-npm/README.md
deleted file mode 100644
index 6d3dd0e6a..000000000
--- a/node_modules/node-fetch-npm/README.md
+++ /dev/null
@@ -1,398 +0,0 @@
-
-node-fetch-npm
-==============
-
-[![npm version][npm-image]][npm-url]
-[![build status][travis-image]][travis-url]
-[![coverage status][codecov-image]][codecov-url]
-
-A light-weight module that brings `window.fetch` to Node.js
-
-`node-fetch-npm` is a fork of [`node-fetch`](https://npm.im/node-fetch) used in
-npm itself, through [`make-fetch-happen`](https://npm.im/make-fetch-happen). It
-has more regular releases and accepts some patches that would not fit with
-`node-fetch`'s own design goals (such as picking a specific cookie library,
-removing `babel` dependency altogether, etc).
-
-This library is *not a replacement* for `node-fetch`, nor does it intend to
-supplant it. It's purely a fork maintained for the sake of easier patching of
-specific needs that it wouldn't be fair to shove down the main project's throat.
-This project will still send patches for shared bugs over and hopefully help
-improve its "parent".
-
-## Motivation
-
-Instead of implementing `XMLHttpRequest` in Node.js to run a browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from the native `http` module to the `fetch` API directly? Hence `node-fetch`: minimal code for a `window.fetch`-compatible API on the Node.js runtime.
-
-See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side).
-
-
-## Features
-
-- Stay consistent with `window.fetch` API.
-- Make conscious trade-offs when following the [whatwg fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, and document known differences.
-- Use native promise, but allow substituting it with [insert your favorite promise library].
-- Use native stream for body, on both request and response.
-- Decode content encoding (gzip/deflate) properly, and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
-- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors][] for troubleshooting.
-
-
-## Difference from client-side fetch
-
-- See [Known Differences](https://github.com/npm/node-fetch-npm/blob/master/LIMITS.md) for details.
-- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
-- Pull requests are welcome too!
-
-
-## Install
-
-```sh
-$ npm install node-fetch-npm --save
-```
-
-
-## Usage
-
-```javascript
-import fetch from 'node-fetch';
-// or
-// const fetch = require('node-fetch');
-
-// if you are using your own Promise library, set it through fetch.Promise. Eg.
-
-// import Bluebird from 'bluebird';
-// fetch.Promise = Bluebird;
-
-// plain text or html
-
-fetch('https://github.com/')
- .then(res => res.text())
- .then(body => console.log(body));
-
-// json
-
-fetch('https://api.github.com/users/github')
- .then(res => res.json())
- .then(json => console.log(json));
-
-// catching network error
-// 3xx-5xx responses are NOT network errors, and should be handled in then()
-// you only need one catch() at the end of your promise chain
-
-fetch('http://domain.invalid/')
- .catch(err => console.error(err));
-
-// stream
-// the node.js way is to use stream when possible
-
-fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
- .then(res => {
- const dest = fs.createWriteStream('./octocat.png');
- res.body.pipe(dest);
- });
-
-// buffer
-// if you prefer to cache binary data in full, use buffer()
-// note that buffer() is a node-fetch only API
-
-import fileType from 'file-type';
-
-fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
- .then(res => res.buffer())
- .then(buffer => fileType(buffer))
- .then(type => { /* ... */ });
-
-// meta
-
-fetch('https://github.com/')
- .then(res => {
- console.log(res.ok);
- console.log(res.status);
- console.log(res.statusText);
- console.log(res.headers.raw());
- console.log(res.headers.get('content-type'));
- });
-
-// post
-
-fetch('http://httpbin.org/post', { method: 'POST', body: 'a=1' })
- .then(res => res.json())
- .then(json => console.log(json));
-
-// post with stream from file
-
-import { createReadStream } from 'fs';
-
-const stream = createReadStream('input.txt');
-fetch('http://httpbin.org/post', { method: 'POST', body: stream })
- .then(res => res.json())
- .then(json => console.log(json));
-
-// post with JSON
-
-var body = { a: 1 };
-fetch('http://httpbin.org/post', {
- method: 'POST',
- body: JSON.stringify(body),
- headers: { 'Content-Type': 'application/json' },
-})
- .then(res => res.json())
- .then(json => console.log(json));
-
-// post with form-data (detect multipart)
-
-import FormData from 'form-data';
-
-const form = new FormData();
-form.append('a', 1);
-fetch('http://httpbin.org/post', { method: 'POST', body: form })
- .then(res => res.json())
- .then(json => console.log(json));
-
-// post with form-data (custom headers)
-// note that getHeaders() is non-standard API
-
-import FormData from 'form-data';
-
-const form = new FormData();
-form.append('a', 1);
-fetch('http://httpbin.org/post', { method: 'POST', body: form, headers: form.getHeaders() })
- .then(res => res.json())
- .then(json => console.log(json));
-
-// node 7+ with async function
-
-(async function () {
- const res = await fetch('https://api.github.com/users/github');
- const json = await res.json();
- console.log(json);
-})();
-```
-
-See [test cases](https://github.com/npm/node-fetch-npm/blob/master/test/test.js) for more examples.
-
-
-## API
-
-### fetch(url[, options])
-
-- `url` A string representing the URL for fetching
-- `options` [Options](#fetch-options) for the HTTP(S) request
-- Returns: <code>Promise&lt;[Response](#class-response)&gt;</code>
-
-Perform an HTTP(S) fetch.
-
-`url` should be an absolute url, such as `http://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected promise.
-
-<a id="fetch-options"></a>
-#### Options
-
-The default values are shown after each option key.
-
-```js
-{
- // These properties are part of the Fetch Standard
- method: 'GET',
- headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below)
- body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
- redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
-
- // The following properties are node-fetch-npm extensions
- follow: 20, // maximum redirect count. 0 to not follow redirect
- timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies)
- compress: true, // support gzip/deflate content encoding. false to disable
- size: 0, // maximum response body size in bytes. 0 to disable
- agent: null // http(s).Agent instance, allows custom proxy, certificate etc.
-}
-```
-
-##### Default Headers
-
-If no values are set, the following request headers will be sent automatically:
-
-Header | Value
------------------ | --------------------------------------------------------
-`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
-`Accept` | `*/*`
-`Connection` | `close` _(when no `options.agent` is present)_
-`Content-Length` | _(automatically calculated, if possible)_
-`User-Agent` | `node-fetch-npm/1.0 (+https://github.com/npm/node-fetch-npm)`
-
-<a id="class-request"></a>
-### Class: Request
-
-An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
-
-Due to the nature of Node.js, the following properties are not implemented at this moment:
-
-- `type`
-- `destination`
-- `referrer`
-- `referrerPolicy`
-- `mode`
-- `credentials`
-- `cache`
-- `integrity`
-- `keepalive`
-
-The following node-fetch-npm extension properties are provided:
-
-- `follow`
-- `compress`
-- `counter`
-- `agent`
-
-See [options](#fetch-options) for exact meaning of these extensions.
-
-#### new Request(input[, options])
-
-<small>*(spec-compliant)*</small>
-
-- `input` A string representing a URL, or another `Request` (which will be cloned)
-- `options` [Options](#fetch-options) for the HTTP(S) request
-
-Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).
-
-In most cases, calling `fetch(url, options)` directly is simpler than creating a `Request` object.
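-
-A small sketch (the URL is only an example; per the changelog, `Request` is exposed on the `fetch` export):
-
-```js
-const req = new fetch.Request('https://api.github.com/users/github', {
-  headers: { 'Accept': 'application/json' }
-});
-
-fetch(req)
-  .then(res => res.json())
-  .then(json => console.log(json));
-```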
-
-<a id="class-response"></a>
-### Class: Response
-
-An HTTP(S) response. This class implements the [Body](#iface-body) interface.
-
-The following properties are not implemented in node-fetch-npm at this moment:
-
-- `Response.error()`
-- `Response.redirect()`
-- `type`
-- `redirected`
-- `trailer`
-
-#### new Response([body[, options]])
-
-<small>*(spec-compliant)*</small>
-
-- `body` A string or [Readable stream][node-readable]
-- `options` A [`ResponseInit`][response-init] options dictionary
-
-Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response).
-
-Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly.
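-
-When you do need one (for example in tests or a caching layer), a minimal sketch:
-
-```js
-const res = new Response('{"hello":"world"}', {
-  status: 200,
-  headers: { 'Content-Type': 'application/json' }
-});
-
-res.json().then(obj => console.log(obj.hello)); // 'world'
-```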
-
-<a id="class-headers"></a>
-### Class: Headers
-
-This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
-
-#### new Headers([init])
-
-<small>*(spec-compliant)*</small>
-
-- `init` Optional argument to pre-fill the `Headers` object
-
-Constructs a new `Headers` object. `init` can be `null`, a `Headers` object, a plain key-value object, or any iterable of name/value pairs.
-
-```js
-// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
-
-const meta = {
- 'Content-Type': 'text/xml',
- 'Breaking-Bad': '<3'
-};
-const headers = new Headers(meta);
-
-// The above is equivalent to
-const meta2 = [
-  [ 'Content-Type', 'text/xml' ],
-  [ 'Breaking-Bad', '<3' ]
-];
-const headers2 = new Headers(meta2);
-
-// You can in fact use any iterable object, like a Map or even another Headers
-const meta3 = new Map();
-meta3.set('Content-Type', 'text/xml');
-meta3.set('Breaking-Bad', '<3');
-const headers3 = new Headers(meta3);
-const copyOfHeaders = new Headers(headers3);
-```
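-
-A short sketch of the other `Headers` methods; lookups are case-insensitive, and `get()` joins multiple values:
-
-```js
-const headers = new Headers({ 'Accept': 'application/json' });
-headers.append('X-Custom', 'one');
-headers.append('X-Custom', 'two');
-
-headers.get('x-custom'); // 'one, two'
-headers.has('ACCEPT');   // true
-
-for (const [name, value] of headers) {
-  console.log(`${name}: ${value}`);
-}
-```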
-
-<a id="iface-body"></a>
-### Interface: Body
-
-`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
-
-The following methods are not yet implemented in node-fetch-npm:
-
-- `formData()`
-
-#### body.body
-
-<small>*(deviation from spec)*</small>
-
-* Node.js [`Readable` stream][node-readable]
-
-The data encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch-npm it is a Node.js [`Readable` stream][node-readable].
-
-#### body.bodyUsed
-
-<small>*(spec-compliant)*</small>
-
-* `Boolean`
-
-A boolean property indicating whether this body has already been consumed. Per spec, a consumed body cannot be used again.
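-
-A sketch of what this means in practice (the URL is a placeholder):
-
-```js
-fetch('https://example.com/').then(res => {
-  return res.text().then(() => {
-    console.log(res.bodyUsed); // true
-    return res.json();         // rejects: body used already
-  });
-}).catch(err => console.error(err.message));
-```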
-
-#### body.arrayBuffer()
-#### body.blob()
-#### body.json()
-#### body.text()
-
-<small>*(spec-compliant)*</small>
-
-* Returns: <code>Promise</code>
-
-Consume the body and return a promise that resolves to the body content in the corresponding format (`ArrayBuffer`, `Blob`, parsed JSON, or `String`, respectively).
-
-#### body.buffer()
-
-<small>*(node-fetch-npm extension)*</small>
-
-* Returns: <code>Promise&lt;Buffer&gt;</code>
-
-Consume the body and return a promise that will resolve to a Buffer.
-
-#### body.textConverted()
-
-<small>*(node-fetch-npm extension)*</small>
-
-* Returns: <code>Promise&lt;String&gt;</code>
-
-Identical to `body.text()`, except that instead of always assuming UTF-8, it sniffs the encoding (from the `Content-Type` header, HTML `<meta>` tags, or the XML declaration) and converts the text to UTF-8 where possible.
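-
-A brief sketch of both extension methods (the URLs are placeholders):
-
-```js
-fetch('https://example.com/')
-  .then(res => res.buffer())
-  .then(buf => console.log(Buffer.isBuffer(buf))); // true
-
-fetch('https://example.com/legacy-page')
-  .then(res => res.textConverted())
-  .then(text => console.log(text.slice(0, 80)));
-```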
-
-<a id="class-fetcherror"></a>
-### Class: FetchError
-
-<small>*(node-fetch-npm extension)*</small>
-
-An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
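-
-A sketch of one way to tell a `FetchError` apart from other failures (the hostname is intentionally bogus):
-
-```js
-fetch('https://nonexistent.example/')
-  .catch(err => {
-    if (err.name === 'FetchError') {
-      console.error(err.type, err.message); // e.g. 'system', with err.code set
-    } else {
-      throw err;
-    }
-  });
-```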
-
-## License
-
-MIT
-
-
-## Acknowledgement
-
-Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
-
-
-[npm-image]: https://img.shields.io/npm/v/node-fetch-npm.svg?style=flat-square
-[npm-url]: https://www.npmjs.com/package/node-fetch-npm
-[travis-image]: https://img.shields.io/travis/npm/node-fetch-npm.svg?style=flat-square
-[travis-url]: https://travis-ci.org/npm/node-fetch-npm
-[codecov-image]: https://img.shields.io/codecov/c/github/npm/node-fetch-npm.svg?style=flat-square
-[codecov-url]: https://codecov.io/gh/npm/node-fetch-npm
-[ERROR-HANDLING.md]: https://github.com/npm/node-fetch-npm/blob/master/ERROR-HANDLING.md
-[whatwg-fetch]: https://fetch.spec.whatwg.org/
-[response-init]: https://fetch.spec.whatwg.org/#responseinit
-[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
-[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
diff --git a/node_modules/node-fetch-npm/package.json b/node_modules/node-fetch-npm/package.json
deleted file mode 100644
index b256ce718..000000000
--- a/node_modules/node-fetch-npm/package.json
+++ /dev/null
@@ -1,102 +0,0 @@
-{
- "_from": "node-fetch-npm@^2.0.2",
- "_id": "node-fetch-npm@2.0.2",
- "_inBundle": false,
- "_integrity": "sha512-nJIxm1QmAj4v3nfCvEeCrYSoVwXyxLnaPBK5W1W5DGEJwjlKuC2VEUycGw5oxk+4zZahRrB84PUJJgEmhFTDFw==",
- "_location": "/node-fetch-npm",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "node-fetch-npm@^2.0.2",
- "name": "node-fetch-npm",
- "escapedName": "node-fetch-npm",
- "rawSpec": "^2.0.2",
- "saveSpec": null,
- "fetchSpec": "^2.0.2"
- },
- "_requiredBy": [
- "/make-fetch-happen",
- "/npm-profile/make-fetch-happen",
- "/npm-registry-fetch/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/node-fetch-npm/-/node-fetch-npm-2.0.2.tgz",
- "_shasum": "7258c9046182dca345b4208eda918daf33697ff7",
- "_spec": "node-fetch-npm@^2.0.2",
- "_where": "/Users/rebecca/code/npm/node_modules/make-fetch-happen",
- "author": {
- "name": "David Frank"
- },
- "bugs": {
- "url": "https://github.com/npm/node-fetch-npm/issues"
- },
- "bundleDependencies": false,
- "contributors": [
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org"
- },
- {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- }
- ],
- "dependencies": {
- "encoding": "^0.1.11",
- "json-parse-better-errors": "^1.0.0",
- "safe-buffer": "^5.1.1"
- },
- "deprecated": false,
- "description": "An npm cli-oriented fork of the excellent node-fetch",
- "devDependencies": {
- "chai": "^3.5.0",
- "chai-as-promised": "^6.0.0",
- "chai-iterator": "^1.1.1",
- "chai-string": "^1.4.0",
- "codecov": "^1.0.1",
- "cross-env": "^3.1.4",
- "form-data": "^2.2.0",
- "is-builtin-module": "^1.0.0",
- "mocha": "^3.5.0",
- "nyc": "^10.3.2",
- "parted": "^0.1.1",
- "promise": "^7.3.1",
- "resumer": "0.0.0",
- "standard": "^10.0.3",
- "standard-version": "^4.2.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8",
- "whatwg-url": "^4.8.0"
- },
- "engines": {
- "node": ">=4"
- },
- "files": [
- "src/*.js"
- ],
- "homepage": "https://github.com/npm/node-fetch-npm",
- "keywords": [
- "fetch",
- "http",
- "promise"
- ],
- "license": "MIT",
- "main": "src/index.js",
- "name": "node-fetch-npm",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-fetch-npm.git"
- },
- "scripts": {
- "coverage": "nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json",
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard src/*",
- "release": "standard-version -s",
- "report": "nyc --reporter lcov --reporter text mocha -R spec test/test.js",
- "test": "mocha test/test.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "2.0.2"
-}
diff --git a/node_modules/node-fetch-npm/src/blob.js b/node_modules/node-fetch-npm/src/blob.js
deleted file mode 100644
index 0a4462abd..000000000
--- a/node_modules/node-fetch-npm/src/blob.js
+++ /dev/null
@@ -1,109 +0,0 @@
-'use strict'
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
-// (MIT licensed)
-
-const Buffer = require('safe-buffer').Buffer
-
-const BUFFER = Symbol('buffer')
-const TYPE = Symbol('type')
-const CLOSED = Symbol('closed')
-
-class Blob {
- constructor () {
- Object.defineProperty(this, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
- })
-
- this[CLOSED] = false
- this[TYPE] = ''
-
- const blobParts = arguments[0]
- const options = arguments[1]
-
- const buffers = []
-
- if (blobParts) {
- const a = blobParts
- const length = Number(a.length)
- for (let i = 0; i < length; i++) {
- const element = a[i]
- let buffer
- if (Buffer.isBuffer(element)) {
- buffer = element
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(new Uint8Array(element.buffer, element.byteOffset, element.byteLength))
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(new Uint8Array(element))
- } else if (element instanceof Blob) {
- buffer = element[BUFFER]
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element))
- }
- buffers.push(buffer)
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers)
-
- let type = options && options.type !== undefined && String(options.type).toLowerCase()
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type
- }
- }
- get size () {
- return this[CLOSED] ? 0 : this[BUFFER].length
- }
- get type () {
- return this[TYPE]
- }
- get isClosed () {
- return this[CLOSED]
- }
- slice () {
- const size = this.size
-
- const start = arguments[0]
- const end = arguments[1]
- let relativeStart, relativeEnd
- if (start === undefined) {
- relativeStart = 0
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0)
- } else {
- relativeStart = Math.min(start, size)
- }
- if (end === undefined) {
- relativeEnd = size
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0)
- } else {
- relativeEnd = Math.min(end, size)
- }
- const span = Math.max(relativeEnd - relativeStart, 0)
-
- const buffer = this[BUFFER]
- const slicedBuffer = buffer.slice(
- relativeStart,
- relativeStart + span
- )
- const blob = new Blob([], { type: arguments[2] })
- blob[BUFFER] = slicedBuffer
- blob[CLOSED] = this[CLOSED]
- return blob
- }
- close () {
- this[CLOSED] = true
- }
-}
-exports = module.exports = Blob
-exports.BUFFER = BUFFER
-
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'BlobPrototype',
- writable: false,
- enumerable: false,
- configurable: true
-})
diff --git a/node_modules/node-fetch-npm/src/body.js b/node_modules/node-fetch-npm/src/body.js
deleted file mode 100644
index 9e7481857..000000000
--- a/node_modules/node-fetch-npm/src/body.js
+++ /dev/null
@@ -1,411 +0,0 @@
-'use strict'
-
-/**
- * body.js
- *
- * Body interface provides common methods for Request and Response
- */
-
-const Buffer = require('safe-buffer').Buffer
-
-const Blob = require('./blob.js')
-const BUFFER = Blob.BUFFER
-const convert = require('encoding').convert
-const parseJson = require('json-parse-better-errors')
-const FetchError = require('./fetch-error.js')
-const Stream = require('stream')
-
-const PassThrough = Stream.PassThrough
-const DISTURBED = Symbol('disturbed')
-
-/**
- * Body class
- *
- * Cannot use ES6 class because Body must be called with .call().
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-exports = module.exports = Body
-
-function Body (body, opts) {
- if (!opts) opts = {}
- const size = opts.size == null ? 0 : opts.size
- const timeout = opts.timeout == null ? 0 : opts.timeout
- if (body == null) {
- // body is undefined or null
- body = null
- } else if (typeof body === 'string') {
- // body is string
- } else if (body instanceof Blob) {
- // body is blob
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- } else if (body instanceof Stream) {
- // body is stream
- } else {
- // none of the above
- // coerce to string
- body = String(body)
- }
- this.body = body
- this[DISTURBED] = false
- this.size = size
- this.timeout = timeout
-}
-
-Body.prototype = {
- get bodyUsed () {
- return this[DISTURBED]
- },
-
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- arrayBuffer () {
- return consumeBody.call(this).then(buf => buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
- },
-
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- blob () {
- let ct = (this.headers && this.headers.get('content-type')) || ''
- return consumeBody.call(this).then(buf => Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }),
- {
- [BUFFER]: buf
- }
- ))
- },
-
- /**
- * Decode response as json
- *
- * @return Promise
- */
- json () {
- return consumeBody.call(this).then(buffer => parseJson(buffer.toString()))
- },
-
- /**
- * Decode response as text
- *
- * @return Promise
- */
- text () {
- return consumeBody.call(this).then(buffer => buffer.toString())
- },
-
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer () {
- return consumeBody.call(this)
- },
-
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted () {
- return consumeBody.call(this).then(buffer => convertBody(buffer, this.headers))
- }
-
-}
-
-Body.mixIn = function (proto) {
- for (const name of Object.getOwnPropertyNames(Body.prototype)) {
- // istanbul ignore else: future proof
- if (!(name in proto)) {
- const desc = Object.getOwnPropertyDescriptor(Body.prototype, name)
- Object.defineProperty(proto, name, desc)
- }
- }
-}
-
-/**
- * Decode buffers into utf-8 string
- *
- * @return Promise
- */
-function consumeBody (body) {
- if (this[DISTURBED]) {
- return Body.Promise.reject(new Error(`body used already for: ${this.url}`))
- }
-
- this[DISTURBED] = true
-
- // body is null
- if (this.body === null) {
- return Body.Promise.resolve(Buffer.alloc(0))
- }
-
- // body is string
- if (typeof this.body === 'string') {
- return Body.Promise.resolve(Buffer.from(this.body))
- }
-
- // body is blob
- if (this.body instanceof Blob) {
- return Body.Promise.resolve(this.body[BUFFER])
- }
-
- // body is buffer
- if (Buffer.isBuffer(this.body)) {
- return Body.Promise.resolve(this.body)
- }
-
- // istanbul ignore if: should never happen
- if (!(this.body instanceof Stream)) {
- return Body.Promise.resolve(Buffer.alloc(0))
- }
-
- // body is stream
- // get ready to actually consume the body
- let accum = []
- let accumBytes = 0
- let abort = false
-
- return new Body.Promise((resolve, reject) => {
- let resTimeout
-
- // allow timeout on slow response body
- if (this.timeout) {
- resTimeout = setTimeout(() => {
- abort = true
- reject(new FetchError(`Response timeout while trying to fetch ${this.url} (over ${this.timeout}ms)`, 'body-timeout'))
- }, this.timeout)
- }
-
- // handle stream error, such as incorrect content-encoding
- this.body.on('error', err => {
- reject(new FetchError(`Invalid response body while trying to fetch ${this.url}: ${err.message}`, 'system', err))
- })
-
- this.body.on('data', chunk => {
- if (abort || chunk === null) {
- return
- }
-
- if (this.size && accumBytes + chunk.length > this.size) {
- abort = true
- reject(new FetchError(`content size at ${this.url} over limit: ${this.size}`, 'max-size'))
- return
- }
-
- accumBytes += chunk.length
- accum.push(chunk)
- })
-
- this.body.on('end', () => {
- if (abort) {
- return
- }
-
- clearTimeout(resTimeout)
- resolve(Buffer.concat(accum))
- })
- })
-}
-
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody (buffer, headers) {
- const ct = headers.get('content-type')
- let charset = 'utf-8'
- let res, str
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct)
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString()
-
- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
- }
-
- // html4
- if (!res && str) {
- res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
-
- if (res) {
- res = /charset=(.*)/i.exec(res.pop())
- }
- }
-
- // xml
- if (!res && str) {
- res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
- }
-
- // found charset
- if (res) {
- charset = res.pop()
-
- // prevent decode issues when sites use incorrect encoding
- // ref: https://hsivonen.fi/encoding-menu/
- if (charset === 'gb2312' || charset === 'gbk') {
- charset = 'gb18030'
- }
- }
-
- // turn raw buffers into a single utf-8 buffer
- return convert(
- buffer
- , 'UTF-8'
- , charset
- ).toString()
-}
-
-/**
- * Clone body given Res/Req instance
- *
- * @param Mixed instance Response or Request instance
- * @return Mixed
- */
-exports.clone = function clone (instance) {
- let p1, p2
- let body = instance.body
-
- // don't allow cloning a used body
- if (instance.bodyUsed) {
- throw new Error('cannot clone body after it is used')
- }
-
- // check that body is a stream and not form-data object
- // note: we can't clone the form-data object without having it as a dependency
- if ((body instanceof Stream) && (typeof body.getBoundary !== 'function')) {
- // tee instance body
- p1 = new PassThrough()
- p2 = new PassThrough()
- body.pipe(p1)
- body.pipe(p2)
- // set instance body to teed body and return the other teed body
- instance.body = p1
- body = p2
- }
-
- return body
-}
-
-/**
- * Performs the operation "extract a `Content-Type` value from |object|" as
- * specified in the specification:
- * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
- *
- * This function assumes that instance.body is present and non-null.
- *
- * @param Mixed instance Response or Request instance
- */
-exports.extractContentType = function extractContentType (instance) {
- const body = instance.body
-
- // istanbul ignore if: Currently, because of a guard in Request, body
- // can never be null. Included here for completeness.
- if (body === null) {
- // body is null
- return null
- } else if (typeof body === 'string') {
- // body is string
- return 'text/plain;charset=UTF-8'
- } else if (body instanceof Blob) {
- // body is blob
- return body.type || null
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- return null
- } else if (typeof body.getBoundary === 'function') {
- // detect form data input from form-data module
- return `multipart/form-data;boundary=${body.getBoundary()}`
- } else {
- // body is stream
- // can't really do much about this
- return null
- }
-}
-
-exports.getTotalBytes = function getTotalBytes (instance) {
- const body = instance.body
-
- // istanbul ignore if: included for completion
- if (body === null) {
- // body is null
- return 0
- } else if (typeof body === 'string') {
- // body is string
- return Buffer.byteLength(body)
- } else if (body instanceof Blob) {
- // body is blob
- return body.size
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- return body.length
- } else if (body && typeof body.getLengthSync === 'function') {
- // detect form data input from form-data module
- if ((
- // 1.x
- body._lengthRetrievers &&
- body._lengthRetrievers.length === 0
- ) || (
- // 2.x
- body.hasKnownLength && body.hasKnownLength()
- )) {
- return body.getLengthSync()
- }
- return null
- } else {
- // body is stream
- // can't really do much about this
- return null
- }
-}
-
-exports.writeToStream = function writeToStream (dest, instance) {
- const body = instance.body
-
- if (body === null) {
- // body is null
- dest.end()
- } else if (typeof body === 'string') {
- // body is string
- dest.write(body)
- dest.end()
- } else if (body instanceof Blob) {
- // body is blob
- dest.write(body[BUFFER])
- dest.end()
- } else if (Buffer.isBuffer(body)) {
- // body is buffer
- dest.write(body)
- dest.end()
- } else {
- // body is stream
- body.pipe(dest)
- }
-}
-
-// expose Promise
-Body.Promise = global.Promise
diff --git a/node_modules/node-fetch-npm/src/common.js b/node_modules/node-fetch-npm/src/common.js
deleted file mode 100644
index 47b57c794..000000000
--- a/node_modules/node-fetch-npm/src/common.js
+++ /dev/null
@@ -1,92 +0,0 @@
-'use strict'
-/**
- * A set of utilities borrowed from Node.js' _http_common.js
- */
-
-/**
- * Verifies that the given val is a valid HTTP token
- * per the rules defined in RFC 7230
- * See https://tools.ietf.org/html/rfc7230#section-3.2.6
- *
- * Allowed characters in an HTTP token:
- * ^_`a-z 94-122
- * A-Z 65-90
- * - 45
- * 0-9 48-57
- * ! 33
- * #$%&' 35-39
- * *+ 42-43
- * . 46
- * | 124
- * ~ 126
- *
- * This implementation of checkIsHttpToken() loops over the string instead of
- * using a regular expression since the former is up to 180% faster with v8 4.9
- * depending on the string length (the shorter the string, the larger the
- * performance difference)
- *
- * Additionally, checkIsHttpToken() is currently designed to be inlinable by v8,
- * so take care when making changes to the implementation so that the source
- * code size does not exceed v8's default max_inlined_source_size setting.
- **/
-/* istanbul ignore next */
-function isValidTokenChar (ch) {
- if (ch >= 94 && ch <= 122) { return true }
- if (ch >= 65 && ch <= 90) { return true }
- if (ch === 45) { return true }
- if (ch >= 48 && ch <= 57) { return true }
- if (ch === 34 || ch === 40 || ch === 41 || ch === 44) { return false }
- if (ch >= 33 && ch <= 46) { return true }
- if (ch === 124 || ch === 126) { return true }
- return false
-}
-/* istanbul ignore next */
-function checkIsHttpToken (val) {
- if (typeof val !== 'string' || val.length === 0) { return false }
- if (!isValidTokenChar(val.charCodeAt(0))) { return false }
- const len = val.length
- if (len > 1) {
- if (!isValidTokenChar(val.charCodeAt(1))) { return false }
- if (len > 2) {
- if (!isValidTokenChar(val.charCodeAt(2))) { return false }
- if (len > 3) {
- if (!isValidTokenChar(val.charCodeAt(3))) { return false }
- for (var i = 4; i < len; i++) {
- if (!isValidTokenChar(val.charCodeAt(i))) { return false }
- }
- }
- }
- }
- return true
-}
-exports.checkIsHttpToken = checkIsHttpToken
-
-/**
- * True if val contains an invalid field-vchar
- * field-value = *( field-content / obs-fold )
- * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
- * field-vchar = VCHAR / obs-text
- *
- * checkInvalidHeaderChar() is currently designed to be inlinable by v8,
- * so take care when making changes to the implementation so that the source
- * code size does not exceed v8's default max_inlined_source_size setting.
- **/
-/* istanbul ignore next */
-function checkInvalidHeaderChar (val) {
- val += ''
- if (val.length < 1) { return false }
- var c = val.charCodeAt(0)
- if ((c <= 31 && c !== 9) || c > 255 || c === 127) { return true }
- if (val.length < 2) { return false }
- c = val.charCodeAt(1)
- if ((c <= 31 && c !== 9) || c > 255 || c === 127) { return true }
- if (val.length < 3) { return false }
- c = val.charCodeAt(2)
- if ((c <= 31 && c !== 9) || c > 255 || c === 127) { return true }
- for (var i = 3; i < val.length; ++i) {
- c = val.charCodeAt(i)
- if ((c <= 31 && c !== 9) || c > 255 || c === 127) { return true }
- }
- return false
-}
-exports.checkInvalidHeaderChar = checkInvalidHeaderChar
diff --git a/node_modules/node-fetch-npm/src/fetch-error.js b/node_modules/node-fetch-npm/src/fetch-error.js
deleted file mode 100644
index 338e1e272..000000000
--- a/node_modules/node-fetch-npm/src/fetch-error.js
+++ /dev/null
@@ -1,35 +0,0 @@
-'use strict'
-
-/**
- * fetch-error.js
- *
- * FetchError interface for operational errors
- */
-
-/**
- * Create FetchError instance
- *
- * @param String message Error message for human
- * @param String type Error type for machine
- * @param String systemError For Node.js system error
- * @return FetchError
- */
-module.exports = FetchError
-function FetchError (message, type, systemError) {
- Error.call(this, message)
-
- this.message = message
- this.type = type
-
- // when err.type is `system`, err.code contains system error code
- if (systemError) {
- this.code = this.errno = systemError.code
- }
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor)
-}
-
-FetchError.prototype = Object.create(Error.prototype)
-FetchError.prototype.constructor = FetchError
-FetchError.prototype.name = 'FetchError'
diff --git a/node_modules/node-fetch-npm/src/headers.js b/node_modules/node-fetch-npm/src/headers.js
deleted file mode 100644
index 28f71cd9b..000000000
--- a/node_modules/node-fetch-npm/src/headers.js
+++ /dev/null
@@ -1,296 +0,0 @@
-'use strict'
-
-/**
- * headers.js
- *
- * Headers class offers convenient helpers
- */
-
-const common = require('./common.js')
-const checkInvalidHeaderChar = common.checkInvalidHeaderChar
-const checkIsHttpToken = common.checkIsHttpToken
-
-function sanitizeName (name) {
- name += ''
- if (!checkIsHttpToken(name)) {
- throw new TypeError(`${name} is not a legal HTTP header name`)
- }
- return name.toLowerCase()
-}
-
-function sanitizeValue (value) {
- value += ''
- if (checkInvalidHeaderChar(value)) {
- throw new TypeError(`${value} is not a legal HTTP header value`)
- }
- return value
-}
-
-const MAP = Symbol('map')
-class Headers {
- /**
- * Headers class
- *
- * @param Object headers Response headers
- * @return Void
- */
- constructor (init) {
- this[MAP] = Object.create(null)
-
- if (init instanceof Headers) {
- const rawHeaders = init.raw()
- const headerNames = Object.keys(rawHeaders)
-
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value)
- }
- }
-
- return
- }
-
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) {
- // no op
- } else if (typeof init === 'object') {
- const method = init[Symbol.iterator]
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable')
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = []
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable')
- }
- pairs.push(Array.from(pair))
- }
-
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple')
- }
- this.append(pair[0], pair[1])
- }
- } else {
- // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- const value = init[key]
- this.append(key, value)
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object')
- }
-
- Object.defineProperty(this, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
- })
- }
-
- /**
- * Return first header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get (name) {
- const list = this[MAP][sanitizeName(name)]
- if (!list) {
- return null
- }
-
- return list.join(', ')
- }
-
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach (callback, thisArg) {
- let pairs = getHeaderPairs(this)
- let i = 0
- while (i < pairs.length) {
- const name = pairs[i][0]
- const value = pairs[i][1]
- callback.call(thisArg, value, name, this)
- pairs = getHeaderPairs(this)
- i++
- }
- }
-
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set (name, value) {
- this[MAP][sanitizeName(name)] = [sanitizeValue(value)]
- }
-
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append (name, value) {
- if (!this.has(name)) {
- this.set(name, value)
- return
- }
-
- this[MAP][sanitizeName(name)].push(sanitizeValue(value))
- }
-
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has (name) {
- return !!this[MAP][sanitizeName(name)]
- }
-
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete (name) {
- delete this[MAP][sanitizeName(name)]
- };
-
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw () {
- return this[MAP]
- }
-
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys () {
- return createHeadersIterator(this, 'key')
- }
-
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values () {
- return createHeadersIterator(this, 'value')
- }
-
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator] () {
- return createHeadersIterator(this, 'key+value')
- }
-}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator]
-
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'HeadersPrototype',
- writable: false,
- enumerable: false,
- configurable: true
-})
-
-function getHeaderPairs (headers, kind) {
- const keys = Object.keys(headers[MAP]).sort()
- return keys.map(
- kind === 'key'
- ? k => [k]
- : k => [k, headers.get(k)]
- )
-}
-
-const INTERNAL = Symbol('internal')
-
-function createHeadersIterator (target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype)
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
- }
- return iterator
-}
-
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next () {
- // istanbul ignore if
- if (!this ||
- Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator')
- }
-
- const target = this[INTERNAL].target
- const kind = this[INTERNAL].kind
- const index = this[INTERNAL].index
- const values = getHeaderPairs(target, kind)
- const len = values.length
- if (index >= len) {
- return {
- value: undefined,
- done: true
- }
- }
-
- const pair = values[index]
- this[INTERNAL].index = index + 1
-
- let result
- if (kind === 'key') {
- result = pair[0]
- } else if (kind === 'value') {
- result = pair[1]
- } else {
- result = pair
- }
-
- return {
- value: result,
- done: false
- }
- }
-}, Object.getPrototypeOf(
- Object.getPrototypeOf([][Symbol.iterator]())
-))
-
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-})
-
-module.exports = Headers
diff --git a/node_modules/node-fetch-npm/src/index.js b/node_modules/node-fetch-npm/src/index.js
deleted file mode 100644
index b2cf80f9e..000000000
--- a/node_modules/node-fetch-npm/src/index.js
+++ /dev/null
@@ -1,214 +0,0 @@
-'use strict'
-
-/**
- * index.js
- *
- * a request API compatible with window.fetch
- */
-
-const url = require('url')
-const http = require('http')
-const https = require('https')
-const zlib = require('zlib')
-const PassThrough = require('stream').PassThrough
-
-const Body = require('./body.js')
-const writeToStream = Body.writeToStream
-const Response = require('./response')
-const Headers = require('./headers')
-const Request = require('./request')
-const getNodeRequestOptions = Request.getNodeRequestOptions
-const FetchError = require('./fetch-error')
-const isURL = /^https?:/
-
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-exports = module.exports = fetch
-function fetch (uri, opts) {
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative')
- }
-
- Body.Promise = fetch.Promise
-
- // wrap http.request into fetch
- return new fetch.Promise((resolve, reject) => {
- // build request object
- const request = new Request(uri, opts)
- const options = getNodeRequestOptions(request)
-
- const send = (options.protocol === 'https:' ? https : http).request
-
- // http.request only supports a string as the host header; this hack makes a custom host header possible
- if (options.headers.host) {
- options.headers.host = options.headers.host[0]
- }
-
- // send request
- const req = send(options)
- let reqTimeout
-
- if (request.timeout) {
- req.once('socket', socket => {
- reqTimeout = setTimeout(() => {
- req.abort()
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'))
- }, request.timeout)
- })
- }
-
- req.on('error', err => {
- clearTimeout(reqTimeout)
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err))
- })
-
- req.on('response', res => {
- clearTimeout(reqTimeout)
-
- // handle redirect
- if (fetch.isRedirect(res.statusCode) && request.redirect !== 'manual') {
- if (request.redirect === 'error') {
- reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'))
- return
- }
-
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'))
- return
- }
-
- if (!res.headers.location) {
- reject(new FetchError(`redirect location header missing at: ${request.url}`, 'invalid-redirect'))
- return
- }
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of request:
- // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- const resolvedUrl = url.resolve(request.url, res.headers.location)
- let redirectURL = ''
- if (!isURL.test(res.headers.location)) {
- redirectURL = url.parse(resolvedUrl)
- } else {
- redirectURL = url.parse(res.headers.location)
- }
- if (url.parse(request.url).hostname !== redirectURL.hostname) {
- request.headers.delete('authorization')
- }
-
- // per fetch spec, for POST request with 301/302 response, or any request with 303 response, use GET when following redirect
- if (res.statusCode === 303 ||
- ((res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST')) {
- request.method = 'GET'
- request.body = null
- request.headers.delete('content-length')
- }
-
- request.counter++
-
- resolve(fetch(resolvedUrl, request))
- return
- }
-
- // normalize location header for manual redirect mode
- const headers = new Headers()
- for (const name of Object.keys(res.headers)) {
- if (Array.isArray(res.headers[name])) {
- for (const val of res.headers[name]) {
- headers.append(name, val)
- }
- } else {
- headers.append(name, res.headers[name])
- }
- }
- if (request.redirect === 'manual' && headers.has('location')) {
- headers.set('location', url.resolve(request.url, headers.get('location')))
- }
-
- // prepare response
- let body = res.pipe(new PassThrough())
- const responseOptions = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout
- }
-
- // HTTP-network fetch step 16.1.2
- const codings = headers.get('Content-Encoding')
-
- // HTTP-network fetch step 16.1.3: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- resolve(new Response(body, responseOptions))
- return
- }
-
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- }
-
- // for gzip
- if (codings === 'gzip' || codings === 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions))
- resolve(new Response(body, responseOptions))
- return
- }
-
- // for deflate
- if (codings === 'deflate' || codings === 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough())
- raw.once('data', chunk => {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate(zlibOptions))
- } else {
- body = body.pipe(zlib.createInflateRaw(zlibOptions))
- }
- resolve(new Response(body, responseOptions))
- })
- return
- }
-
- // otherwise, use response as-is
- resolve(new Response(body, responseOptions))
- })
-
- writeToStream(req, request)
- })
-};
-
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = code => code === 301 || code === 302 || code === 303 || code === 307 || code === 308
-
-// expose Promise
-fetch.Promise = global.Promise
-exports.Headers = Headers
-exports.Request = Request
-exports.Response = Response
-exports.FetchError = FetchError
diff --git a/node_modules/node-fetch-npm/src/request.js b/node_modules/node-fetch-npm/src/request.js
deleted file mode 100644
index 326dc4606..000000000
--- a/node_modules/node-fetch-npm/src/request.js
+++ /dev/null
@@ -1,174 +0,0 @@
-'use strict'
-
-/**
- * request.js
- *
- * Request class contains server only options
- */
-
-const url = require('url')
-const Headers = require('./headers.js')
-const Body = require('./body.js')
-const clone = Body.clone
-const extractContentType = Body.extractContentType
-const getTotalBytes = Body.getTotalBytes
-
-const PARSED_URL = Symbol('url')
-
-/**
- * Request class
- *
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
- */
-class Request {
- constructor (input, init) {
- if (!init) init = {}
- let parsedURL
-
- // normalize input
- if (!(input instanceof Request)) {
- if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
- // will fall into this branch also (since their `toString()` will return
- // `href` property anyway)
- parsedURL = url.parse(input.href)
- } else {
- // coerce input to a string before attempting to parse
- parsedURL = url.parse(`${input}`)
- }
- input = {}
- } else {
- parsedURL = url.parse(input.url)
- }
-
- let method = init.method || input.method || 'GET'
-
- if ((init.body != null || (input instanceof Request && input.body !== null)) &&
- (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body')
- }
-
- let inputBody = init.body != null
- ? init.body
- : input instanceof Request && input.body !== null
- ? clone(input)
- : null
-
- Body.call(this, inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0
- })
-
- // fetch spec options
- this.method = method.toUpperCase()
- this.redirect = init.redirect || input.redirect || 'follow'
- this.headers = new Headers(init.headers || input.headers || {})
-
- if (init.body != null) {
- const contentType = extractContentType(this)
- if (contentType !== null && !this.headers.has('Content-Type')) {
- this.headers.append('Content-Type', contentType)
- }
- }
-
- // server only options
- this.follow = init.follow !== undefined
- ? init.follow : input.follow !== undefined
- ? input.follow : 20
- this.compress = init.compress !== undefined
- ? init.compress : input.compress !== undefined
- ? input.compress : true
- this.counter = init.counter || input.counter || 0
- this.agent = init.agent || input.agent
-
- this[PARSED_URL] = parsedURL
- Object.defineProperty(this, Symbol.toStringTag, {
- value: 'Request',
- writable: false,
- enumerable: false,
- configurable: true
- })
- }
-
- get url () {
- return url.format(this[PARSED_URL])
- }
-
- /**
- * Clone this request
- *
- * @return Request
- */
- clone () {
- return new Request(this)
- }
-}
-
-Body.mixIn(Request.prototype)
-
-Object.defineProperty(Request.prototype, Symbol.toStringTag, {
- value: 'RequestPrototype',
- writable: false,
- enumerable: false,
- configurable: true
-})
-
-exports = module.exports = Request
-
-exports.getNodeRequestOptions = function getNodeRequestOptions (request) {
- const parsedURL = request[PARSED_URL]
- const headers = new Headers(request.headers)
-
- // fetch step 3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*')
- }
-
- // Basic fetch
- if (!parsedURL.protocol || !parsedURL.hostname) {
- throw new TypeError('Only absolute URLs are supported')
- }
-
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported')
- }
-
- // HTTP-network-or-cache fetch steps 5-9
- let contentLengthValue = null
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
- contentLengthValue = '0'
- }
- if (request.body != null) {
- const totalBytes = getTotalBytes(request)
- if (typeof totalBytes === 'number') {
- contentLengthValue = String(totalBytes)
- }
- }
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue)
- }
-
- // HTTP-network-or-cache fetch step 12
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)')
- }
-
- // HTTP-network-or-cache fetch step 16
- if (request.compress) {
- headers.set('Accept-Encoding', 'gzip,deflate')
- }
- if (!headers.has('Connection') && !request.agent) {
- headers.set('Connection', 'close')
- }
-
- // HTTP-network fetch step 4
- // chunked encoding is handled by Node.js
-
- return Object.assign({}, parsedURL, {
- method: request.method,
- headers: headers.raw(),
- agent: request.agent
- })
-}
diff --git a/node_modules/node-fetch-npm/src/response.js b/node_modules/node-fetch-npm/src/response.js
deleted file mode 100644
index 6fb050d74..000000000
--- a/node_modules/node-fetch-npm/src/response.js
+++ /dev/null
@@ -1,71 +0,0 @@
-'use strict'
-
-/**
- * response.js
- *
- * Response class provides content decoding
- */
-
-const STATUS_CODES = require('http').STATUS_CODES
-const Headers = require('./headers.js')
-const Body = require('./body.js')
-const clone = Body.clone
-
-/**
- * Response class
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Response {
- constructor (body, opts) {
- if (!opts) opts = {}
- Body.call(this, body, opts)
-
- this.url = opts.url
- this.status = opts.status || 200
- this.statusText = opts.statusText || STATUS_CODES[this.status]
-
- this.headers = new Headers(opts.headers)
-
- Object.defineProperty(this, Symbol.toStringTag, {
- value: 'Response',
- writable: false,
- enumerable: false,
- configurable: true
- })
- }
-
- /**
- * Convenience property representing if the request ended normally
- */
- get ok () {
- return this.status >= 200 && this.status < 300
- }
-
- /**
- * Clone this response
- *
- * @return Response
- */
- clone () {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok
- })
- }
-}
-
-Body.mixIn(Response.prototype)
-
-Object.defineProperty(Response.prototype, Symbol.toStringTag, {
- value: 'ResponsePrototype',
- writable: false,
- enumerable: false,
- configurable: true
-})
-module.exports = Response
diff --git a/node_modules/npm-logical-tree/CHANGELOG.md b/node_modules/npm-logical-tree/CHANGELOG.md
deleted file mode 100644
index 1a9b37116..000000000
--- a/node_modules/npm-logical-tree/CHANGELOG.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="1.2.1"></a>
-## [1.2.1](https://github.com/npm/logical-tree/compare/v1.2.0...v1.2.1) (2018-01-19)
-
-
-### Bug Fixes
-
-* **requires:** stop requiring version match -- only what require would pick up ([6388fbd](https://github.com/npm/logical-tree/commit/6388fbd))
-
-
-
-<a name="1.2.0"></a>
-# [1.2.0](https://github.com/npm/logical-tree/compare/v1.1.0...v1.2.0) (2017-10-13)
-
-
-### Bug Fixes
-
-* **json:** fix repository url ([e51448a](https://github.com/npm/logical-tree/commit/e51448a))
-
-
-### Features
-
-* **api:** additional utility functions for dealing with trees ([23f6e69](https://github.com/npm/logical-tree/commit/23f6e69))
-
-
-
-<a name="1.1.0"></a>
-# [1.1.0](https://github.com/npm/npm-logical-tree/compare/v1.0.0...v1.1.0) (2017-10-11)
-
-
-### Features
-
-* **requiredBy:** add requiredBy field to nodes ([c4056fb](https://github.com/npm/npm-logical-tree/commit/c4056fb))
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-10-07)
-
-
-### Features
-
-* **api:** Initial Commit™ ([1025259](https://github.com/npm/npm-logical-tree/commit/1025259))
diff --git a/node_modules/npm-logical-tree/LICENSE.md b/node_modules/npm-logical-tree/LICENSE.md
deleted file mode 100644
index 8d28acf86..000000000
--- a/node_modules/npm-logical-tree/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-logical-tree/README.md b/node_modules/npm-logical-tree/README.md
deleted file mode 100644
index 33260d9f2..000000000
--- a/node_modules/npm-logical-tree/README.md
+++ /dev/null
@@ -1,147 +0,0 @@
-# npm-logical-tree [![npm version](https://img.shields.io/npm/v/npm-logical-tree.svg)](https://npm.im/npm-logical-tree) [![license](https://img.shields.io/npm/l/npm-logical-tree.svg)](https://npm.im/npm-logical-tree) [![Travis](https://img.shields.io/travis/npm/logical-tree.svg)](https://travis-ci.org/npm/logical-tree) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/logical-tree?svg=true)](https://ci.appveyor.com/project/npm/logical-tree) [![Coverage Status](https://coveralls.io/repos/github/npm/logical-tree/badge.svg?branch=latest)](https://coveralls.io/github/npm/logical-tree?branch=latest)
-
-[`npm-logical-tree`](https://github.com/npm/npm-logical-tree) is a Node.js
-library that takes the contents of a `package.json` and `package-lock.json` (or
-`npm-shrinkwrap.json`) and returns a nested tree data structure representing the
-logical relationships between the different dependencies.
-
-## Install
-
-`$ npm install npm-logical-tree`
-
-## Table of Contents
-
-* [Example](#example)
-* [Contributing](#contributing)
-* [API](#api)
- * [`logicalTree`](#logical-tree)
- * [`logicalTree.node`](#make-node)
- * [`tree.isRoot`](#is-root)
- * [`tree.addDep`](#add-dep)
- * [`tree.delDep`](#del-dep)
- * [`tree.getDep`](#get-dep)
- * [`tree.path`](#path)
- * [`tree.hasCycle`](#has-cycle)
- * [`tree.forEach`](#for-each)
- * [`tree.forEachAsync`](#for-each-async)
-
-### Example
-
-```javascript
-const fs = require('fs')
-const logicalTree = require('npm-logical-tree')
-
-const pkg = require('./package.json')
-const pkgLock = require('./package-lock.json')
-
-logicalTree(pkg, pkgLock)
-// returns:
-LogicalTree {
- name: 'npm-logical-tree',
- version: '1.0.0',
- address: null,
- optional: false,
- dev: false,
- bundled: false,
- resolved: undefined,
- integrity: undefined,
- requiredBy: Set { },
- dependencies:
- Map {
- 'foo' => LogicalTree {
- name: 'foo',
- version: '1.2.3',
- address: 'foo',
- optional: false,
- dev: true,
- bundled: false,
- resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
- integrity: 'sha1-rYUK/p261/SXByi0suR/7Rw4chw=',
- dependencies: Map { ... },
- requiredBy: Set { ... },
- },
- ...
- }
-}
-```
-
-### Contributing
-
-The npm team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the [Code of
-Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### <a name="logical-tree"></a> `> logicalTree(pkg, lock) -> LogicalTree`
-
-Calculates a logical tree based on a matching `package.json` and
-`package-lock.json` pair. A "logical tree" is a fully-nested dependency graph
-for an npm package, as opposed to a physical tree which might be flattened.
-
-`logical-tree` will represent deduplicated/flattened nodes using the same object
-throughout the tree, so duplication can be checked by object identity.
-
-##### Example
-
-```javascript
-const pkg = require('./package.json')
-const pkgLock = require('./package-lock.json')
-
-logicalTree(pkg, pkgLock)
-// returns:
-LogicalTree {
- name: 'npm-logical-tree',
- version: '1.0.0',
- address: null,
- optional: false,
- dev: false,
- bundled: false,
- resolved: undefined,
- integrity: undefined,
- requiredBy: Set { },
- dependencies:
- Map {
- 'foo' => LogicalTree {
- name: 'foo',
- version: '1.2.3',
- address: 'foo',
- optional: false,
- dev: true,
- bundled: false,
- resolved: 'https://registry.npmjs.org/foo/-/foo-1.2.3.tgz',
- integrity: 'sha1-rYUK/p261/SXByi0suR/7Rw4chw=',
- requiredBy: Set { ... },
- dependencies: Map { ... }
- },
- ...
- }
-}
-```
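-
-A small traversal sketch building on the example above; because deduplicated packages are represented by the same node object, collecting them in a `Set` yields the unique logical nodes:
-
-```javascript
-const seen = new Set()
-logicalTree(pkg, pkgLock).forEach((node, next) => {
-  seen.add(node)
-  next() // descend into this node's dependencies
-})
-console.log(`${seen.size} unique logical nodes`)
-```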
-
-#### <a name="make-node"></a> `> logicalTree.node(name, [address, [opts]]) -> LogicalTree`
-
-Manually creates a new LogicalTree node.
-
-##### Options
-
-* `opts.version` - version of the node.
-* `opts.optional` - is this node an optionalDep?
-* `opts.dev` - is this node a devDep?
-* `opts.bundled` - is this bundled?
-* `opts.resolved` - resolved address.
-* `opts.integrity` - SRI string.
-
-##### Example
-```javascript
-logicalTree.node('hello', 'subpath:to:@foo/bar', {dev: true})
-```
diff --git a/node_modules/npm-logical-tree/index.js b/node_modules/npm-logical-tree/index.js
deleted file mode 100644
index 10ab7599f..000000000
--- a/node_modules/npm-logical-tree/index.js
+++ /dev/null
@@ -1,192 +0,0 @@
-'use strict'
-
-let path
-
-class LogicalTree {
- constructor (name, address, opts) {
- this.name = name
- this.version = opts.version
- this.address = address || ''
- this.optional = !!opts.optional
- this.dev = !!opts.dev
- this.bundled = !!opts.bundled
- this.resolved = opts.resolved
- this.integrity = opts.integrity
- this.dependencies = new Map()
- this.requiredBy = new Set()
- }
-
- get isRoot () { return !this.requiredBy.size }
-
- addDep (dep) {
- this.dependencies.set(dep.name, dep)
- dep.requiredBy.add(this)
- return this
- }
-
- delDep (dep) {
- this.dependencies.delete(dep.name)
- dep.requiredBy.delete(this)
- return this
- }
-
- getDep (name) {
- return this.dependencies.get(name)
- }
-
- path (prefix) {
- if (this.isRoot) {
- // The address of the root is the prefix itself.
- return prefix || ''
- } else {
- if (!path) { path = require('path') }
- return path.join(
- prefix || '',
- 'node_modules',
- this.address.replace(/:/g, '/node_modules/')
- )
- }
- }
-
- // This finds cycles _from_ a given node: if some deeper dep has
- // its own cycle, but that cycle does not refer to this node,
- // it will return false.
- hasCycle (_seen, _from) {
- if (!_seen) { _seen = new Set() }
- if (!_from) { _from = this }
- for (let dep of this.dependencies.values()) {
- if (_seen.has(dep)) { continue }
- _seen.add(dep)
- if (dep === _from || dep.hasCycle(_seen, _from)) {
- return true
- }
- }
- return false
- }
-
- forEachAsync (fn, opts, _pending) {
- if (!opts) { opts = _pending || {} }
- if (!_pending) { _pending = new Map() }
- const P = opts.Promise || Promise
- if (_pending.has(this)) {
- return P.resolve(this.hasCycle() || _pending.get(this))
- }
- const pending = P.resolve().then(() => {
- return fn(this, () => {
- return promiseMap(
- this.dependencies.values(),
- dep => dep.forEachAsync(fn, opts, _pending),
- opts
- )
- })
- })
- _pending.set(this, pending)
- return pending
- }
-
- forEach (fn, _seen) {
- if (!_seen) { _seen = new Set() }
- if (_seen.has(this)) { return }
- _seen.add(this)
- fn(this, () => {
- for (let dep of this.dependencies.values()) {
- dep.forEach(fn, _seen)
- }
- })
- }
-}
-
-module.exports = lockTree
-function lockTree (pkg, pkgLock, opts) {
- const tree = makeNode(pkg.name, null, pkg)
- const allDeps = new Map()
- Array.from(
- new Set(Object.keys(pkg.devDependencies || {})
- .concat(Object.keys(pkg.optionalDependencies || {}))
- .concat(Object.keys(pkg.dependencies || {})))
- ).forEach(name => {
- let dep = allDeps.get(name)
- if (!dep) {
- const depNode = (pkgLock.dependencies || {})[name]
- dep = makeNode(name, name, depNode)
- }
- addChild(dep, tree, allDeps, pkgLock)
- })
- return tree
-}
-
-module.exports.node = makeNode
-function makeNode (name, address, opts) {
- return new LogicalTree(name, address, opts || {})
-}
-
-function addChild (dep, tree, allDeps, pkgLock) {
- tree.addDep(dep)
- allDeps.set(dep.address, dep)
- const addr = dep.address
- const lockNode = atAddr(pkgLock, addr)
- Object.keys(lockNode.requires || {}).forEach(name => {
- const tdepAddr = reqAddr(pkgLock, name, addr)
- let tdep = allDeps.get(tdepAddr)
- if (!tdep) {
- tdep = makeNode(name, tdepAddr, atAddr(pkgLock, tdepAddr))
- addChild(tdep, dep, allDeps, pkgLock)
- } else {
- dep.addDep(tdep)
- }
- })
-}
-
-module.exports._reqAddr = reqAddr
-function reqAddr (pkgLock, name, fromAddr) {
- const lockNode = atAddr(pkgLock, fromAddr)
- const child = (lockNode.dependencies || {})[name]
- if (child) {
- return `${fromAddr}:${name}`
- } else {
- const parts = fromAddr.split(':')
- while (parts.length) {
- parts.pop()
- const joined = parts.join(':')
- const parent = atAddr(pkgLock, joined)
- if (parent) {
- const child = (parent.dependencies || {})[name]
- if (child) {
- return `${joined}${parts.length ? ':' : ''}${name}`
- }
- }
- }
- const err = new Error(`${name} not accessible from ${fromAddr}`)
- err.pkgLock = pkgLock
- err.target = name
- err.from = fromAddr
- throw err
- }
-}
-
-module.exports._atAddr = atAddr
-function atAddr (pkgLock, addr) {
- if (!addr.length) { return pkgLock }
- const parts = addr.split(':')
- return parts.reduce((acc, next) => {
- return acc && (acc.dependencies || {})[next]
- }, pkgLock)
-}
-
-function promiseMap (arr, fn, opts, _index) {
- _index = _index || 0
- const P = (opts && opts.Promise) || Promise
- if (P.map) {
- return P.map(arr, fn, opts)
- } else {
- if (!(arr instanceof Array)) {
- arr = Array.from(arr)
- }
- if (_index >= arr.length) {
- return P.resolve()
- } else {
- return P.resolve(fn(arr[_index], _index, arr))
- .then(() => promiseMap(arr, fn, opts, _index + 1))
- }
- }
-}
diff --git a/node_modules/npm-logical-tree/package.json b/node_modules/npm-logical-tree/package.json
deleted file mode 100644
index 33e8f1d0e..000000000
--- a/node_modules/npm-logical-tree/package.json
+++ /dev/null
@@ -1,83 +0,0 @@
-{
- "_from": "npm-logical-tree@^1.2.1",
- "_id": "npm-logical-tree@1.2.1",
- "_inBundle": false,
- "_integrity": "sha512-AJI/qxDB2PWI4LG1CYN579AY1vCiNyWfkiquCsJWqntRu/WwimVrC8yXeILBFHDwxfOejxewlmnvW9XXjMlYIg==",
- "_location": "/npm-logical-tree",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "npm-logical-tree@^1.2.1",
- "name": "npm-logical-tree",
- "escapedName": "npm-logical-tree",
- "rawSpec": "^1.2.1",
- "saveSpec": null,
- "fetchSpec": "^1.2.1"
- },
- "_requiredBy": [
- "/libcipm"
- ],
- "_resolved": "https://registry.npmjs.org/npm-logical-tree/-/npm-logical-tree-1.2.1.tgz",
- "_shasum": "44610141ca24664cad35d1e607176193fd8f5b88",
- "_spec": "npm-logical-tree@^1.2.1",
- "_where": "/Users/rebecca/code/npm/node_modules/libcipm",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/npm/logical-tree/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "contributors": [
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org"
- }
- ],
- "deprecated": false,
- "description": "Calculate 'logical' trees from a package.json + package-lock",
- "devDependencies": {
- "bluebird": "^3.5.1",
- "nyc": "^11.1.0",
- "standard": "^10.0.2",
- "standard-version": "^4.2.0",
- "tap": "^10.7.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/npm/logical-tree#readme",
- "keywords": [
- "npm",
- "package manager"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "npm-logical-tree",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/logical-tree.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard lib test *.js",
- "release": "standard-version -s",
- "test": "nyc --all -- tap -J test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "1.2.1"
-}
diff --git a/node_modules/protoduck/CHANGELOG.md b/node_modules/protoduck/CHANGELOG.md
deleted file mode 100644
index f4902ab2d..000000000
--- a/node_modules/protoduck/CHANGELOG.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="5.0.1"></a>
-## [5.0.1](https://github.com/zkat/protoduck/compare/v5.0.0...v5.0.1) (2018-10-26)
-
-
-### Bug Fixes
-
-* **security:** npm audit fix ([55a2007](https://github.com/zkat/protoduck/commit/55a2007))
-* **standard:** standard --fix ([2dedbb8](https://github.com/zkat/protoduck/commit/2dedbb8))
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/protoduck/compare/v4.0.0...v5.0.0) (2017-12-12)
-
-
-### Bug Fixes
-
-* **license:** relicense to MIT ([55cdd89](https://github.com/zkat/protoduck/commit/55cdd89))
-* **platforms:** drop support for node 4 and 7 ([07a19b1](https://github.com/zkat/protoduck/commit/07a19b1))
-
-
-### BREAKING CHANGES
-
-* **platforms:** node 4 and node 7 are no longer officially supported
-* **license:** license changed from CC0-1.0 to MIT
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/protoduck/compare/v3.3.2...v4.0.0) (2017-04-17)
-
-
-### Bug Fixes
-
-* **test:** .name is inconsistently available ([3483f4a](https://github.com/zkat/protoduck/commit/3483f4a))
-
-
-### Features
-
-* **api:** Fresh New API™ ([#2](https://github.com/zkat/protoduck/issues/2)) ([534e5cf](https://github.com/zkat/protoduck/commit/534e5cf))
-* **constraints:** added optional where-constraints ([16ad124](https://github.com/zkat/protoduck/commit/16ad124))
-* **defaults:** allow default impls without arrays in defs ([6cf7d84](https://github.com/zkat/protoduck/commit/6cf7d84))
-* **deps:** use genfun[@4](https://github.com/4) ([f6810a7](https://github.com/zkat/protoduck/commit/f6810a7))
-* **meta:** bringing project stuff up to date ([61791da](https://github.com/zkat/protoduck/commit/61791da))
-
-
-### BREAKING CHANGES
-
-* **api:** The API was significantly overhauled.
-
-* New protocol creation is now through protoduck.define() instead of protoduck()
-* Implementations are through Duck#impl instead of Duck(...)
-* The `private` option was removed
-* Static protocols were removed -- only method-style protocols are available now.
-* As part of that: the target argument to impl can no longer be omitted
-* The main export object is now the metaobject. protoduck.impl can be used to extend to MOP
-* .isDerivable is now a property on Duck instances, not a static method
-* .hasImpl is now a method on Duck instances, not a static method
-* Protoduck will now genfunnify existing functions as default methods for genfuns declared in a protocol when implementing
-* Error messages have been overhauled to be more helpful
-* **deps:** nextMethod is now an extra argument to methods
-* **meta:** node@<4 is no longer supported
diff --git a/node_modules/protoduck/LICENSE b/node_modules/protoduck/LICENSE
deleted file mode 100644
index ab41caa64..000000000
--- a/node_modules/protoduck/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-Copyright (c) 2017 Kat Marchán
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
-OR OTHER DEALINGS IN THE SOFTWARE.
-
diff --git a/node_modules/protoduck/README.md b/node_modules/protoduck/README.md
deleted file mode 100644
index 2dc70a307..000000000
--- a/node_modules/protoduck/README.md
+++ /dev/null
@@ -1,346 +0,0 @@
-# protoduck [![npm version](https://img.shields.io/npm/v/protoduck.svg)](https://npm.im/protoduck) [![license](https://img.shields.io/npm/l/protoduck.svg)](https://npm.im/protoduck) [![Travis](https://img.shields.io/travis/zkat/protoduck.svg)](https://travis-ci.org/zkat/protoduck) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/protoduck?svg=true)](https://ci.appveyor.com/project/zkat/protoduck) [![Coverage Status](https://coveralls.io/repos/github/zkat/protoduck/badge.svg?branch=latest)](https://coveralls.io/github/zkat/protoduck?branch=latest)
-
-[`protoduck`](https://github.com/zkat/protoduck) is a JavaScript library for
-making groups of methods, called "protocols".
-
-If you're familiar with the concept of ["duck
-typing"](https://en.wikipedia.org/wiki/Duck_typing), then it might make sense to
-think of protocols as things that explicitly define what methods you need in
-order to "clearly be a duck".
-
-## Install
-
-`$ npm install -S protoduck`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Guide](#guide)
- * [Introduction](#introduction)
- * [Defining protocols](#defining-protocols)
- * [Implementations](#protocol-impls)
- * [Multiple dispatch](#multiple-dispatch)
- * [Constraints](#constraints)
-* [API](#api)
- * [`define()`](#define)
- * [`proto.impl()`](#impl)
-
-### Example
-
-```javascript
-const protoduck = require('protoduck')
-
-// Quackable is a protocol that defines three methods
-const Quackable = protoduck.define({
- walk: [],
- talk: [],
- isADuck: [() => true] // default implementation -- it's optional!
-})
-
-// `duck` must implement `Quackable` for this function to work. It doesn't
-// matter what type or class duck is, as long as it implements Quackable.
-function doStuffToDucks (duck) {
- if (!duck.isADuck()) {
- throw new Error('I want a duck!')
- } else {
- console.log(duck.walk())
- console.log(duck.talk())
- }
-}
-
-// ...In a different package:
-const ducks = require('./ducks')
-
-class Duck {}
-
-// Implement the protocol on the Duck class.
-ducks.Quackable.impl(Duck, {
-  walk () { return "*hobble hobble*" },
- talk () { return "QUACK QUACK" }
-})
-
-// main.js
-ducks.doStuffToDucks(new Duck()) // works!
-```
-
-### Features
-
-* Verifies implementations in case methods are missing or the wrong ones are added
-* Helpful, informative error messages
-* Optional default method implementations
-* Fresh JavaScript Feel™ -- methods work just like native methods when called
-* Methods can dispatch on arguments, not just `this` ([multimethods](https://npm.im/genfun))
-* Type constraints
-
-### Guide
-
-#### Introduction
-
-Like most Object-oriented languages, JavaScript comes with its own way of
-defining methods: You simply add regular `function`s as properties to regular
-objects, and when you do `obj.method()`, it calls the right code! ES6/ES2015
-further extended this by adding a `class` syntax that allowed this same system
-to work with more familiar syntax sugar: `class Foo { method() { ... } }`.
-
-The point of "protocols" is to have more explicit definitions of what methods
-"go together". That is, a protocol is a description of a type of object your
-code interacts with. If someone passes an object into your library, and it fits
-your defined protocol, the assumption is that the object will work just as well.
-
-Duck typing is a common term for this sort of thing: If it walks like a duck,
-and it talks like a duck, then it may as well be a duck, as far as any of our
-code is concerned.
-
-Many other languages have similar or identical concepts under different names:
-Java's interfaces, Haskell's typeclasses, Rust's traits. Elixir and Clojure both
-call them "protocols" as well.
-
-One big advantage to using these protocols is that they let users define their
-own versions of some abstraction, without requiring the type to inherit from
-another -- protocols are independent of inheritance, even though they're able to
-work together with it. If you've ever found yourself in some sort of inheritance
-mess, this is exactly the sort of thing you use to escape it.
-
-#### Defining Protocols
-
-The first step to using `protoduck` is to define a protocol. Protocol
-definitions look like this:
-
-```javascript
-// import the library first!
-const protoduck = require('protoduck')
-
-// `Ducklike` is the name of our protocol. It defines what it means for
-// something to be "like a duck", as far as our code is concerned.
-const Ducklike = protoduck.define([], {
- walk: [], // This says that the protocol requires a "walk" method.
-  talk: [], // and ducks also need to talk
- peck: [] // and they can even be pretty scary
-})
-```
-
-Protocols by themselves don't really *do* anything; they simply define what
-methods are included in the protocol, and thus what will need to be implemented.
-
-#### Protocol Impls
-
-The simplest type of definitions for protocols are as regular methods. In this
-style, protocols end up working exactly like normal JavaScript methods: they're
-added as properties of the target type/object, and we call them using the
-`foo.method()` syntax. `this` is accessible inside the methods, as usual.
-
-Implementation syntax is very similar to protocol definitions, using `.impl`:
-
-```javascript
-class Dog {}
-
-// Implementing `Ducklike` for `Dog`s
-Ducklike.impl(Dog, [], {
-  walk () { return '*pads on all fours*' },
-  talk () { return 'woof woof. I mean "quack" >_>' },
- peck (victim) { return 'Can I just bite ' + victim + ' instead?...' }
-})
-```
-
-So now, our `Dog` class has three extra methods: `walk`, `talk`, and `peck`,
-and we can just call them:
-
-```javascript
-const pupper = new Dog()
-
-pupper.walk() // *pads on all fours*
-pupper.talk() // woof woof. I mean "quack" >_>
-pupper.peck('this string') // Can I just bite this string instead?...
-```
-
-#### Multiple Dispatch
-
-You may have noticed before that we have these `[]` in various places that don't
-seem to have any obvious purpose.
-
-These arrays allow protocols to be implemented not just for a single value of
-`this`, but across *all arguments*. That is, you can have methods in these
-protocols that use both `this`, and the first argument (or any other arguments)
-in order to determine what code to actually execute.
-
-This type of method is called a multimethod, and is one of the differences
-between protoduck and the default `class` syntax.
-
-To use it: in the protocol *definitions*, you put matching
-strings in different spots where those empty arrays were, and when you
-*implement* the protocol, you give the definition the actual types/objects you
-want to implement it on, and it takes care of mapping types to the strings you
-defined, and making sure the right code is run:
-
-```javascript
-const Playful = protoduck.define(['friend'], { // <---\
- playWith: ['friend'] // <------------ these correspond to each other
-})
-
-class Cat {}
-class Human {}
-class Dog {}
-
-// The first protocol is for Cat/Human combination
-Playful.impl(Cat, [Human], {
- playWith (human) {
- return '*headbutt* *purr* *cuddle* omg ilu, ' + human.name
- }
-})
-
-// And we define it *again* for a different combination
-Playful.impl(Cat, [Dog], {
- playWith (dog) {
- return '*scratches* *hisses* omg i h8 u, ' + dog.name
- }
-})
-
-// depending on what you call it with, it runs different methods:
-const cat = new Cat()
-const human = new Human()
-const dog = new Dog()
-
-cat.playWith(human) // *headbutt* *purr* *cuddle* omg ilu, Sam
-cat.playWith(dog) // *scratches* *hisses* omg i h8 u, Pupper
-```
-
-#### Constraints
-
-Sometimes, you want to have all the functionality of a certain protocol, but you
-want to add a few requirements or other bits and pieces. Usually, you would have
-to define the entire functionality of the "parent" protocol in your own protocol
-in order to pull this off. This isn't very DRY and is thus prone to errors, missing
-or out-of-sync functionality, or other issues. You could also just tell users
-"hey, if you implement this, make sure to implement that", but there's no
-guarantee they'll know about it, or know which arguments map to what.
-
-This is where constraints come in: You can define a protocol that expects
-anything that implements it to *also* implement one or more "parent" protocols.
-
-```javascript
-const Show = protoduck.define({
- // This syntax allows default impls without using arrays.
- toString () {
- return Object.prototype.toString.call(this)
- },
- toJSON () {
- return JSON.stringify(this)
- }
-})
-
-const Log = protoduck.define({
- log () { console.log(this.toString()) }
-}, {
- where: Show()
- // Also valid:
- // [Show('this'), Show('a')]
- // [Show('this', ['a', 'b'])]
-})
-
-// This fails with an error: must implement Show:
-Log.impl(MyThing)
-
-// So derive Show first...
-Show.impl(MyThing)
-// And now it's ok!
-Log.impl(MyThing)
-```
-
-### API
-
-#### <a name="define"></a> `define(<types>?, <spec>, <opts>)`
-
-Defines a new protocol across arguments of the types defined by `<types>`, which
-will expect implementations for the functions specified in `<spec>`.
-
-If `<types>` is missing, it will be treated the same as if it were an empty
-array.
-
-The types in `<spec>` entries must map, by string name, to the type names
-specified in `<types>`, or be an empty array if `<types>` is omitted. The types
-in `<spec>` will then be used to map between method implementations for the
-individual functions, and the provided types in the impl.
-
-Protocols can include an `opts` object as the last argument, with the following
-available options:
-
-* `opts.name` `{String}` - The name to use when referring to the protocol.
-
-* `opts.where` `{Array[Constraint]|Constraint}` - Protocol constraints to use.
-
-* `opts.metaobject` - Accepts an object implementing the
- `Protoduck` protocol, which can be used to alter protocol definition
- mechanisms in `protoduck`.
-
-##### Example
-
-```javascript
-const Eq = protoduck.define(['a'], {
- eq: ['a']
-})
-```
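-
-As a purely illustrative sketch (the `Container` protocol below is
-hypothetical, not part of protoduck), `opts.name` and `opts.where` from the
-list above might be combined like this:
-
-```javascript
-// A named protocol whose element type 'a' must also implement Eq against
-// itself (see the Constraints section above).
-const Container = protoduck.define(['a'], {
-  has: ['a']
-}, {
-  name: 'Container',
-  where: Eq('a', ['a'])
-})
-```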
-
-#### <a name="impl"></a> `proto.impl(<target>, <types>?, <implementations>?)`
-
-Adds a new implementation to the given protocol across `<types>`.
-
-`<implementations>` must be an object with functions matching the protocol's
-API. If given, the types in `<types>` will be mapped to their corresponding
-method arguments according to the original protocol definition.
-
-If a protocol is derivable -- that is, all of its functions have default impls --
-then the `<implementations>` object can be omitted entirely, and the protocol
-will be automatically derived for the given `<types>` (a short sketch of this
-follows the examples below).
-
-##### Example
-
-```javascript
-import protoduck from 'protoduck'
-
-// Singly-dispatched protocols
-const Show = protoduck.define({
- show: []
-})
-
-class Foo {
- constructor (name) {
- this.name = name
- }
-}
-
-Show.impl(Foo, {
- show () { return `[object Foo(${this.name})]` }
-})
-
-const f = new Foo('alex')
-f.show() === '[object Foo(alex)]'
-```
-
-```javascript
-import protoduck from 'protoduck'
-
-// Multi-dispatched protocols
-const Comparable = protoduck.define(['target'], {
- compare: ['target'],
-})
-
-class Foo {}
-class Bar {}
-class Baz {}
-
-Comparable.impl(Foo, [Bar], {
- compare (bar) { return 'bars are ok' }
-})
-
-Comparable.impl(Foo, [Baz], {
- compare (baz) { return 'but bazzes are better' }
-})
-
-const foo = new Foo()
-const bar = new Bar()
-const baz = new Baz()
-
-foo.compare(bar) // 'bars are ok'
-foo.compare(baz) // 'but bazzes are better'
-```
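-
-As noted above, a derivable protocol (one where every function has a default
-impl) can be implemented without an `<implementations>` object at all. A
-minimal sketch, assuming a hypothetical `Loggable` protocol and `Widget` class:
-
-```javascript
-import protoduck from 'protoduck'
-
-const Loggable = protoduck.define({
-  log () { console.log(this.toString()) } // default impl, so it's derivable
-})
-
-class Widget {}
-
-// No implementations object needed: the default methods are used as-is.
-Loggable.impl(Widget)
-
-new Widget().log() // prints '[object Object]'
-```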
diff --git a/node_modules/protoduck/index.js b/node_modules/protoduck/index.js
deleted file mode 100644
index b6a83e1f7..000000000
--- a/node_modules/protoduck/index.js
+++ /dev/null
@@ -1,349 +0,0 @@
-'use strict'
-
-const genfun = require('genfun')
-
-class Duck extends Function {
- // Duck.impl(Foo, [String, Array], { frob (str, arr) { ... }})
- impl (target, types, impls) {
- if (!impls && !isArray(types)) {
- impls = types
- types = []
- }
- if (!impls && this.isDerivable) {
- impls = this._defaultImpls
- }
- if (!impls) {
- impls = {}
- }
- if (typeof target === 'function' && !target.isGenfun) {
- target = target.prototype
- }
- checkImpls(this, target, impls)
- checkArgTypes(this, types)
- this._constraints.forEach(c => {
- if (!c.verify(target, types)) {
- throw new Error(`Implementations of ${
- this.name || 'this protocol'
- } must first implement ${
- c.parent.name || 'its constraint protocols defined in opts.where.'
- }`)
- }
- })
- this._methodNames.forEach(name => {
- defineMethod(this, name, target, types, impls)
- })
- }
-
- hasImpl (arg, args) {
- args = args || []
- const fns = this._methodNames
- var gf
- if (typeof arg === 'function' && !arg.isGenfun) {
- arg = arg.prototype
- }
- args = args.map(arg => {
- if (typeof arg === 'function' && !arg.isGenfun) {
- return arg.prototype
- } else {
- return arg
- }
- })
- for (var i = 0; i < fns.length; i++) {
- gf = arg[fns[i]]
- if (!gf ||
- (gf.hasMethod
- ? !gf.hasMethod.apply(gf, args)
- : typeof gf === 'function')) {
- return false
- }
- }
- return true
- }
-
- // MyDuck.matches('a', ['this', 'c'])
- matches (thisType, argTypes) {
- if (!argTypes && isArray(thisType)) {
- argTypes = thisType
- thisType = 'this'
- }
- if (!thisType) {
- thisType = 'this'
- }
- if (!argTypes) {
- argTypes = []
- }
- return new Constraint(this, thisType, argTypes)
- }
-}
-Duck.prototype.isDuck = true
-Duck.prototype.isProtocol = true
-
-const Protoduck = module.exports = define(['duck'], {
- createGenfun: ['duck', _metaCreateGenfun],
- addMethod: ['duck', _metaAddMethod]
-}, { name: 'Protoduck' })
-
-const noImplFound = module.exports.noImplFound = genfun.noApplicableMethod
-
-module.exports.define = define
-function define (types, spec, opts) {
- if (!isArray(types)) {
- // protocol(spec, opts?) syntax for method-based protocols
- opts = spec
- spec = types
- types = []
- }
- const duck = function (thisType, argTypes) {
- return duck.matches(thisType, argTypes)
- }
- Object.setPrototypeOf(duck, Duck.prototype)
- duck.isDerivable = true
- Object.defineProperty(duck, 'name', {
- value: (opts && opts.name) || 'Protocol'
- })
- if (opts && opts.where) {
- let where = opts.where
- if (!isArray(opts.where)) { where = [opts.where] }
- duck._constraints = where.map(w => w.isProtocol // `where: [Foo]`
- ? w.matches()
- : w
- )
- } else {
- duck._constraints = []
- }
- duck.isProtocol = true
- duck._metaobject = opts && opts.metaobject
- duck._types = types
- duck._defaultImpls = {}
- duck._gfTypes = {}
- duck._methodNames = Object.keys(spec)
- duck._methodNames.forEach(name => {
- checkMethodSpec(duck, name, spec)
- })
- duck._constraints.forEach(c => c.attach(duck))
- return duck
-}
-
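-// Validates one entry of the protocol spec: records a trailing default impl if
-// there is one (otherwise the protocol is no longer derivable) and maps the
-// entry's dispatch type names onto indices into the protocol's type list.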
-function checkMethodSpec (duck, name, spec) {
- let gfTypes = spec[name]
- if (typeof gfTypes === 'function') {
- duck._defaultImpls[name] = gfTypes
- gfTypes = [gfTypes]
- } if (typeof gfTypes[gfTypes.length - 1] === 'function') {
- duck._defaultImpls[name] = gfTypes.pop()
- } else {
- duck.isDerivable = false
- }
- duck._gfTypes[name] = gfTypes.map(typeId => {
- const idx = duck._types.indexOf(typeId)
- if (idx === -1) {
- throw new Error(
- `type '${
- typeId
- }' for function '${
- name
- }' does not match any protocol types (${
- duck._types.join(', ')
- }).`
- )
- } else {
- return idx
- }
- })
-}
-
-function defineMethod (duck, name, target, types, impls) {
- const methodTypes = duck._gfTypes[name].map(function (typeIdx) {
- return types[typeIdx]
- })
- for (let i = methodTypes.length - 1; i >= 0; i--) {
- if (methodTypes[i] === undefined) {
- methodTypes.pop()
- } else {
- break
- }
- }
- const useMetaobject = duck._metaobject && duck._metaobject !== Protoduck
- // `target` does not necessarily inherit from `Object`
- if (!Object.prototype.hasOwnProperty.call(target, name)) {
- // Make a genfun if there's nothing there
- const gf = useMetaobject
- ? duck._metaobject.createGenfun(duck, target, name, null)
- : _metaCreateGenfun(duck, target, name, null)
- target[name] = gf
- } else if (typeof target[name] === 'function' && !target[name].isGenfun) {
- // Turn non-gf functions into genfuns
- const gf = useMetaobject
- ? duck._metaobject.createGenfun(duck, target, name, target[name])
- : _metaCreateGenfun(duck, target, name, target[name])
- target[name] = gf
- }
-
- const fn = impls[name] || duck._defaultImpls[name]
- if (fn) { // checkImpls made sure this is safe
- useMetaobject
- ? duck._metaobject.addMethod(duck, target, name, methodTypes, fn)
- : _metaAddMethod(duck, target, name, methodTypes, fn)
- }
-}
-
-function checkImpls (duck, target, impls) {
- duck._methodNames.forEach(function (name) {
- if (
- !impls[name] &&
- !duck._defaultImpls[name] &&
- // Existing methods on the target are acceptable defaults.
- typeof target[name] !== 'function'
- ) {
- throw new Error(`Missing implementation for ${
- formatMethod(duck, name, duck.name)
- }. Make sure the method is present in your ${
- duck.name || 'protocol'
- } definition. Required methods: ${
- duck._methodNames.filter(m => {
- return !duck._defaultImpls[m]
- }).map(m => formatMethod(duck, m)).join(', ')
- }.`)
- }
- })
- Object.keys(impls).forEach(function (name) {
- if (duck._methodNames.indexOf(name) === -1) {
- throw new Error(
- `${name}() was included in the impl, but is not part of ${
- duck.name || 'the protocol'
- }. Allowed methods: ${
- duck._methodNames.map(m => formatMethod(duck, m)).join(', ')
- }.`
- )
- }
- })
-}
-
-function formatMethod (duck, name, withDuckName) {
- return `${
- withDuckName && duck.name ? `${duck.name}#` : ''
- }${name}(${duck._gfTypes[name].map(n => duck._types[n]).join(', ')})`
-}
-
-function checkArgTypes (duck, types) {
- var requiredTypes = duck._types
- if (types.length > requiredTypes.length) {
- throw new Error(
- `${
- duck.name || 'Protocol'
- } expects to be defined across ${
- requiredTypes.length
- } type${requiredTypes.length > 1 ? 's' : ''}, but ${
- types.length
- } ${types.length > 1 ? 'were' : 'was'} specified.`
- )
- }
-}
-
-function typeName (obj) {
- return (/\[object ([a-zA-Z0-9]+)\]/).exec(({}).toString.call(obj))[1]
-}
-
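-// Attaches a descriptive "no applicable method" handler to the genfun so that
-// calling an unimplemented protocol method throws an ENOIMPL error explaining
-// which protocol still needs to be implemented.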
-function installMethodErrorMessage (proto, gf, target, name) {
- noImplFound.add([gf], function (gf, thisArg, args) {
- let parent = Object.getPrototypeOf(thisArg)
- while (parent && parent[name] === gf) {
- parent = Object.getPrototypeOf(parent)
- }
- if (parent && parent[name] && typeof parent[name] === 'function') {
- }
- var msg = `No ${typeName(thisArg)} impl for ${
- proto.name ? `${proto.name}#` : ''
- }${name}(${[].map.call(args, typeName).join(', ')}). You must implement ${
- proto.name
- ? formatMethod(proto, name, true)
- : `the protocol ${formatMethod(proto, name)} belongs to`
- } in order to call ${typeName(thisArg)}#${name}(${
- [].map.call(args, typeName).join(', ')
- }).`
- const err = new Error(msg)
- err.protocol = proto
- err.function = gf
- err.thisArg = thisArg
- err.args = args
- err.code = 'ENOIMPL'
- throw err
- })
-}
-
-function isArray (x) {
- return Object.prototype.toString.call(x) === '[object Array]'
-}
-
-// Metaobject Protocol
-Protoduck.impl(Protoduck) // defaults configured by definition
-
-function _metaCreateGenfun (proto, target, name, deflt) {
- var gf = genfun({
- default: deflt,
- name: `${proto.name ? `${proto.name}#` : ''}${name}`
- })
- installMethodErrorMessage(proto, gf, target, name)
- gf.duck = proto
- return gf
-}
-
-function _metaAddMethod (duck, target, name, methodTypes, fn) {
- return target[name].add(methodTypes, fn)
-}
-
-// Constraints
-class Constraint {
- constructor (parent, thisType, argTypes) {
- this.parent = parent
- this.target = thisType
- this.types = argTypes
- }
-
- attach (obj) {
- this.child = obj
- if (this.target === 'this') {
- this.thisIdx = 'this'
- } else {
- const idx = this.child._types.indexOf(this.target)
- if (idx === -1) {
- this.thisIdx = null
- } else {
- this.thisIdx = idx
- }
- }
- this.indices = this.types.map(typeId => {
- if (typeId === 'this') {
- return 'this'
- } else {
- const idx = this.child._types.indexOf(typeId)
- if (idx === -1) {
- return null
- } else {
- return idx
- }
- }
- })
- }
-
- verify (target, types) {
- const thisType = (
- this.thisIdx === 'this' || this.thisIdx == null
- )
- ? target
- : types[this.thisIdx]
- const parentTypes = this.indices.map(idx => {
- if (idx === 'this') {
- return target
- } else if (idx === 'this') {
- return types[this.thisIdx]
- } else if (idx === null) {
- return Object
- } else {
- return types[idx] || Object.prototype
- }
- })
- return this.parent.hasImpl(thisType, parentTypes)
- }
-}
-Constraint.prototype.isConstraint = true
diff --git a/node_modules/protoduck/package.json b/node_modules/protoduck/package.json
deleted file mode 100644
index 3ee47ac6a..000000000
--- a/node_modules/protoduck/package.json
+++ /dev/null
@@ -1,88 +0,0 @@
-{
- "_from": "protoduck@^5.0.1",
- "_id": "protoduck@5.0.1",
- "_inBundle": false,
- "_integrity": "sha512-WxoCeDCoCBY55BMvj4cAEjdVUFGRWed9ZxPlqTKYyw1nDDTQ4pqmnIMAGfJlg7Dx35uB/M+PHJPTmGOvaCaPTg==",
- "_location": "/protoduck",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "protoduck@^5.0.1",
- "name": "protoduck",
- "escapedName": "protoduck",
- "rawSpec": "^5.0.1",
- "saveSpec": null,
- "fetchSpec": "^5.0.1"
- },
- "_requiredBy": [
- "/pacote"
- ],
- "_resolved": "https://registry.npmjs.org/protoduck/-/protoduck-5.0.1.tgz",
- "_shasum": "03c3659ca18007b69a50fd82a7ebcc516261151f",
- "_spec": "protoduck@^5.0.1",
- "_where": "/Users/zkat/Documents/code/work/npm/node_modules/pacote",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/protoduck/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "genfun": "^5.0.0"
- },
- "deprecated": false,
- "description": "Fancy duck typing for the most serious of ducks.",
- "devDependencies": {
- "mocha": "^5.2.0",
- "nyc": "^13.1.0",
- "standard": "^12.0.1",
- "standard-version": "^4.0.0",
- "tap": "^12.0.1",
- "weallbehave": "^1.0.3",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "index.js"
- ],
- "homepage": "https://github.com/zkat/protoduck#readme",
- "keywords": [
- "oop",
- "util",
- "object oriented",
- "duck type",
- "ducktype",
- "ducktyping",
- "protocols",
- "multimethod",
- "clojure",
- "haskell",
- "rust",
- "generic",
- "functions",
- "clos",
- "polymorphism",
- "impl",
- "typeclass",
- "traits"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "protoduck",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/protoduck.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "5.0.1"
-}
diff --git a/node_modules/socks-proxy-agent/.travis.yml b/node_modules/socks-proxy-agent/.travis.yml
deleted file mode 100644
index 1ff82daa2..000000000
--- a/node_modules/socks-proxy-agent/.travis.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-sudo: false
-
-language: node_js
-
-node_js:
- - "6"
- - "7"
- - "8"
- - "9"
-
-install:
- - PATH="`npm bin`:`npm bin -g`:$PATH"
- # Install dependencies and build
- - npm install
-
-script:
- # Output useful info for debugging
- - node --version
- - npm --version
- # Run tests
- - npm test
diff --git a/node_modules/socks-proxy-agent/History.md b/node_modules/socks-proxy-agent/History.md
deleted file mode 100644
index b0266b5e3..000000000
--- a/node_modules/socks-proxy-agent/History.md
+++ /dev/null
@@ -1,96 +0,0 @@
-
-3.0.1 / 2017-09-18
-==================
-
- * update "agent-base" to v4.1.0
-
-3.0.0 / 2017-06-13
-==================
-
- * [BREAKING] drop support for Node < 4
- * update deps, remove `extend` dependency
- * rename `socks-proxy-agent.js` to `index.js`
-
-2.1.1 / 2017-06-13
-==================
-
- * fix a bug where `close` would emit before `end`
- * use "raw-body" module for tests
- * prettier
-
-2.1.0 / 2017-05-24
-==================
-
- * DRY post-lookup logic
- * Fix an error in readme (#13, @599316527)
- * travis: test node v5
- * travis: test iojs v1, 2, 3 and node.js v4
- * test: use ssl-cert-snakeoil cert files
- * Authentication support (#9, @baryshev)
-
-2.0.0 / 2015-07-10
-==================
-
- * API CHANGE! Removed `secure` boolean second argument in constructor
- * upgrade to "agent-base" v2 API
- * package: update "extend" to v3
-
-1.0.2 / 2015-07-01
-==================
-
- * remove "v4a" from description
- * socks-proxy-agent: cast `port` to a Number
- * travis: attempt to make node v0.8 work
- * travis: test node v0.12, don't test v0.11
- * test: pass `rejectUnauthorized` as a proxy opt
- * test: catch http.ClientRequest errors
- * test: add self-signed SSL server cert files
- * test: refactor to use local SOCKS, HTTP and HTTPS servers
- * README: use SVG for Travis-CI badge
-
-1.0.1 / 2015-03-01
-==================
-
- * switched from using "socks-client" to "socks" (#5, @JoshGlazebrook)
-
-1.0.0 / 2015-02-11
-==================
-
- * add client-side DNS lookup logic for 4 and 5 version socks proxies
- * remove dead `onproxyconnect()` code function
- * use a switch statement to decide the socks `version`
- * refactor to use "socks-client" instead of "rainbowsocks"
- * package: remove "rainbowsocks" dependency
- * package: allow any "mocha" v2
-
-0.1.2 / 2014-06-11
-==================
-
- * package: update "rainbowsocks" to v0.1.2
- * travis: don't test node v0.9
-
-0.1.1 / 2014-04-09
-==================
-
- * package: update outdated dependencies
- * socks-proxy-agent: pass `secure` flag when no `new`
- * socks-proxy-agent: small code cleanup
-
-0.1.0 / 2013-11-19
-==================
-
- * add .travis.yml file
- * socks-proxy-agent: properly mix in the proxy options
- * socks-proxy-agent: coerce the `secureEndpoint` into a Boolean
- * socks-proxy-agent: use "extend" module
- * socks-proxy-agent: update to "agent-base" v1 API
-
-0.0.2 / 2013-07-24
-==================
-
- * socks-proxy-agent: properly set the `defaultPort` property
-
-0.0.1 / 2013-07-11
-==================
-
- * Initial release
diff --git a/node_modules/socks-proxy-agent/README.md b/node_modules/socks-proxy-agent/README.md
deleted file mode 100644
index 36028ad9f..000000000
--- a/node_modules/socks-proxy-agent/README.md
+++ /dev/null
@@ -1,133 +0,0 @@
-socks-proxy-agent
-================
-### A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS
-[![Build Status](https://travis-ci.org/TooTallNate/node-socks-proxy-agent.svg?branch=master)](https://travis-ci.org/TooTallNate/node-socks-proxy-agent)
-
-This module provides an `http.Agent` implementation that connects to a
-specified SOCKS proxy server, and can be used with the built-in `http`
-or `https` modules.
-
-It can also be used in conjunction with the `ws` module to establish a WebSocket
-connection over a SOCKS proxy. See the "Examples" section below.
-
-Installation
-------------
-
-Install with `npm`:
-
-``` bash
-$ npm install socks-proxy-agent
-```
-
-
-Examples
---------
-
-#### `http` module example
-
-``` js
-var url = require('url');
-var http = require('http');
-var SocksProxyAgent = require('socks-proxy-agent');
-
-// SOCKS proxy to connect to
-var proxy = process.env.socks_proxy || 'socks://127.0.0.1:9050';
-console.log('using proxy server %j', proxy);
-
-// HTTP endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'http://nodejs.org/api/';
-console.log('attempting to GET %j', endpoint);
-var opts = url.parse(endpoint);
-
-// create an instance of the `SocksProxyAgent` class with the proxy server information
-var agent = new SocksProxyAgent(proxy);
-opts.agent = agent;
-
-http.get(opts, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-#### `https` module example
-
-``` js
-var url = require('url');
-var https = require('https');
-var SocksProxyAgent = require('socks-proxy-agent');
-
-// SOCKS proxy to connect to
-var proxy = process.env.socks_proxy || 'socks://127.0.0.1:9050';
-console.log('using proxy server %j', proxy);
-
-// HTTP endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'https://encrypted.google.com/';
-console.log('attempting to GET %j', endpoint);
-var opts = url.parse(endpoint);
-
-// create an instance of the `SocksProxyAgent` class with the proxy server information
-var agent = new SocksProxyAgent(proxy);
-opts.agent = agent;
-
-https.get(opts, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-#### `ws` WebSocket connection example
-
-``` js
-var WebSocket = require('ws');
-var SocksProxyAgent = require('socks-proxy-agent');
-
-// SOCKS proxy to connect to
-var proxy = process.env.socks_proxy || 'socks://127.0.0.1:9050';
-console.log('using proxy server %j', proxy);
-
-// WebSocket endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'ws://echo.websocket.org';
-console.log('attempting to connect to WebSocket %j', endpoint);
-
-// create an instance of the `SocksProxyAgent` class with the proxy server information
-var agent = new SocksProxyAgent(proxy);
-
-// initiate the WebSocket connection
-var socket = new WebSocket(endpoint, { agent: agent });
-
-socket.on('open', function () {
- console.log('"open" event!');
- socket.send('hello world');
-});
-
-socket.on('message', function (data, flags) {
- console.log('"message" event! %j %j', data, flags);
- socket.close();
-});
-```
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/socks-proxy-agent/index.js b/node_modules/socks-proxy-agent/index.js
deleted file mode 100644
index 5418abfac..000000000
--- a/node_modules/socks-proxy-agent/index.js
+++ /dev/null
@@ -1,145 +0,0 @@
-/**
- * Module dependencies.
- */
-
-var tls; // lazy-loaded...
-var url = require('url');
-var dns = require('dns');
-var Agent = require('agent-base');
-var SocksClient = require('socks').SocksClient;
-var inherits = require('util').inherits;
-
-/**
- * Module exports.
- */
-
-module.exports = SocksProxyAgent;
-
-/**
- * The `SocksProxyAgent`.
- *
- * @api public
- */
-
-function SocksProxyAgent(opts) {
- if (!(this instanceof SocksProxyAgent)) return new SocksProxyAgent(opts);
- if ('string' == typeof opts) opts = url.parse(opts);
- if (!opts)
- throw new Error(
- 'a SOCKS proxy server `host` and `port` must be specified!'
- );
- Agent.call(this, opts);
-
- var proxy = Object.assign({}, opts);
-
- // prefer `hostname` over `host`, because of `url.parse()`
- proxy.host = proxy.hostname || proxy.host;
-
- // SOCKS doesn't *technically* have a default port, but this is
- // the same default that `curl(1)` uses
- proxy.port = +proxy.port || 1080;
-
- if (proxy.host && proxy.path) {
- // if both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete proxy.path;
- delete proxy.pathname;
- }
-
- // figure out if we want socks v4 or v5, based on the "protocol" used.
- // Defaults to 5.
- proxy.lookup = false;
- switch (proxy.protocol) {
- case 'socks4:':
- proxy.lookup = true;
- // pass through
- case 'socks4a:':
- proxy.version = 4;
- break;
- case 'socks5:':
- proxy.lookup = true;
- // pass through
- case 'socks:': // no version specified, default to 5h
- case 'socks5h:':
- proxy.version = 5;
- break;
- default:
- throw new TypeError(
- 'A "socks" protocol must be specified! Got: ' + proxy.protocol
- );
- }
-
- if (proxy.auth) {
- var auth = proxy.auth.split(':');
- proxy.authentication = { username: auth[0], password: auth[1] };
- proxy.userid = auth[0];
- }
- this.proxy = proxy;
-}
-inherits(SocksProxyAgent, Agent);
-
-/**
- * Initiates a SOCKS connection to the specified SOCKS proxy server,
- * which in turn connects to the specified remote host and port.
- *
- * @api public
- */
-
-SocksProxyAgent.prototype.callback = function connect(req, opts, fn) {
- var proxy = this.proxy;
-
- // called once the SOCKS proxy has connected to the specified remote endpoint
- function onhostconnect(err, result) {
- if (err) return fn(err);
-
- var socket = result.socket;
-
- var s = socket;
- if (opts.secureEndpoint) {
- // since the proxy is connecting to an SSL server, we have
- // to upgrade this socket connection to an SSL connection
- if (!tls) tls = require('tls');
- opts.socket = socket;
- opts.servername = opts.host;
- opts.host = null;
- opts.hostname = null;
- opts.port = null;
- s = tls.connect(opts);
- }
-
- fn(null, s);
- }
-
- // called for the `dns.lookup()` callback
- function onlookup(err, ip) {
- if (err) return fn(err);
- options.destination.host = ip;
- SocksClient.createConnection(options, onhostconnect);
- }
-
- var options = {
- proxy: {
- ipaddress: proxy.host,
- port: +proxy.port,
- type: proxy.version
- },
- destination: {
- port: +opts.port
- },
- command: 'connect'
- };
-
- if (proxy.authentication) {
- options.proxy.userId = proxy.userid;
- options.proxy.password = proxy.authentication.password;
- }
-
- if (proxy.lookup) {
- // client-side DNS resolution for "4" and "5" socks proxy versions
- dns.lookup(opts.host, onlookup);
- } else {
- // proxy hostname DNS resolution for "4a" and "5h" socks proxy servers
- onlookup(null, opts.host);
- }
-}
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/.travis.yml b/node_modules/socks-proxy-agent/node_modules/agent-base/.travis.yml
deleted file mode 100644
index 6ce862c6f..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-sudo: false
-
-language: node_js
-
-node_js:
- - "4"
- - "5"
- - "6"
- - "7"
- - "8"
- - "9"
-
-install:
- - PATH="`npm bin`:`npm bin -g`:$PATH"
- # Install dependencies and build
- - npm install
-
-script:
- # Output useful info for debugging
- - node --version
- - npm --version
- # Run tests
- - npm test
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/History.md b/node_modules/socks-proxy-agent/node_modules/agent-base/History.md
deleted file mode 100644
index 80c88dc40..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/History.md
+++ /dev/null
@@ -1,113 +0,0 @@
-
-4.2.0 / 2018-01-15
-==================
-
- * Add support for returning an `http.Agent` instance
- * Optimize promisifying logic
- * Set `timeout` to null for proper cleanup
- * Remove Node.js <= 0.11.3 special-casing from test case
-
-4.1.2 / 2017-11-20
-==================
-
- * test Node 9 on Travis
- * ensure that `https.get()` uses the patched `https.request()`
-
-4.1.1 / 2017-07-20
-==================
-
- * Correct `https.request()` with a String (#9)
-
-4.1.0 / 2017-06-26
-==================
-
- * mix in Agent options into Request options
- * throw when nothing is returned from agent-base callback
- * do not modify the options object for https requests
-
-4.0.1 / 2017-06-13
-==================
-
- * add `this` context tests and fixes
-
-4.0.0 / 2017-06-06
-==================
-
- * drop support for Node.js < 4
- * drop old versions of Node.js from Travis-CI
- * specify Node.js >= 4.0.0 in `engines.node`
- * remove more old code
- * remove "extend" dependency
- * remove "semver" dependency
- * make the Promise logic a bit cleaner
- * add async function pseudo-example to README
- * use direct return in README example
-
-3.0.0 / 2017-06-02
-==================
-
- * drop support for Node.js v0.8 and v0.10
- * add support for async, Promises, and direct return
- * add a couple `options` test cases
- * implement a `"timeout"` option
- * rename main file to `index.js`
- * test Node 8 on Travis
-
-2.1.1 / 2017-05-30
-==================
-
- * Revert [`fe2162e`](https://github.com/TooTallNate/node-agent-base/commit/fe2162e0ba18123f5b301cba4de1e9dd74e437cd) and [`270bdc9`](https://github.com/TooTallNate/node-agent-base/commit/270bdc92eb8e3bd0444d1e5266e8e9390aeb3095) (fixes #7)
-
-2.1.0 / 2017-05-26
-==================
-
- * unref is not supported for node < 0.9.1 (@pi0)
- * add tests to dangling socket (@pi0)
- * check unref() is supported (@pi0)
- * fix dangling sockets problem (@pi0)
- * add basic "ws" module tests
- * make `Agent` be subclassable
- * turn `addRequest()` into a named function
- * test: Node.js v4 likes to call `cork` on the stream (#3, @tomhughes)
- * travis: test node v4, v5, v6 and v7
-
-2.0.1 / 2015-09-10
-==================
-
- * package: update "semver" to v5.0.1 for WebPack (#1, @vhpoet)
-
-2.0.0 / 2015-07-10
-==================
-
- * refactor to patch Node.js core for more consistent `opts` values
- * ensure that HTTP(s) default port numbers are always given
- * test: use ssl-cert-snakeoil SSL certs
- * test: add tests for arbitrary options
- * README: add API section
- * README: make the Agent HTTP/HTTPS generic in the example
- * README: use SVG for Travis-CI badge
-
-1.0.2 / 2015-06-27
-==================
-
- * agent: set `req._hadError` to true after emitting "error"
- * package: update "mocha" to v2
- * test: add artificial HTTP GET request test
- * test: add artificial data events test
- * test: fix artifical GET response test on node > v0.11.3
- * test: use a real timeout for the async error test
-
-1.0.1 / 2013-09-09
-==================
-
- * Fix passing an "error" object to the callback function on the first tick
-
-1.0.0 / 2013-09-09
-==================
-
- * New API: now you pass a callback function directly
-
-0.0.1 / 2013-07-09
-==================
-
- * Initial release
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/README.md b/node_modules/socks-proxy-agent/node_modules/agent-base/README.md
deleted file mode 100644
index dbeceab8a..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/README.md
+++ /dev/null
@@ -1,145 +0,0 @@
-agent-base
-==========
-### Turn a function into an [`http.Agent`][http.Agent] instance
-[![Build Status](https://travis-ci.org/TooTallNate/node-agent-base.svg?branch=master)](https://travis-ci.org/TooTallNate/node-agent-base)
-
-This module provides an `http.Agent` generator. That is, you pass it an async
-callback function, and it returns a new `http.Agent` instance that will invoke the
-given callback function when sending outbound HTTP requests.
-
-#### Some subclasses:
-
-Here's some more interesting uses of `agent-base`.
-Send a pull request to list yours!
-
- * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints
- * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints
- * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS
- * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS (v4a) proxy `http.Agent` implementation for HTTP and HTTPS
-
-
-Installation
-------------
-
-Install with `npm`:
-
-``` bash
-$ npm install agent-base
-```
-
-
-Example
--------
-
-Here's a minimal example that creates a new `net.Socket` connection to the server
-for every HTTP request (i.e. the equivalent of `agent: false` option):
-
-```js
-var net = require('net');
-var tls = require('tls');
-var url = require('url');
-var http = require('http');
-var agent = require('agent-base');
-
-var endpoint = 'http://nodejs.org/api/';
-var parsed = url.parse(endpoint);
-
-// This is the important part!
-parsed.agent = agent(function (req, opts) {
- var socket;
- // `secureEndpoint` is true when using the https module
- if (opts.secureEndpoint) {
- socket = tls.connect(opts);
- } else {
- socket = net.connect(opts);
- }
- return socket;
-});
-
-// Everything else works just like normal...
-http.get(parsed, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-Returning a Promise or using an `async` function is also supported:
-
-```js
-agent(async function (req, opts) {
- await sleep(1000);
- // etc…
-});
-```
-
-Return another `http.Agent` instance to "pass through" the responsibility
-for that HTTP request to that agent:
-
-```js
-agent(function (req, opts) {
- return opts.secureEndpoint ? https.globalAgent : http.globalAgent;
-});
-```
-
-
-API
----
-
-## Agent(Function callback[, Object options]) → [http.Agent][]
-
-Creates a base `http.Agent` that will execute the callback function `callback`
-for every HTTP request that it is used as the `agent` for. The callback function
-is responsible for creating a `stream.Duplex` instance of some kind that will be
-used as the underlying socket in the HTTP request.
-
-The `options` object accepts the following properties:
-
- * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional).
-
-The callback function should have the following signature:
-
-### callback(http.ClientRequest req, Object options, Function cb) → undefined
-
-The ClientRequest `req` can be accessed to read request headers
-and the path, etc. The `options` object contains the options passed
-to the `http.request()`/`https.request()` function call, and is formatted
-to be directly passed to `net.connect()`/`tls.connect()`, or however
-else you want a Socket to be created. Pass the socket to the callback
-function `cb` once it has been created, and the HTTP request will
-then proceed.
-
-If the `https` module is used to invoke the HTTP request, then the
-`secureEndpoint` property on `options` _will be set to `true`_.
-
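-A minimal callback-style sketch of the signature described above (names such
-as `myAgent` are just for illustration), where the socket is handed to `cb`
-once it is connected:
-
-```js
-var net = require('net');
-var http = require('http');
-var agent = require('agent-base');
-
-var myAgent = agent(function (req, opts, cb) {
-  // Create a plain TCP connection and pass it back once it's ready.
-  var socket = net.connect(opts, function () {
-    cb(null, socket);
-  });
-  socket.once('error', cb);
-});
-
-http.get({ host: 'nodejs.org', path: '/api/', agent: myAgent }, function (res) {
-  console.log(res.statusCode);
-});
-```
-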
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent
-[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent
-[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent
-[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent
-[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/index.js b/node_modules/socks-proxy-agent/node_modules/agent-base/index.js
deleted file mode 100644
index 0ee6b2969..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/index.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict';
-require('./patch-core');
-const inherits = require('util').inherits;
-const promisify = require('es6-promisify');
-const EventEmitter = require('events').EventEmitter;
-
-module.exports = Agent;
-
-function isAgent(v) {
- return v && typeof v.addRequest === 'function';
-}
-
-/**
- * Base `http.Agent` implementation.
- * No pooling/keep-alive is implemented by default.
- *
- * @param {Function} callback
- * @api public
- */
-function Agent(callback, _opts) {
- if (!(this instanceof Agent)) {
- return new Agent(callback, _opts);
- }
-
- EventEmitter.call(this);
-
- // The callback gets promisified if it has 3 parameters
- // (i.e. it has a callback function) lazily
- this._promisifiedCallback = false;
-
- let opts = _opts;
- if ('function' === typeof callback) {
- this.callback = callback;
- } else if (callback) {
- opts = callback;
- }
-
- // timeout for the socket to be returned from the callback
- this.timeout = (opts && opts.timeout) || null;
-
- this.options = opts;
-}
-inherits(Agent, EventEmitter);
-
-/**
- * Override this function in your subclass!
- */
-Agent.prototype.callback = function callback(req, opts) {
- throw new Error(
- '"agent-base" has no default implementation, you must subclass and override `callback()`'
- );
-};
-
-/**
- * Called by node-core's "_http_client.js" module when creating
- * a new HTTP request with this Agent instance.
- *
- * @api public
- */
-Agent.prototype.addRequest = function addRequest(req, _opts) {
- const ownOpts = Object.assign({}, _opts);
-
- // Set default `host` for HTTP to localhost
- if (null == ownOpts.host) {
- ownOpts.host = 'localhost';
- }
-
- // Set default `port` for HTTP if none was explicitly specified
- if (null == ownOpts.port) {
- ownOpts.port = ownOpts.secureEndpoint ? 443 : 80;
- }
-
- const opts = Object.assign({}, this.options, ownOpts);
-
- if (opts.host && opts.path) {
- // If both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete opts.path;
- }
-
- delete opts.agent;
- delete opts.hostname;
- delete opts._defaultAgent;
- delete opts.defaultPort;
- delete opts.createConnection;
-
- // Hint to use "Connection: close"
- // XXX: non-documented `http` module API :(
- req._last = true;
- req.shouldKeepAlive = false;
-
- // Create the `stream.Duplex` instance
- let timeout;
- let timedOut = false;
- const timeoutMs = this.timeout;
- const freeSocket = this.freeSocket;
-
- function onerror(err) {
- if (req._hadError) return;
- req.emit('error', err);
- // For Safety. Some additional errors might fire later on
- // and we need to make sure we don't double-fire the error event.
- req._hadError = true;
- }
-
- function ontimeout() {
- timeout = null;
- timedOut = true;
- const err = new Error(
- 'A "socket" was not created for HTTP request before ' + timeoutMs + 'ms'
- );
- err.code = 'ETIMEOUT';
- onerror(err);
- }
-
- function callbackError(err) {
- if (timedOut) return;
- if (timeout != null) {
- clearTimeout(timeout);
- timeout = null;
- }
- onerror(err);
- }
-
- function onsocket(socket) {
- if (timedOut) return;
- if (timeout != null) {
- clearTimeout(timeout);
- timeout = null;
- }
- if (isAgent(socket)) {
- // `socket` is actually an http.Agent instance, so relinquish
- // responsibility for this `req` to the Agent from here on
- socket.addRequest(req, opts);
- } else if (socket) {
- function onfree() {
- freeSocket(socket, opts);
- }
- socket.on('free', onfree);
- req.onSocket(socket);
- } else {
- const err = new Error(
- 'no Duplex stream was returned to agent-base for `' + req.method + ' ' + req.path + '`'
- );
- onerror(err);
- }
- }
-
- if (!this._promisifiedCallback && this.callback.length >= 3) {
- // Legacy callback function - convert to a Promise
- this.callback = promisify(this.callback, this);
- this._promisifiedCallback = true;
- }
-
- if (timeoutMs > 0) {
- timeout = setTimeout(ontimeout, timeoutMs);
- }
-
- try {
- Promise.resolve(this.callback(req, opts)).then(onsocket, callbackError);
- } catch (err) {
- Promise.reject(err).catch(callbackError);
- }
-};
-
-Agent.prototype.freeSocket = function freeSocket(socket, opts) {
- // TODO reuse sockets
- socket.destroy();
-};
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/package.json b/node_modules/socks-proxy-agent/node_modules/agent-base/package.json
deleted file mode 100644
index 01139d0a6..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/package.json
+++ /dev/null
@@ -1,65 +0,0 @@
-{
- "_from": "agent-base@~4.2.1",
- "_id": "agent-base@4.2.1",
- "_inBundle": false,
- "_integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==",
- "_location": "/socks-proxy-agent/agent-base",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "agent-base@~4.2.1",
- "name": "agent-base",
- "escapedName": "agent-base",
- "rawSpec": "~4.2.1",
- "saveSpec": null,
- "fetchSpec": "~4.2.1"
- },
- "_requiredBy": [
- "/socks-proxy-agent"
- ],
- "_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz",
- "_shasum": "d89e5999f797875674c07d87f260fc41e83e8ca9",
- "_spec": "agent-base@~4.2.1",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/socks-proxy-agent",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://n8.io/"
- },
- "bugs": {
- "url": "https://github.com/TooTallNate/node-agent-base/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "es6-promisify": "^5.0.0"
- },
- "deprecated": false,
- "description": "Turn a function into an `http.Agent` instance",
- "devDependencies": {
- "mocha": "^3.4.2",
- "ws": "^3.0.0"
- },
- "engines": {
- "node": ">= 4.0.0"
- },
- "homepage": "https://github.com/TooTallNate/node-agent-base#readme",
- "keywords": [
- "http",
- "agent",
- "base",
- "barebones",
- "https"
- ],
- "license": "MIT",
- "main": "./index.js",
- "name": "agent-base",
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/node-agent-base.git"
- },
- "scripts": {
- "test": "mocha --reporter spec"
- },
- "version": "4.2.1"
-}
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js b/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js
deleted file mode 100644
index 47d26a72b..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/patch-core.js
+++ /dev/null
@@ -1,37 +0,0 @@
-'use strict';
-const url = require('url');
-const https = require('https');
-
-/**
- * This currently needs to be applied to all Node.js versions
- * in order to determine if the `req` is an HTTP or HTTPS request.
- *
- * There is currently no PR attempting to move this property upstream.
- */
-https.request = (function(request) {
- return function(_options, cb) {
- let options;
- if (typeof _options === 'string') {
- options = url.parse(_options);
- } else {
- options = Object.assign({}, _options);
- }
- if (null == options.port) {
- options.port = 443;
- }
- options.secureEndpoint = true;
- return request.call(https, options, cb);
- };
-})(https.request);
-
-/**
- * This is needed for Node.js >= 9.0.0 to make sure `https.get()` uses the
- * patched `https.request()`.
- *
- * Ref: https://github.com/nodejs/node/commit/5118f31
- */
-https.get = function(options, cb) {
- const req = https.request(options, cb);
- req.end();
- return req;
-};
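The removed patch-core.js exists so agent callbacks can tell HTTP and HTTPS apart: it wraps https.request to copy the options, default the port to 443, and set opts.secureEndpoint = true, and re-defines https.get on top of the wrapped request for Node.js >= 9. A hedged sketch of how that flag is consumed downstream (the host is a placeholder), following the pattern used in the deleted tests below:

var net = require('net');
var tls = require('tls');
var https = require('https');
var Agent = require('agent-base');

var agent = new Agent(function (req, opts, fn) {
  // With the patch applied, HTTPS requests arrive with secureEndpoint: true
  // and port defaulted to 443, so the callback can pick the right transport.
  var socket = opts.secureEndpoint ? tls.connect(opts) : net.connect(opts);
  fn(null, socket);
});

https.get({ host: 'example.com', path: '/', agent: agent });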
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.key b/node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.key
deleted file mode 100644
index fd1250122..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQCzURxIqzer0ACAbX/lHdsn4Gd9PLKrf7EeDYfIdV0HZKPD8WDr
-bBx2/fBu0OW2sjnzv/SVZbJ0DAuPE/p0+eT0qb2qC10iz9iTD7ribd7gxhirVb8y
-b3fBjXsxc8V8p4Ny1LcvNSqCjwUbJqdRogfoJeTiqPM58z5sNzuv5iq7iwIDAQAB
-AoGAPMQy4olrP0UotlzlJ36bowLP70ffgHCwU+/f4NWs5fF78c3du0oSx1w820Dd
-Z7E0JF8bgnlJJTxjumPZz0RUCugrEHBKJmzEz3cxF5E3+7NvteZcjKn9D67RrM5x
-1/uSZ9cqKE9cYvY4fSuHx18diyZ4axR/wB1Pea2utjjDM+ECQQDb9ZbmmaWMiRpQ
-5Up+loxP7BZNPsEVsm+DVJmEFbaFgGfncWBqSIqnPNjMwTwj0OigTwCAEGPkfRVW
-T0pbYWCxAkEA0LK7SCTwzyDmhASUalk0x+3uCAA6ryFdwJf/wd8TRAvVOmkTEldX
-uJ7ldLvfrONYO3v56uKTU/SoNdZYzKtO+wJAX2KM4ctXYy5BXztPpr2acz4qHa1N
-Bh+vBAC34fOYhyQ76r3b1btHhWZ5jbFuZwm9F2erC94Ps5IaoqcX07DSwQJAPKGw
-h2U0EPkd/3zVIZCJJQya+vgWFIs9EZcXVtvYXQyTBkVApTN66MhBIYjzkub5205J
-bVQmOV37AKklY1DhwQJAA1wos0cYxro02edzatxd0DIR2r4qqOqLkw6BhYHhq6HJ
-ZvIcQkHqdSXzdETFc01I1znDGGIrJHcnvKWgBPoEUg==
------END RSA PRIVATE KEY-----
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.pem b/node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.pem
deleted file mode 100644
index b115a5e91..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/test/ssl-cert-snakeoil.pem
+++ /dev/null
@@ -1,12 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIB1TCCAT4CCQDV5mPlzm9+izANBgkqhkiG9w0BAQUFADAvMS0wKwYDVQQDEyQ3
-NTI3YmQ3Ny1hYjNlLTQ3NGItYWNlNy1lZWQ2MDUzOTMxZTcwHhcNMTUwNzA2MjI0
-NTA3WhcNMjUwNzAzMjI0NTA3WjAvMS0wKwYDVQQDEyQ3NTI3YmQ3Ny1hYjNlLTQ3
-NGItYWNlNy1lZWQ2MDUzOTMxZTcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
-ALNRHEirN6vQAIBtf+Ud2yfgZ308sqt/sR4Nh8h1XQdko8PxYOtsHHb98G7Q5bay
-OfO/9JVlsnQMC48T+nT55PSpvaoLXSLP2JMPuuJt3uDGGKtVvzJvd8GNezFzxXyn
-g3LUty81KoKPBRsmp1GiB+gl5OKo8znzPmw3O6/mKruLAgMBAAEwDQYJKoZIhvcN
-AQEFBQADgYEACzoHUF8UV2Z6541Q2wKEA0UFUzmUjf/E1XwBO+1P15ZZ64uw34B4
-1RwMPtAo9RY/PmICTWtNxWGxkzwb2JtDWtnxVER/lF8k2XcXPE76fxTHJF/BKk9J
-QU8OTD1dd9gHCBviQB9TqntRZ5X7axjtuWjb2umY+owBYzAHZkp1HKI=
------END CERTIFICATE-----
diff --git a/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js b/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js
deleted file mode 100644
index 6a8ca68e0..000000000
--- a/node_modules/socks-proxy-agent/node_modules/agent-base/test/test.js
+++ /dev/null
@@ -1,697 +0,0 @@
-/**
- * Module dependencies.
- */
-
-var fs = require('fs');
-var url = require('url');
-var net = require('net');
-var tls = require('tls');
-var http = require('http');
-var https = require('https');
-var WebSocket = require('ws');
-var assert = require('assert');
-var events = require('events');
-var inherits = require('util').inherits;
-var Agent = require('../');
-
-var PassthroughAgent = Agent(function(req, opts) {
- return opts.secureEndpoint ? https.globalAgent : http.globalAgent;
-});
-
-describe('Agent', function() {
- describe('subclass', function() {
- it('should be subclassable', function(done) {
- function MyAgent() {
- Agent.call(this);
- }
- inherits(MyAgent, Agent);
-
- MyAgent.prototype.callback = function(req, opts, fn) {
- assert.equal(req.path, '/foo');
- assert.equal(req.getHeader('host'), '127.0.0.1:1234');
- assert.equal(opts.secureEndpoint, true);
- done();
- };
-
- var info = url.parse('https://127.0.0.1:1234/foo');
- info.agent = new MyAgent();
- https.get(info);
- });
- });
- describe('options', function() {
- it('should support an options Object as first argument', function() {
- var agent = new Agent({ timeout: 1000 });
- assert.equal(1000, agent.timeout);
- });
- it('should support an options Object as second argument', function() {
- var agent = new Agent(function() {}, { timeout: 1000 });
- assert.equal(1000, agent.timeout);
- });
- it('should be mixed in with HTTP request options', function(done) {
- var agent = new Agent({
- host: 'my-proxy.com',
- port: 3128,
- foo: 'bar'
- });
- agent.callback = function(req, opts, fn) {
- assert.equal('bar', opts.foo);
- assert.equal('a', opts.b);
-
- // `host` and `port` are special-cases, and should always be
- // overwritten in the request `opts` inside the agent-base callback
- assert.equal('localhost', opts.host);
- assert.equal(80, opts.port);
- done();
- };
- var opts = {
- b: 'a',
- agent: agent
- };
- http.get(opts);
- });
- });
- describe('`this` context', function() {
- it('should be the Agent instance', function(done) {
- var called = false;
- var agent = new Agent();
- agent.callback = function() {
- called = true;
- assert.equal(this, agent);
- };
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert(/no Duplex stream was returned/.test(err.message));
- done();
- });
- });
- it('should be the Agent instance with callback signature', function(done) {
- var called = false;
- var agent = new Agent();
- agent.callback = function(req, opts, fn) {
- called = true;
- assert.equal(this, agent);
- fn();
- };
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert(/no Duplex stream was returned/.test(err.message));
- done();
- });
- });
- });
- describe('"error" event', function() {
- it('should be invoked on `http.ClientRequest` instance if `callback()` has not been defined', function(
- done
- ) {
- var agent = new Agent();
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert.equal(
- '"agent-base" has no default implementation, you must subclass and override `callback()`',
- err.message
- );
- done();
- });
- });
- it('should be invoked on `http.ClientRequest` instance if Error passed to callback function on the first tick', function(
- done
- ) {
- var agent = new Agent(function(req, opts, fn) {
- fn(new Error('is this caught?'));
- });
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert.equal('is this caught?', err.message);
- done();
- });
- });
- it('should be invoked on `http.ClientRequest` instance if Error passed to callback function after the first tick', function(
- done
- ) {
- var agent = new Agent(function(req, opts, fn) {
- setTimeout(function() {
- fn(new Error('is this caught?'));
- }, 10);
- });
- var info = url.parse('http://127.0.0.1/foo');
- info.agent = agent;
- var req = http.get(info);
- req.on('error', function(err) {
- assert.equal('is this caught?', err.message);
- done();
- });
- });
- });
- describe('artificial "streams"', function() {
- it('should send a GET request', function(done) {
- var stream = new events.EventEmitter();
-
- // needed for the `http` module to call .write() on the stream
- stream.writable = true;
-
- stream.write = function(str) {
- assert(0 == str.indexOf('GET / HTTP/1.1'));
- done();
- };
-
- // needed for `http` module in Node.js 4
- stream.cork = function() {};
-
- var opts = {
- method: 'GET',
- host: '127.0.0.1',
- path: '/',
- port: 80,
- agent: new Agent(function(req, opts, fn) {
- fn(null, stream);
- })
- };
- var req = http.request(opts);
- req.end();
- });
- it('should receive a GET response', function(done) {
- var stream = new events.EventEmitter();
- var opts = {
- method: 'GET',
- host: '127.0.0.1',
- path: '/',
- port: 80,
- agent: new Agent(function(req, opts, fn) {
- fn(null, stream);
- })
- };
- var req = http.request(opts, function(res) {
- assert.equal('0.9', res.httpVersion);
- assert.equal(111, res.statusCode);
- assert.equal('bar', res.headers.foo);
- done();
- });
-
- // have to wait for the "socket" event since `http.ClientRequest`
- // doesn't *actually* attach the listeners to the "stream" until
- // this happens
- req.once('socket', function() {
- var buf = new Buffer(
- 'HTTP/0.9 111\r\n' +
- 'Foo: bar\r\n' +
- 'Set-Cookie: 1\r\n' +
- 'Set-Cookie: 2\r\n\r\n'
- );
- stream.emit('data', buf);
- });
-
- req.end();
- });
- });
-});
-
-describe('"http" module', function() {
- var server;
- var port;
-
- // setup test HTTP server
- before(function(done) {
- server = http.createServer();
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should work for basic HTTP requests', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should support direct return in `connect()`', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts) {
- called = true;
- return net.connect(opts);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should support returning a Promise in `connect()`', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts) {
- return new Promise(function(resolve, reject) {
- called = true;
- resolve(net.connect(opts));
- });
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should set the `Connection: close` response header', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Url', req.url);
- assert.equal('close', req.headers.connection);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/bar');
- info.agent = agent;
- http.get(info, function(res) {
- assert.equal('/bar', res.headers['x-url']);
- assert.equal('close', res.headers.connection);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should pass through options from `http.request()`', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal('google.com', opts.host);
- assert.equal('bar', opts.foo);
- done();
- });
-
- http.get({
- host: 'google.com',
- foo: 'bar',
- agent: agent
- });
- });
-
- it('should default to port 80', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal(80, opts.port);
- done();
- });
-
- // (probably) not hitting a real HTTP server here,
- // so no need to add a httpServer request listener
- http.get({
- host: '127.0.0.1',
- path: '/foo',
- agent: agent
- });
- });
-
- it('should support the "timeout" option', function(done) {
- // ensure we timeout after the "error" event had a chance to trigger
- this.timeout(1000);
- this.slow(800);
-
- var agent = new Agent(
- function(req, opts, fn) {
- // this function will time out
- },
- { timeout: 100 }
- );
-
- var opts = url.parse('http://nodejs.org');
- opts.agent = agent;
-
- var req = http.get(opts);
- req.once('error', function(err) {
- assert.equal('ETIMEOUT', err.code);
- req.abort();
- done();
- });
- });
-
- it('should free sockets after use', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- http.get(info, function(res) {
- res.socket.emit('free');
- assert.equal(true, res.socket.destroyed);
- assert(gotReq);
- done();
- });
- });
-
-
- describe('PassthroughAgent', function() {
- it('should pass through to `http.globalAgent`', function(done) {
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('http://127.0.0.1:' + port + '/foo');
- info.agent = PassthroughAgent;
- http.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- done();
- });
- });
- });
-});
-
-describe('"https" module', function() {
- var server;
- var port;
-
- // setup test HTTPS server
- before(function(done) {
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- server = https.createServer(options);
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should not modify the passed in Options object', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- assert.equal(true, opts.secureEndpoint);
- assert.equal(443, opts.port);
- assert.equal('localhost', opts.host);
- });
- var opts = { agent: agent };
- var req = https.request(opts);
- assert.equal(true, called);
- assert.equal(false, 'secureEndpoint' in opts);
- assert.equal(false, 'port' in opts);
- done();
- });
-
- it('should work with a String URL', function(done) {
- var endpoint = 'https://127.0.0.1:' + port;
- var req = https.get(endpoint);
-
- // it's gonna error out since `rejectUnauthorized` is not being passed in
- req.on('error', function(err) {
- assert.equal(err.code, 'DEPTH_ZERO_SELF_SIGNED_CERT');
- done();
- });
- });
-
- it('should work for basic HTTPS requests', function(done) {
- var called = false;
- var agent = new Agent(function(req, opts, fn) {
- called = true;
- assert(opts.secureEndpoint);
- var socket = tls.connect(opts);
- fn(null, socket);
- });
-
- // add HTTPS server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('https://127.0.0.1:' + port + '/foo');
- info.agent = agent;
- info.rejectUnauthorized = false;
- https.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- assert(called);
- done();
- });
- });
-
- it('should pass through options from `https.request()`', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal('google.com', opts.host);
- assert.equal('bar', opts.foo);
- done();
- });
-
- https.get({
- host: 'google.com',
- foo: 'bar',
- agent: agent
- });
- });
-
- it('should default to port 443', function(done) {
- var agent = new Agent(function(req, opts, fn) {
- assert.equal(true, opts.secureEndpoint);
- assert.equal(false, opts.rejectUnauthorized);
- assert.equal(443, opts.port);
- done();
- });
-
- // (probably) not hitting a real HTTPS server here,
- // so no need to add a httpsServer request listener
- https.get({
- host: '127.0.0.1',
- path: '/foo',
- agent: agent,
- rejectUnauthorized: false
- });
- });
-
- describe('PassthroughAgent', function() {
- it('should pass through to `https.globalAgent`', function(done) {
- // add HTTP server "request" listener
- var gotReq = false;
- server.once('request', function(req, res) {
- gotReq = true;
- res.setHeader('X-Foo', 'bar');
- res.setHeader('X-Url', req.url);
- res.end();
- });
-
- var info = url.parse('https://127.0.0.1:' + port + '/foo');
- info.agent = PassthroughAgent;
- info.rejectUnauthorized = false;
- https.get(info, function(res) {
- assert.equal('bar', res.headers['x-foo']);
- assert.equal('/foo', res.headers['x-url']);
- assert(gotReq);
- done();
- });
- });
- });
-});
-
-describe('"ws" server', function() {
- var wss;
- var server;
- var port;
-
- // setup test HTTP server
- before(function(done) {
- server = http.createServer();
- wss = new WebSocket.Server({ server: server });
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should work for basic WebSocket connections', function(done) {
- function onconnection(ws) {
- ws.on('message', function(data) {
- assert.equal('ping', data);
- ws.send('pong');
- });
- }
- wss.on('connection', onconnection);
-
- var agent = new Agent(function(req, opts, fn) {
- var socket = net.connect(opts);
- fn(null, socket);
- });
-
- var client = new WebSocket('ws://127.0.0.1:' + port + '/', {
- agent: agent
- });
-
- client.on('open', function() {
- client.send('ping');
- });
-
- client.on('message', function(data) {
- assert.equal('pong', data);
- client.close();
- wss.removeListener('connection', onconnection);
- done();
- });
- });
-});
-
-describe('"wss" server', function() {
- var wss;
- var server;
- var port;
-
- // setup test HTTP server
- before(function(done) {
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- server = https.createServer(options);
- wss = new WebSocket.Server({ server: server });
- server.listen(0, function() {
- port = server.address().port;
- done();
- });
- });
-
- // shut down test HTTP server
- after(function(done) {
- server.once('close', function() {
- done();
- });
- server.close();
- });
-
- it('should work for secure WebSocket connections', function(done) {
- function onconnection(ws) {
- ws.on('message', function(data) {
- assert.equal('ping', data);
- ws.send('pong');
- });
- }
- wss.on('connection', onconnection);
-
- var agent = new Agent(function(req, opts, fn) {
- var socket = tls.connect(opts);
- fn(null, socket);
- });
-
- var client = new WebSocket('wss://127.0.0.1:' + port + '/', {
- agent: agent,
- rejectUnauthorized: false
- });
-
- client.on('open', function() {
- client.send('ping');
- });
-
- client.on('message', function(data) {
- assert.equal('pong', data);
- client.close();
- wss.removeListener('connection', onconnection);
- done();
- });
- });
-});
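The deleted suite above also covers WebSocket traffic: because ws accepts an http.Agent-compatible agent option, the same connect callback supplies the underlying socket for ws:// (and, with tls.connect, wss://) connections. A rough sketch along those lines, with a placeholder URL:

var net = require('net');
var WebSocket = require('ws');
var Agent = require('agent-base');

var agent = new Agent(function (req, opts, fn) {
  // Plain TCP for ws://; a wss:// client would use tls.connect(opts) instead.
  fn(null, net.connect(opts));
});

var client = new WebSocket('ws://127.0.0.1:8080/', { agent: agent });
client.on('open', function () { client.send('ping'); });
client.on('message', function (data) { console.log('received:', data); });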
diff --git a/node_modules/socks-proxy-agent/package.json b/node_modules/socks-proxy-agent/package.json
deleted file mode 100644
index 7f7023f5a..000000000
--- a/node_modules/socks-proxy-agent/package.json
+++ /dev/null
@@ -1,71 +0,0 @@
-{
- "_from": "socks-proxy-agent@^4.0.0",
- "_id": "socks-proxy-agent@4.0.2",
- "_inBundle": false,
- "_integrity": "sha512-NT6syHhI9LmuEMSK6Kd2V7gNv5KFZoLE7V5udWmn0de+3Mkj3UMA/AJPLyeNUVmElCurSHtUdM3ETpR3z770Wg==",
- "_location": "/socks-proxy-agent",
- "_phantomChildren": {
- "es6-promisify": "5.0.0"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "socks-proxy-agent@^4.0.0",
- "name": "socks-proxy-agent",
- "escapedName": "socks-proxy-agent",
- "rawSpec": "^4.0.0",
- "saveSpec": null,
- "fetchSpec": "^4.0.0"
- },
- "_requiredBy": [
- "/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-4.0.2.tgz",
- "_shasum": "3c8991f3145b2799e70e11bd5fbc8b1963116386",
- "_spec": "socks-proxy-agent@^4.0.0",
- "_where": "/Users/isaacs/dev/npm/cli/node_modules/make-fetch-happen",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://n8.io/"
- },
- "bugs": {
- "url": "https://github.com/TooTallNate/node-socks-proxy-agent/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "agent-base": "~4.2.1",
- "socks": "~2.3.2"
- },
- "deprecated": false,
- "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
- "devDependencies": {
- "mocha": "~5.1.0",
- "raw-body": "~2.3.2",
- "socksv5": "0.0.6"
- },
- "engines": {
- "node": ">= 6"
- },
- "homepage": "https://github.com/TooTallNate/node-socks-proxy-agent#readme",
- "keywords": [
- "socks",
- "socks4",
- "socks4a",
- "proxy",
- "http",
- "https",
- "agent"
- ],
- "license": "MIT",
- "main": "./index.js",
- "name": "socks-proxy-agent",
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/node-socks-proxy-agent.git"
- },
- "scripts": {
- "test": "mocha --reporter spec"
- },
- "version": "4.0.2"
-}
diff --git a/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key b/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key
deleted file mode 100644
index fd1250122..000000000
--- a/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQCzURxIqzer0ACAbX/lHdsn4Gd9PLKrf7EeDYfIdV0HZKPD8WDr
-bBx2/fBu0OW2sjnzv/SVZbJ0DAuPE/p0+eT0qb2qC10iz9iTD7ribd7gxhirVb8y
-b3fBjXsxc8V8p4Ny1LcvNSqCjwUbJqdRogfoJeTiqPM58z5sNzuv5iq7iwIDAQAB
-AoGAPMQy4olrP0UotlzlJ36bowLP70ffgHCwU+/f4NWs5fF78c3du0oSx1w820Dd
-Z7E0JF8bgnlJJTxjumPZz0RUCugrEHBKJmzEz3cxF5E3+7NvteZcjKn9D67RrM5x
-1/uSZ9cqKE9cYvY4fSuHx18diyZ4axR/wB1Pea2utjjDM+ECQQDb9ZbmmaWMiRpQ
-5Up+loxP7BZNPsEVsm+DVJmEFbaFgGfncWBqSIqnPNjMwTwj0OigTwCAEGPkfRVW
-T0pbYWCxAkEA0LK7SCTwzyDmhASUalk0x+3uCAA6ryFdwJf/wd8TRAvVOmkTEldX
-uJ7ldLvfrONYO3v56uKTU/SoNdZYzKtO+wJAX2KM4ctXYy5BXztPpr2acz4qHa1N
-Bh+vBAC34fOYhyQ76r3b1btHhWZ5jbFuZwm9F2erC94Ps5IaoqcX07DSwQJAPKGw
-h2U0EPkd/3zVIZCJJQya+vgWFIs9EZcXVtvYXQyTBkVApTN66MhBIYjzkub5205J
-bVQmOV37AKklY1DhwQJAA1wos0cYxro02edzatxd0DIR2r4qqOqLkw6BhYHhq6HJ
-ZvIcQkHqdSXzdETFc01I1znDGGIrJHcnvKWgBPoEUg==
------END RSA PRIVATE KEY-----
diff --git a/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem b/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem
deleted file mode 100644
index b115a5e91..000000000
--- a/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem
+++ /dev/null
@@ -1,12 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIB1TCCAT4CCQDV5mPlzm9+izANBgkqhkiG9w0BAQUFADAvMS0wKwYDVQQDEyQ3
-NTI3YmQ3Ny1hYjNlLTQ3NGItYWNlNy1lZWQ2MDUzOTMxZTcwHhcNMTUwNzA2MjI0
-NTA3WhcNMjUwNzAzMjI0NTA3WjAvMS0wKwYDVQQDEyQ3NTI3YmQ3Ny1hYjNlLTQ3
-NGItYWNlNy1lZWQ2MDUzOTMxZTcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
-ALNRHEirN6vQAIBtf+Ud2yfgZ308sqt/sR4Nh8h1XQdko8PxYOtsHHb98G7Q5bay
-OfO/9JVlsnQMC48T+nT55PSpvaoLXSLP2JMPuuJt3uDGGKtVvzJvd8GNezFzxXyn
-g3LUty81KoKPBRsmp1GiB+gl5OKo8znzPmw3O6/mKruLAgMBAAEwDQYJKoZIhvcN
-AQEFBQADgYEACzoHUF8UV2Z6541Q2wKEA0UFUzmUjf/E1XwBO+1P15ZZ64uw34B4
-1RwMPtAo9RY/PmICTWtNxWGxkzwb2JtDWtnxVER/lF8k2XcXPE76fxTHJF/BKk9J
-QU8OTD1dd9gHCBviQB9TqntRZ5X7axjtuWjb2umY+owBYzAHZkp1HKI=
------END CERTIFICATE-----
diff --git a/node_modules/socks-proxy-agent/test/test.js b/node_modules/socks-proxy-agent/test/test.js
deleted file mode 100644
index 968ef650f..000000000
--- a/node_modules/socks-proxy-agent/test/test.js
+++ /dev/null
@@ -1,144 +0,0 @@
-
-/**
- * Module dependencies.
- */
-
-var fs = require('fs');
-var url = require('url');
-var http = require('http');
-var https = require('https');
-var assert = require('assert');
-var socks = require('socksv5');
-var getRawBody = require('raw-body');
-var SocksProxyAgent = require('../');
-
-describe('SocksProxyAgent', function () {
- var httpServer, httpPort;
- var httpsServer, httpsPort;
- var socksServer, socksPort;
-
- before(function (done) {
- // setup SOCKS proxy server
- socksServer = socks.createServer(function(info, accept, deny) {
- accept();
- });
- socksServer.listen(0, '127.0.0.1', function() {
- socksPort = socksServer.address().port;
- //console.log('SOCKS server listening on port %d', socksPort);
- done();
- });
- socksServer.useAuth(socks.auth.None());
- //socksServer.useAuth(socks.auth.UserPassword(function(user, password, cb) {
- // cb(user === 'nodejs' && password === 'rules!');
- //}));
- });
-
- before(function (done) {
- // setup target HTTP server
- httpServer = http.createServer();
- httpServer.listen(function () {
- httpPort = httpServer.address().port;
- done();
- });
- });
-
- before(function (done) {
- // setup target SSL HTTPS server
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- httpsServer = https.createServer(options);
- httpsServer.listen(function () {
- httpsPort = httpsServer.address().port;
- done();
- });
- });
-
- after(function (done) {
- socksServer.once('close', function () { done(); });
- socksServer.close();
- });
-
- after(function (done) {
- httpServer.once('close', function () { done(); });
- httpServer.close();
- });
-
- after(function (done) {
- httpsServer.once('close', function () { done(); });
- httpsServer.close();
- });
-
- describe('constructor', function () {
- it('should throw an Error if no "proxy" argument is given', function () {
- assert.throws(function () {
- new SocksProxyAgent();
- });
- });
- it('should accept a "string" proxy argument', function () {
- var agent = new SocksProxyAgent('socks://127.0.0.1:' + socksPort);
- assert.equal('127.0.0.1', agent.proxy.host);
- assert.equal(socksPort, agent.proxy.port);
- });
- it('should accept a `url.parse()` result object argument', function () {
- var opts = url.parse('socks://127.0.0.1:' + socksPort);
- var agent = new SocksProxyAgent(opts);
- assert.equal('127.0.0.1', agent.proxy.host);
- assert.equal(socksPort, agent.proxy.port);
- });
- });
-
- describe('"http" module', function () {
- it('should work against an HTTP endpoint', function (done) {
- httpServer.once('request', function (req, res) {
- assert.equal('/foo', req.url);
- res.statusCode = 404;
- res.end(JSON.stringify(req.headers));
- });
-
- var agent = new SocksProxyAgent('socks://127.0.0.1:' + socksPort);
- var opts = url.parse('http://127.0.0.1:' + httpPort + '/foo');
- opts.agent = agent;
- opts.headers = { foo: 'bar' };
- var req = http.get(opts, function (res) {
- assert.equal(404, res.statusCode);
- getRawBody(res, 'utf8', function (err, buf) {
- if (err) return done(err);
- var data = JSON.parse(buf);
- assert.equal('bar', data.foo);
- done();
- });
- });
- req.once('error', done);
- });
- });
-
- describe('"https" module', function () {
- it('should work against an HTTPS endpoint', function (done) {
- httpsServer.once('request', function (req, res) {
- assert.equal('/foo', req.url);
- res.statusCode = 404;
- res.end(JSON.stringify(req.headers));
- });
-
- var agent = new SocksProxyAgent('socks://127.0.0.1:' + socksPort);
- var opts = url.parse('https://127.0.0.1:' + httpsPort + '/foo');
- opts.agent = agent;
- opts.rejectUnauthorized = false;
-
- opts.headers = { foo: 'bar' };
- var req = https.get(opts, function (res) {
- assert.equal(404, res.statusCode);
- getRawBody(res, 'utf8', function (err, buf) {
- if (err) return done(err);
- var data = JSON.parse(buf);
- assert.equal('bar', data.foo);
- done();
- });
- });
- req.once('error', done);
- });
- });
-
-});
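The removed tests above show the intended use of socks-proxy-agent: construct it from a socks:// URL (a string or a url.parse() result) and pass it as the agent for plain http or https requests, which are then tunnelled through the SOCKS server. A short sketch, with a placeholder proxy address:

var url = require('url');
var https = require('https');
var SocksProxyAgent = require('socks-proxy-agent');

// socks://127.0.0.1:1080 is a stand-in for a real SOCKS5 proxy.
var agent = new SocksProxyAgent('socks://127.0.0.1:1080');

var opts = url.parse('https://registry.npmjs.org/npm');
opts.agent = agent;

https.get(opts, function (res) {
  console.log('status:', res.statusCode);
});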
diff --git a/node_modules/socks-proxy-agent/yarn.lock b/node_modules/socks-proxy-agent/yarn.lock
deleted file mode 100644
index 337f8152b..000000000
--- a/node_modules/socks-proxy-agent/yarn.lock
+++ /dev/null
@@ -1,354 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1
-
-
-agent-base@~4.2.1:
- version "4.2.1"
- resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.2.1.tgz#d89e5999f797875674c07d87f260fc41e83e8ca9"
- integrity sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==
- dependencies:
- es6-promisify "^5.0.0"
-
-async@0.2.x:
- version "0.2.10"
- resolved "https://registry.yarnpkg.com/async/-/async-0.2.10.tgz#b6bbe0b0674b9d719708ca38de8c237cb526c3d1"
- integrity sha1-trvgsGdLnXGXCMo43owjfLUmw9E=
-
-balanced-match@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
- integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
-
-brace-expansion@^1.1.7:
- version "1.1.11"
- resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
- integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
- dependencies:
- balanced-match "^1.0.0"
- concat-map "0.0.1"
-
-browser-stdout@1.3.1:
- version "1.3.1"
- resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
- integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==
-
-bytes@3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048"
- integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=
-
-cli@0.4.x:
- version "0.4.5"
- resolved "https://registry.yarnpkg.com/cli/-/cli-0.4.5.tgz#78f9485cd161b566e9a6c72d7170c4270e81db61"
- integrity sha1-ePlIXNFhtWbppsctcXDEJw6B22E=
- dependencies:
- glob ">= 3.1.4"
-
-cliff@0.1.x:
- version "0.1.10"
- resolved "https://registry.yarnpkg.com/cliff/-/cliff-0.1.10.tgz#53be33ea9f59bec85609ee300ac4207603e52013"
- integrity sha1-U74z6p9ZvshWCe4wCsQgdgPlIBM=
- dependencies:
- colors "~1.0.3"
- eyes "~0.1.8"
- winston "0.8.x"
-
-colors@0.6.x:
- version "0.6.2"
- resolved "https://registry.yarnpkg.com/colors/-/colors-0.6.2.tgz#2423fe6678ac0c5dae8852e5d0e5be08c997abcc"
- integrity sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=
-
-colors@~1.0.3:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/colors/-/colors-1.0.3.tgz#0433f44d809680fdeb60ed260f1b0c262e82a40b"
- integrity sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=
-
-commander@2.11.0:
- version "2.11.0"
- resolved "https://registry.yarnpkg.com/commander/-/commander-2.11.0.tgz#157152fd1e7a6c8d98a5b715cf376df928004563"
- integrity sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==
-
-concat-map@0.0.1:
- version "0.0.1"
- resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
- integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
-
-cycle@1.0.x:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/cycle/-/cycle-1.0.3.tgz#21e80b2be8580f98b468f379430662b046c34ad2"
- integrity sha1-IegLK+hYD5i0aPN5QwZisEbDStI=
-
-debug@3.1.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
- integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==
- dependencies:
- ms "2.0.0"
-
-depd@~1.1.2:
- version "1.1.2"
- resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
- integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
-
-diff@3.5.0:
- version "3.5.0"
- resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
- integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
-
-es6-promise@^4.0.3:
- version "4.2.6"
- resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.6.tgz#b685edd8258886365ea62b57d30de28fadcd974f"
- integrity sha512-aRVgGdnmW2OiySVPUC9e6m+plolMAJKjZnQlCwNSuK5yQ0JN61DZSO1X1Ufd1foqWRAlig0rhduTCHe7sVtK5Q==
-
-es6-promisify@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203"
- integrity sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=
- dependencies:
- es6-promise "^4.0.3"
-
-escape-string-regexp@1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
- integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
-
-eyes@0.1.x, eyes@~0.1.8:
- version "0.1.8"
- resolved "https://registry.yarnpkg.com/eyes/-/eyes-0.1.8.tgz#62cf120234c683785d902348a800ef3e0cc20bc0"
- integrity sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=
-
-fs.realpath@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
- integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
-
-glob@7.1.2:
- version "7.1.2"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
- integrity sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==
- dependencies:
- fs.realpath "^1.0.0"
- inflight "^1.0.4"
- inherits "2"
- minimatch "^3.0.4"
- once "^1.3.0"
- path-is-absolute "^1.0.0"
-
-"glob@>= 3.1.4":
- version "7.1.3"
- resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
- integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==
- dependencies:
- fs.realpath "^1.0.0"
- inflight "^1.0.4"
- inherits "2"
- minimatch "^3.0.4"
- once "^1.3.0"
- path-is-absolute "^1.0.0"
-
-growl@1.10.3:
- version "1.10.3"
- resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.3.tgz#1926ba90cf3edfe2adb4927f5880bc22c66c790f"
- integrity sha512-hKlsbA5Vu3xsh1Cg3J7jSmX/WaW6A5oBeqzM88oNbCRQFz+zUaXm6yxS4RVytp1scBoJzSYl4YAEOQIt6O8V1Q==
-
-has-flag@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51"
- integrity sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=
-
-he@1.1.1:
- version "1.1.1"
- resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
- integrity sha1-k0EP0hsAlzUVH4howvJx80J+I/0=
-
-http-errors@1.6.3:
- version "1.6.3"
- resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d"
- integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=
- dependencies:
- depd "~1.1.2"
- inherits "2.0.3"
- setprototypeof "1.1.0"
- statuses ">= 1.4.0 < 2"
-
-iconv-lite@0.4.23:
- version "0.4.23"
- resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63"
- integrity sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA==
- dependencies:
- safer-buffer ">= 2.1.2 < 3"
-
-inflight@^1.0.4:
- version "1.0.6"
- resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
- integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
- dependencies:
- once "^1.3.0"
- wrappy "1"
-
-inherits@2, inherits@2.0.3:
- version "2.0.3"
- resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
- integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
-
-ip@^1.1.5:
- version "1.1.5"
- resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
- integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=
-
-ipv6@*:
- version "3.1.3"
- resolved "https://registry.yarnpkg.com/ipv6/-/ipv6-3.1.3.tgz#4d9064f9c2dafa0dd10b8b7d76ffca4aad31b3b9"
- integrity sha1-TZBk+cLa+g3RC4t9dv/KSq0xs7k=
- dependencies:
- cli "0.4.x"
- cliff "0.1.x"
- sprintf "0.1.x"
-
-isstream@0.1.x:
- version "0.1.2"
- resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a"
- integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=
-
-minimatch@3.0.4, minimatch@^3.0.4:
- version "3.0.4"
- resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
- integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
- dependencies:
- brace-expansion "^1.1.7"
-
-minimist@0.0.8:
- version "0.0.8"
- resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
- integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=
-
-mkdirp@0.5.1:
- version "0.5.1"
- resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
- integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
- dependencies:
- minimist "0.0.8"
-
-mocha@~5.1.0:
- version "5.1.1"
- resolved "https://registry.yarnpkg.com/mocha/-/mocha-5.1.1.tgz#b774c75609dac05eb48f4d9ba1d827b97fde8a7b"
- integrity sha512-kKKs/H1KrMMQIEsWNxGmb4/BGsmj0dkeyotEvbrAuQ01FcWRLssUNXCEUZk6SZtyJBi6EE7SL0zDDtItw1rGhw==
- dependencies:
- browser-stdout "1.3.1"
- commander "2.11.0"
- debug "3.1.0"
- diff "3.5.0"
- escape-string-regexp "1.0.5"
- glob "7.1.2"
- growl "1.10.3"
- he "1.1.1"
- minimatch "3.0.4"
- mkdirp "0.5.1"
- supports-color "4.4.0"
-
-ms@2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
- integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
-
-once@^1.3.0:
- version "1.4.0"
- resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
- integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
- dependencies:
- wrappy "1"
-
-path-is-absolute@^1.0.0:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
- integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
-
-pkginfo@0.3.x:
- version "0.3.1"
- resolved "https://registry.yarnpkg.com/pkginfo/-/pkginfo-0.3.1.tgz#5b29f6a81f70717142e09e765bbeab97b4f81e21"
- integrity sha1-Wyn2qB9wcXFC4J52W76rl7T4HiE=
-
-raw-body@~2.3.2:
- version "2.3.3"
- resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.3.tgz#1b324ece6b5706e153855bc1148c65bb7f6ea0c3"
- integrity sha512-9esiElv1BrZoI3rCDuOuKCBRbuApGGaDPQfjSflGxdy4oyzqghxu6klEkkVIvBje+FF0BX9coEv8KqW6X/7njw==
- dependencies:
- bytes "3.0.0"
- http-errors "1.6.3"
- iconv-lite "0.4.23"
- unpipe "1.0.0"
-
-"safer-buffer@>= 2.1.2 < 3":
- version "2.1.2"
- resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
- integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
-
-setprototypeof@1.1.0:
- version "1.1.0"
- resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656"
- integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==
-
-smart-buffer@4.0.2:
- version "4.0.2"
- resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.0.2.tgz#5207858c3815cc69110703c6b94e46c15634395d"
- integrity sha512-JDhEpTKzXusOqXZ0BUIdH+CjFdO/CR3tLlf5CN34IypI+xMmXW1uB16OOY8z3cICbJlDAVJzNbwBhNO0wt9OAw==
-
-socks@~2.3.2:
- version "2.3.2"
- resolved "https://registry.yarnpkg.com/socks/-/socks-2.3.2.tgz#ade388e9e6d87fdb11649c15746c578922a5883e"
- integrity sha512-pCpjxQgOByDHLlNqlnh/mNSAxIUkyBBuwwhTcV+enZGbDaClPvHdvm6uvOwZfFJkam7cGhBNbb4JxiP8UZkRvQ==
- dependencies:
- ip "^1.1.5"
- smart-buffer "4.0.2"
-
-socksv5@0.0.6:
- version "0.0.6"
- resolved "https://registry.yarnpkg.com/socksv5/-/socksv5-0.0.6.tgz#1327235ff7e8de21ac434a0a579dc69c3f071061"
- integrity sha1-EycjX/fo3iGsQ0oKV53GnD8HEGE=
- dependencies:
- ipv6 "*"
-
-sprintf@0.1.x:
- version "0.1.5"
- resolved "https://registry.yarnpkg.com/sprintf/-/sprintf-0.1.5.tgz#8f83e39a9317c1a502cb7db8050e51c679f6edcf"
- integrity sha1-j4PjmpMXwaUCy324BQ5Rxnn27c8=
-
-stack-trace@0.0.x:
- version "0.0.10"
- resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0"
- integrity sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=
-
-"statuses@>= 1.4.0 < 2":
- version "1.5.0"
- resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
- integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=
-
-supports-color@4.4.0:
- version "4.4.0"
- resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.4.0.tgz#883f7ddabc165142b2a61427f3352ded195d1a3e"
- integrity sha512-rKC3+DyXWgK0ZLKwmRsrkyHVZAjNkfzeehuFWdGGcqGDTZFH73+RH6S/RDAAxl9GusSjZSUWYLmT9N5pzXFOXQ==
- dependencies:
- has-flag "^2.0.0"
-
-unpipe@1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
- integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
-
-winston@0.8.x:
- version "0.8.3"
- resolved "https://registry.yarnpkg.com/winston/-/winston-0.8.3.tgz#64b6abf4cd01adcaefd5009393b1d8e8bec19db0"
- integrity sha1-ZLar9M0Brcrv1QCTk7HY6L7BnbA=
- dependencies:
- async "0.2.x"
- colors "0.6.x"
- cycle "1.0.x"
- eyes "0.1.x"
- isstream "0.1.x"
- pkginfo "0.3.x"
- stack-trace "0.0.x"
-
-wrappy@1:
- version "1.0.2"
- resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
- integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
diff --git a/package-lock.json b/package-lock.json
index 61ceca812..ca9ccda6e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -462,22 +462,6 @@
}
}
},
- "agent-base": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz",
- "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==",
- "requires": {
- "es6-promisify": "^5.0.0"
- }
- },
- "agentkeepalive": {
- "version": "3.5.2",
- "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.5.2.tgz",
- "integrity": "sha512-e0L/HNe6qkQ7H19kTlRRqUibEAwDK5AFk6y3PtMsuut2VAH6+Q4xZml1tNDJD7kSAyqmbG/K08K5WEJYtUrSlQ==",
- "requires": {
- "humanize-ms": "^1.2.1"
- }
- },
"aggregate-error": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.0.1.tgz",
@@ -1483,6 +1467,7 @@
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
"integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
+ "dev": true,
"requires": {
"ms": "2.0.0"
},
@@ -1490,7 +1475,8 @@
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
- "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
+ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=",
+ "dev": true
}
}
},
@@ -1722,6 +1708,7 @@
"version": "0.1.12",
"resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz",
"integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=",
+ "optional": true,
"requires": {
"iconv-lite": "~0.4.13"
}
@@ -1795,19 +1782,6 @@
"integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==",
"dev": true
},
- "es6-promise": {
- "version": "4.2.8",
- "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz",
- "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w=="
- },
- "es6-promisify": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz",
- "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=",
- "requires": {
- "es6-promise": "^4.0.3"
- }
- },
"escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
@@ -2648,11 +2622,6 @@
}
}
},
- "genfun": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/genfun/-/genfun-5.0.0.tgz",
- "integrity": "sha512-KGDOARWVga7+rnB3z9Sd2Letx515owfk0hSxHGuqjANb1M+x2bGZGqHLiozPsYMdM2OubeMni/Hpwmjq6qIUhA=="
- },
"gentle-fs": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/gentle-fs/-/gentle-fs-2.3.0.tgz",
@@ -2862,11 +2831,6 @@
"integrity": "sha512-a4u9BeERWGu/S8JiWEAQcdrg9v4QArtP9keViQjGMdff20fBdd8waotXaNmODqBe6uZ3Nafi7K/ho4gCQHV3Ig==",
"dev": true
},
- "http-cache-semantics": {
- "version": "3.8.1",
- "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz",
- "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w=="
- },
"http-proxy": {
"version": "0.8.7",
"resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-0.8.7.tgz",
@@ -2886,15 +2850,6 @@
}
}
},
- "http-proxy-agent": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-2.1.0.tgz",
- "integrity": "sha512-qwHbBLV7WviBl0rQsOzH6o5lwyOIvwp/BdFnvVxXORldu5TmjFfjzBcWUWS5kWAZhmv+JtiDhSuQCp4sBfbIgg==",
- "requires": {
- "agent-base": "4",
- "debug": "3.1.0"
- }
- },
"http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
@@ -2905,15 +2860,6 @@
"sshpk": "^1.7.0"
}
},
- "https-proxy-agent": {
- "version": "2.2.4",
- "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.4.tgz",
- "integrity": "sha512-OmvfoQ53WLjtA9HeYP9RNrWMJzzAz1JGaSFr1nijg0PVR1JaD/xbJq1mdEIIlxGpXp9eSe/O2LgU9DJmTPd0Eg==",
- "requires": {
- "agent-base": "^4.3.0",
- "debug": "^3.1.0"
- }
- },
"humanize-ms": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
@@ -3591,236 +3537,6 @@
"type-check": "~0.3.2"
}
},
- "libcipm": {
- "version": "4.0.7",
- "resolved": "https://registry.npmjs.org/libcipm/-/libcipm-4.0.7.tgz",
- "integrity": "sha512-fTq33otU3PNXxxCTCYCYe7V96o59v/o7bvtspmbORXpgFk+wcWrGf5x6tBgui5gCed/45/wtPomBsZBYm5KbIw==",
- "requires": {
- "bin-links": "^1.1.2",
- "bluebird": "^3.5.1",
- "figgy-pudding": "^3.5.1",
- "find-npm-prefix": "^1.0.2",
- "graceful-fs": "^4.1.11",
- "ini": "^1.3.5",
- "lock-verify": "^2.0.2",
- "mkdirp": "^0.5.1",
- "npm-lifecycle": "^3.0.0",
- "npm-logical-tree": "^1.2.1",
- "npm-package-arg": "^6.1.0",
- "pacote": "^9.1.0",
- "read-package-json": "^2.0.13",
- "rimraf": "^2.6.2",
- "worker-farm": "^1.6.0"
- },
- "dependencies": {
- "cacache": {
- "version": "12.0.3",
- "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz",
- "integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==",
- "requires": {
- "bluebird": "^3.5.5",
- "chownr": "^1.1.1",
- "figgy-pudding": "^3.5.1",
- "glob": "^7.1.4",
- "graceful-fs": "^4.1.15",
- "infer-owner": "^1.0.3",
- "lru-cache": "^5.1.1",
- "mississippi": "^3.0.0",
- "mkdirp": "^0.5.1",
- "move-concurrently": "^1.0.1",
- "promise-inflight": "^1.0.1",
- "rimraf": "^2.6.3",
- "ssri": "^6.0.1",
- "unique-filename": "^1.1.1",
- "y18n": "^4.0.0"
- }
- },
- "fs-minipass": {
- "version": "1.2.7",
- "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz",
- "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==",
- "requires": {
- "minipass": "^2.6.0"
- }
- },
- "minipass": {
- "version": "2.9.0",
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz",
- "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==",
- "requires": {
- "safe-buffer": "^5.1.2",
- "yallist": "^3.0.0"
- },
- "dependencies": {
- "yallist": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
- "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="
- }
- }
- },
- "minizlib": {
- "version": "1.3.3",
- "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz",
- "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==",
- "requires": {
- "minipass": "^2.9.0"
- }
- },
- "npm-package-arg": {
- "version": "6.1.1",
- "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz",
- "integrity": "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==",
- "requires": {
- "hosted-git-info": "^2.7.1",
- "osenv": "^0.1.5",
- "semver": "^5.6.0",
- "validate-npm-package-name": "^3.0.0"
- },
- "dependencies": {
- "hosted-git-info": {
- "version": "2.8.5",
- "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz",
- "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg=="
- },
- "semver": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
- }
- }
- },
- "npm-pick-manifest": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-3.0.2.tgz",
- "integrity": "sha512-wNprTNg+X5nf+tDi+hbjdHhM4bX+mKqv6XmPh7B5eG+QY9VARfQPfCEH013H5GqfNj6ee8Ij2fg8yk0mzps1Vw==",
- "requires": {
- "figgy-pudding": "^3.5.1",
- "npm-package-arg": "^6.0.0",
- "semver": "^5.4.1"
- },
- "dependencies": {
- "semver": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
- }
- }
- },
- "npm-registry-fetch": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-4.0.3.tgz",
- "integrity": "sha512-WGvUx0lkKFhu9MbiGFuT9nG2NpfQ+4dCJwRwwtK2HK5izJEvwDxMeUyqbuMS7N/OkpVCqDorV6rO5E4V9F8lJw==",
- "requires": {
- "JSONStream": "^1.3.4",
- "bluebird": "^3.5.1",
- "figgy-pudding": "^3.4.1",
- "lru-cache": "^5.1.1",
- "make-fetch-happen": "^5.0.0",
- "npm-package-arg": "^6.1.0",
- "safe-buffer": "^5.2.0"
- },
- "dependencies": {
- "safe-buffer": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.0.tgz",
- "integrity": "sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg=="
- }
- }
- },
- "pacote": {
- "version": "9.5.12",
- "resolved": "https://registry.npmjs.org/pacote/-/pacote-9.5.12.tgz",
- "integrity": "sha512-BUIj/4kKbwWg4RtnBncXPJd15piFSVNpTzY0rysSr3VnMowTYgkGKcaHrbReepAkjTr8lH2CVWRi58Spg2CicQ==",
- "requires": {
- "bluebird": "^3.5.3",
- "cacache": "^12.0.2",
- "chownr": "^1.1.2",
- "figgy-pudding": "^3.5.1",
- "get-stream": "^4.1.0",
- "glob": "^7.1.3",
- "infer-owner": "^1.0.4",
- "lru-cache": "^5.1.1",
- "make-fetch-happen": "^5.0.0",
- "minimatch": "^3.0.4",
- "minipass": "^2.3.5",
- "mississippi": "^3.0.0",
- "mkdirp": "^0.5.1",
- "normalize-package-data": "^2.4.0",
- "npm-normalize-package-bin": "^1.0.0",
- "npm-package-arg": "^6.1.0",
- "npm-packlist": "^1.1.12",
- "npm-pick-manifest": "^3.0.0",
- "npm-registry-fetch": "^4.0.0",
- "osenv": "^0.1.5",
- "promise-inflight": "^1.0.1",
- "promise-retry": "^1.1.1",
- "protoduck": "^5.0.1",
- "rimraf": "^2.6.2",
- "safe-buffer": "^5.1.2",
- "semver": "^5.6.0",
- "ssri": "^6.0.1",
- "tar": "^4.4.10",
- "unique-filename": "^1.1.1",
- "which": "^1.3.1"
- },
- "dependencies": {
- "npm-packlist": {
- "version": "1.4.8",
- "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz",
- "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==",
- "requires": {
- "ignore-walk": "^3.0.1",
- "npm-bundled": "^1.0.1",
- "npm-normalize-package-bin": "^1.0.1"
- }
- },
- "semver": {
- "version": "5.7.1",
- "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
- "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
- }
- }
- },
- "ssri": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz",
- "integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==",
- "requires": {
- "figgy-pudding": "^3.5.1"
- }
- },
- "tar": {
- "version": "4.4.13",
- "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz",
- "integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==",
- "requires": {
- "chownr": "^1.1.1",
- "fs-minipass": "^1.2.5",
- "minipass": "^2.8.6",
- "minizlib": "^1.2.1",
- "mkdirp": "^0.5.0",
- "safe-buffer": "^5.1.2",
- "yallist": "^3.0.3"
- },
- "dependencies": {
- "yallist": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
- "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="
- }
- }
- },
- "which": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
- "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
- "requires": {
- "isexe": "^2.0.0"
- }
- }
- }
- },
"libnpmaccess": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/libnpmaccess/-/libnpmaccess-4.0.0.tgz",
@@ -4249,56 +3965,6 @@
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true
},
- "make-fetch-happen": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-5.0.2.tgz",
- "integrity": "sha512-07JHC0r1ykIoruKO8ifMXu+xEU8qOXDFETylktdug6vJDACnP+HKevOu3PXyNPzFyTSlz8vrBYlBO1JZRe8Cag==",
- "requires": {
- "agentkeepalive": "^3.4.1",
- "cacache": "^12.0.0",
- "http-cache-semantics": "^3.8.1",
- "http-proxy-agent": "^2.1.0",
- "https-proxy-agent": "^2.2.3",
- "lru-cache": "^5.1.1",
- "mississippi": "^3.0.0",
- "node-fetch-npm": "^2.0.2",
- "promise-retry": "^1.1.1",
- "socks-proxy-agent": "^4.0.0",
- "ssri": "^6.0.0"
- },
- "dependencies": {
- "cacache": {
- "version": "12.0.3",
- "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz",
- "integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==",
- "requires": {
- "bluebird": "^3.5.5",
- "chownr": "^1.1.1",
- "figgy-pudding": "^3.5.1",
- "glob": "^7.1.4",
- "graceful-fs": "^4.1.15",
- "infer-owner": "^1.0.3",
- "lru-cache": "^5.1.1",
- "mississippi": "^3.0.0",
- "mkdirp": "^0.5.1",
- "move-concurrently": "^1.0.1",
- "promise-inflight": "^1.0.1",
- "rimraf": "^2.6.3",
- "ssri": "^6.0.1",
- "unique-filename": "^1.1.1",
- "y18n": "^4.0.0"
- }
- },
- "ssri": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz",
- "integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==",
- "requires": {
- "figgy-pudding": "^3.5.1"
- }
- }
- }
- },
"map-age-cleaner": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz",
@@ -4565,16 +4231,6 @@
"resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz",
"integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ=="
},
- "node-fetch-npm": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/node-fetch-npm/-/node-fetch-npm-2.0.2.tgz",
- "integrity": "sha512-nJIxm1QmAj4v3nfCvEeCrYSoVwXyxLnaPBK5W1W5DGEJwjlKuC2VEUycGw5oxk+4zZahRrB84PUJJgEmhFTDFw==",
- "requires": {
- "encoding": "^0.1.11",
- "json-parse-better-errors": "^1.0.0",
- "safe-buffer": "^5.1.1"
- }
- },
"node-gyp": {
"version": "5.0.7",
"resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-5.0.7.tgz",
@@ -4781,11 +4437,6 @@
}
}
},
- "npm-logical-tree": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/npm-logical-tree/-/npm-logical-tree-1.2.1.tgz",
- "integrity": "sha512-AJI/qxDB2PWI4LG1CYN579AY1vCiNyWfkiquCsJWqntRu/WwimVrC8yXeILBFHDwxfOejxewlmnvW9XXjMlYIg=="
- },
"npm-normalize-package-bin": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz",
@@ -5821,14 +5472,6 @@
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
"integrity": "sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk="
},
- "protoduck": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/protoduck/-/protoduck-5.0.1.tgz",
- "integrity": "sha512-WxoCeDCoCBY55BMvj4cAEjdVUFGRWed9ZxPlqTKYyw1nDDTQ4pqmnIMAGfJlg7Dx35uB/M+PHJPTmGOvaCaPTg==",
- "requires": {
- "genfun": "^5.0.0"
- }
- },
"prr": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz",
@@ -6339,25 +5982,6 @@
"smart-buffer": "^4.1.0"
}
},
- "socks-proxy-agent": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-4.0.2.tgz",
- "integrity": "sha512-NT6syHhI9LmuEMSK6Kd2V7gNv5KFZoLE7V5udWmn0de+3Mkj3UMA/AJPLyeNUVmElCurSHtUdM3ETpR3z770Wg==",
- "requires": {
- "agent-base": "~4.2.1",
- "socks": "~2.3.2"
- },
- "dependencies": {
- "agent-base": {
- "version": "4.2.1",
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz",
- "integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==",
- "requires": {
- "es6-promisify": "^5.0.0"
- }
- }
- }
- },
"sorted-object": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/sorted-object/-/sorted-object-2.0.1.tgz",
diff --git a/package.json b/package.json
index e53c3d45b..66c8348d3 100644
--- a/package.json
+++ b/package.json
@@ -76,7 +76,6 @@
"json-parse-better-errors": "^1.0.2",
"json-parse-even-better-errors": "^2.2.0",
"lazy-property": "~1.0.0",
- "libcipm": "^4.0.7",
"libnpmaccess": "^4.0.0",
"libnpmhook": "^6.0.0",
"libnpmorg": "^2.0.0",