Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/npm/cli.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDarcy Clarke <darcy@darcyclarke.me>2020-10-13 04:20:53 +0300
committerisaacs <i@izs.me>2020-10-13 07:08:26 +0300
commit365f2e7565d0cfde858a43d894a77fb3c6338bb7 (patch)
treed5e845f52fea7b8bc3b3ca790900fcdcfaa103da /node_modules/init-package-json
parentfafb348ef976116d47ada238beb258d5db5758a7 (diff)
read-package-json@3.0.0
Diffstat (limited to 'node_modules/init-package-json')
-rw-r--r--node_modules/init-package-json/node_modules/read-package-json/CHANGELOG.md41
-rw-r--r--node_modules/init-package-json/node_modules/read-package-json/LICENSE15
-rw-r--r--node_modules/init-package-json/node_modules/read-package-json/README.md151
-rw-r--r--node_modules/init-package-json/node_modules/read-package-json/package.json33
-rw-r--r--node_modules/init-package-json/node_modules/read-package-json/read-json.js469
5 files changed, 709 insertions, 0 deletions
diff --git a/node_modules/init-package-json/node_modules/read-package-json/CHANGELOG.md b/node_modules/init-package-json/node_modules/read-package-json/CHANGELOG.md
new file mode 100644
index 000000000..4b710cb2a
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/CHANGELOG.md
@@ -0,0 +1,41 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+<a name="2.1.2"></a>
+## [2.1.2](https://github.com/npm/read-package-json/compare/v2.1.1...v2.1.2) (2020-08-20)
+
+
+### Bug Fixes
+
+* even better json errors, remove graceful-fs ([fdbf082](https://github.com/npm/read-package-json/commit/fdbf082))
+
+
+
+<a name="2.1.1"></a>
+## [2.1.1](https://github.com/npm/read-package-json/compare/v2.1.0...v2.1.1) (2019-12-09)
+
+
+### Bug Fixes
+
+* normalize and sanitize pkg bin entries ([b8cb5fa](https://github.com/npm/read-package-json/commit/b8cb5fa))
+
+
+
+<a name="2.1.0"></a>
+# [2.1.0](https://github.com/npm/read-package-json/compare/v2.0.13...v2.1.0) (2019-08-13)
+
+
+### Features
+
+* support bundleDependencies: true ([76f6f42](https://github.com/npm/read-package-json/commit/76f6f42))
+
+
+
+<a name="2.0.13"></a>
+## [2.0.13](https://github.com/npm/read-package-json/compare/v2.0.12...v2.0.13) (2018-03-08)
+
+
+### Bug Fixes
+
+* **git:** support git packed refs --all mode ([#77](https://github.com/npm/read-package-json/issues/77)) ([1869940](https://github.com/npm/read-package-json/commit/1869940))
diff --git a/node_modules/init-package-json/node_modules/read-package-json/LICENSE b/node_modules/init-package-json/node_modules/read-package-json/LICENSE
new file mode 100644
index 000000000..052085c43
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/read-package-json/README.md b/node_modules/init-package-json/node_modules/read-package-json/README.md
new file mode 100644
index 000000000..da1f63dc8
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/README.md
@@ -0,0 +1,151 @@
+# read-package-json
+
+This is the thing that npm uses to read package.json files. It
+validates some stuff, and loads some default things.
+
+It keeps a cache of the files you've read, so that you don't end
+up reading the same package.json file multiple times.
+
+Note that if you just want to see what's literally in the package.json
+file, you can usually do `var data = require('some-module/package.json')`.
+
+This module is basically only needed by npm, but it's handy to see what
+npm will see when it looks at your package.
+
+## Usage
+
+```javascript
+var readJson = require('read-package-json')
+
+// readJson(filename, [logFunction=noop], [strict=false], cb)
+readJson('/path/to/package.json', console.error, false, function (er, data) {
+ if (er) {
+ console.error("There was an error reading the file")
+ return
+ }
+
+ console.error('the package data is', data)
+});
+```
+
+## readJson(file, [logFn = noop], [strict = false], cb)
+
+* `file` {String} The path to the package.json file
+* `logFn` {Function} Function to handle logging. Defaults to a noop.
+* `strict` {Boolean} True to enforce SemVer 2.0 version strings, and
+ other strict requirements.
+* `cb` {Function} Gets called with `(er, data)`, as is The Node Way.
+
+Reads the JSON file and does the things.
+
+## `package.json` Fields
+
+See `man 5 package.json` or `npm help json`.
+
+## readJson.log
+
+By default this is a reference to the `npmlog` module. But if that
+module can't be found, then it'll be set to just a dummy thing that does
+nothing.
+
+Replace with your own `{log,warn,error}` object for fun loggy time.
+
+## readJson.extras(file, data, cb)
+
+Run all the extra stuff relative to the file, with the parsed data.
+
+Modifies the data as it does stuff. Calls the cb when it's done.
+
+## readJson.extraSet = [fn, fn, ...]
+
+Array of functions that are called by `extras`. Each one receives the
+arguments `fn(file, data, cb)` and is expected to call `cb(er, data)`
+when done or when an error occurs.
+
+Order is indeterminate, so each function should be completely
+independent.
+
+Mix and match!
+
+## Other Relevant Files Besides `package.json`
+
+Some other files have an effect on the resulting data object, in the
+following ways:
+
+### `README?(.*)`
+
+If there is a `README` or `README.*` file present, then npm will attach
+a `readme` field to the data with the contents of this file.
+
+Owing to the fact that roughly 100% of existing node modules have
+Markdown README files, it will generally be assumed to be Markdown,
+regardless of the extension. Please plan accordingly.
+
+### `server.js`
+
+If there is a `server.js` file, and there is not already a
+`scripts.start` field, then `scripts.start` will be set to `node
+server.js`.
+
+### `AUTHORS`
+
+If there is not already a `contributors` field, then the `contributors`
+field will be set to the contents of the `AUTHORS` file, split by lines,
+and parsed.
+
+### `bindings.gyp`
+
+If a bindings.gyp file exists, and there is not already a
+`scripts.install` field, then the `scripts.install` field will be set to
+`node-gyp rebuild`.
+
+### `index.js`
+
+If the json file does not exist, but there is an `index.js` file
+present instead, and that file has a package comment, then it will try
+to parse the package comment, and use that as the data instead.
+
+A package comment looks like this:
+
+```javascript
+/**package
+ * { "name": "my-bare-module"
+ * , "version": "1.2.3"
+ * , "description": "etc...." }
+ **/
+
+// or...
+
+/**package
+{ "name": "my-bare-module"
+, "version": "1.2.3"
+, "description": "etc...." }
+**/
+```
+
+The important thing is that it starts with `/**package`, and ends with
+`**/`. If the package.json file exists, then the index.js is not
+parsed.
+
+### `{directories.man}/*.[0-9]`
+
+If there is not already a `man` field defined as an array of files or a
+single file, and
+there is a `directories.man` field defined, then that directory will
+be searched for manpages.
+
+Any valid manpages found in that directory will be assigned to the `man`
+array, and installed in the appropriate man directory at package install
+time, when installed globally on a Unix system.
+
+### `{directories.bin}/*`
+
+If there is not already a `bin` field defined as a string filename or a
+hash of `<name> : <filename>` pairs, then the `directories.bin`
+directory will be searched and all the files within it will be linked as
+executables at install time.
+
+When installing locally, npm links bins into `node_modules/.bin`, which
+is in the `PATH` environment variable when npm runs scripts. When
+installing globally, they are linked into `{prefix}/bin`, which is
+presumably in the `PATH` environment variable.
diff --git a/node_modules/init-package-json/node_modules/read-package-json/package.json b/node_modules/init-package-json/node_modules/read-package-json/package.json
new file mode 100644
index 000000000..988508baa
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/package.json
@@ -0,0 +1,33 @@
+{
+ "name": "read-package-json",
+ "version": "2.1.2",
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/npm/read-package-json.git"
+ },
+ "main": "read-json.js",
+ "scripts": {
+ "prerelease": "npm t",
+ "postrelease": "npm publish && git push --follow-tags",
+ "pretest": "standard",
+ "release": "standard-version -s",
+ "test": "tap --nyc-arg=--all --coverage test/*.js"
+ },
+ "dependencies": {
+ "glob": "^7.1.1",
+ "normalize-package-data": "^2.0.0",
+ "npm-normalize-package-bin": "^1.0.0",
+ "json-parse-even-better-errors": "^2.3.0"
+ },
+ "devDependencies": {
+ "standard": "^11.0.0",
+ "standard-version": "^4.3.0",
+ "tap": "^11.1.2"
+ },
+ "license": "ISC",
+ "files": [
+ "read-json.js"
+ ]
+}
diff --git a/node_modules/init-package-json/node_modules/read-package-json/read-json.js b/node_modules/init-package-json/node_modules/read-package-json/read-json.js
new file mode 100644
index 000000000..0e91e784e
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/read-package-json/read-json.js
@@ -0,0 +1,469 @@
+var fs = require('fs')
+
+var path = require('path')
+
+var glob = require('glob')
+var normalizeData = require('normalize-package-data')
+var safeJSON = require('json-parse-even-better-errors')
+var util = require('util')
+var normalizePackageBin = require('npm-normalize-package-bin')
+
// Public entry point (defined below).
module.exports = readJson

// put more stuff on here to customize.
// Each entry is an async step with signature fn(file, data, cb) that may
// augment `data`; they all run (concurrently) from extras().
readJson.extraSet = [
  bundleDependencies,
  gypfile,
  serverjs,
  scriptpath,
  authors,
  readme,
  mans,
  bins,
  githead
]

// Module-level state: packages that have already produced warnings
// (keyed by "name@version"), and parsed results keyed by raw file text.
var typoWarned = {}
var cache = {}
+
// Read and process a package.json file.
//
// readJson(file, [logFn = noop], [strict = false], cb)
// The two middle arguments are optional and may appear in either order:
// a boolean is taken as `strict`, a function as `log`. The final
// argument is always the callback, invoked as cb(er, data).
function readJson (file, log_, strict_, cb_) {
  var log = null
  var strict
  for (var idx = 1; idx < arguments.length - 1; idx++) {
    var arg = arguments[idx]
    if (typeof arg === 'boolean') {
      strict = arg
    } else if (typeof arg === 'function') {
      log = arg
    }
  }
  if (!log) log = function () {}
  var cb = arguments[arguments.length - 1]

  readJson_(file, log, strict, cb)
}
+
// Internal worker: read the raw file contents and hand everything off
// to the parser, which also handles the read error cases.
function readJson_ (file, log, strict, cb) {
  var onRead = function (er, contents) {
    parseJson(file, er, contents, log, strict, cb)
  }
  fs.readFile(file, 'utf8', onRead)
}
+
// Remove a leading byte order marker, if present. This catches EF BB BF
// (the UTF-8 BOM) because the buffer-to-string conversion in
// `fs.readFileSync()` translates it to FEFF, the UTF-16 BOM.
function stripBOM (content) {
  return content.charCodeAt(0) === 0xFEFF ? content.slice(1) : content
}
+
// Deep-clone a JSON-shaped value (nulls, primitives, arrays, plain
// objects). Used to isolate the module cache from caller mutation.
//
// BUG FIX: the original never returned the cloned array or object
// (falling through to an implicit `undefined`), and the object branch
// read `jsonClone[kk]` — a property of this *function*, always
// undefined — instead of recursing on `obj[kk]`. As a result any
// object/array "clone" was undefined, which also silently disabled the
// parse cache (cache entries were always falsy).
function jsonClone (obj) {
  if (obj == null) {
    return obj
  } else if (Array.isArray(obj)) {
    var newarr = new Array(obj.length)
    for (var ii in obj) {
      newarr[ii] = jsonClone(obj[ii])
    }
    return newarr
  } else if (typeof obj === 'object') {
    var newobj = {}
    for (var kk in obj) {
      newobj[kk] = jsonClone(obj[kk])
    }
    return newobj
  } else {
    return obj
  }
}
+
// Turn the raw readFile result into parsed package data. Handles the
// missing-file fallback to index.js package comments, the content
// cache, and JSON parse failures.
function parseJson (file, er, d, log, strict, cb) {
  if (er && er.code === 'ENOENT') {
    return fs.stat(path.dirname(file), function (err, stat) {
      if (!err && stat && !stat.isDirectory()) {
        // ENOTDIR isn't used on Windows, but npm expects it.
        er = Object.create(er)
        er.code = 'ENOTDIR'
        return cb(er)
      } else {
        // File missing but the parent dir exists: maybe an index.js
        // with a /**package ... **/ comment can stand in.
        return indexjs(file, er, log, strict, cb)
      }
    })
  }
  if (er) return cb(er)

  // Cache hit (keyed by raw content): return a clone so callers
  // cannot mutate the cached copy.
  if (cache[d]) return cb(null, jsonClone(cache[d]))

  var data

  try {
    data = safeJSON(stripBOM(d))
  } catch (er) {
    // Not valid JSON: maybe we were pointed at an index.js that
    // carries a package comment.
    data = parseIndex(d)
    if (!data) return cb(parseError(er, file))
  }

  extrasCached(file, d, data, log, strict, cb)
}
+
// Run the extras pipeline over `data`, then memoize the finished
// result in the module cache (keyed by the raw file text `d`) so a
// later read of identical content is cheap.
function extrasCached (file, d, data, log, strict, cb) {
  extras(file, data, log, strict, function (err, result) {
    if (!err) cache[d] = jsonClone(result)
    cb(err, result)
  })
}
+
// Fallback used when package.json is missing: try to extract package
// data from a /**package ... **/ comment in a sibling index.js.
// `er` is the original ENOENT, reported if the fallback also fails.
function indexjs (file, er, log, strict, cb) {
  // Avoid recursing if the missing file *was* index.js.
  if (path.basename(file) === 'index.js') return cb(er)

  var index = path.resolve(path.dirname(file), 'index.js')
  fs.readFile(index, 'utf8', function (er2, d) {
    // No readable index.js either: report the original error.
    if (er2) return cb(er)

    // NOTE(review): unlike parseJson, this cache hit is returned
    // without jsonClone — callers could mutate the cached object.
    if (cache[d]) return cb(null, cache[d])

    var data = parseIndex(d)
    if (!data) return cb(er)

    extrasCached(file, d, data, log, strict, cb)
  })
}
+
readJson.extras = extras
// Run every function in readJson.extraSet against (file, data), then
// finish with normalization via final(). The `log` and `strict`
// arguments are optional and may appear in either order; the last
// argument is always the callback.
function extras (file, data, log_, strict_, cb_) {
  var log, strict, cb
  for (var i = 2; i < arguments.length - 1; i++) {
    if (typeof arguments[i] === 'boolean') {
      strict = arguments[i]
    } else if (typeof arguments[i] === 'function') {
      log = arguments[i]
    }
  }

  if (!log) log = function () {}
  // After the loop, i === arguments.length - 1: the callback slot.
  cb = arguments[i]

  var set = readJson.extraSet
  var n = set.length
  var errState = null
  // All extras run concurrently; order is indeterminate by design
  // (see README), so each must be independent of the others.
  set.forEach(function (fn) {
    fn(file, data, then)
  })

  // Countdown latch: calls final() once every extra has reported in,
  // or fails fast on the first error (later callbacks are ignored).
  function then (er) {
    if (errState) return
    if (er) return cb(errState = er)
    if (--n > 0) return
    final(file, data, log, strict, cb)
  }
}
+
// Clean up each entry in data.scripts, stripping any leading
// "node_modules/.bin/" path prefix from the commands.
function scriptpath (file, data, cb) {
  var scripts = data.scripts
  if (!scripts) return cb(null, data)
  Object.keys(scripts).forEach(scriptpath_, scripts)
  cb(null, data)
}
+
// Per-key worker for scriptpath; `this` is the scripts object
// (passed as forEach's thisArg).
function scriptpath_ (key) {
  var cmd = this[key]
  // Non-string script values are never allowed, and only cause problems.
  if (typeof cmd !== 'string') {
    return delete this[key]
  }
  var binPrefix = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
  if (binPrefix.test(cmd)) {
    this[key] = cmd.replace(binPrefix, '')
  }
}
+
// If the package ships a *.gyp file and defines no install/preinstall
// script, default scripts.install to "node-gyp rebuild".
function gypfile (file, data, cb) {
  var dir = path.dirname(file)
  var scripts = data.scripts || {}
  if (scripts.install || scripts.preinstall) return cb(null, data)

  glob('*.gyp', { cwd: dir }, function (er, files) {
    if (er) return cb(er)
    // An explicit `gypfile: false` opts out of the default.
    if (data.gypfile === false) return cb(null, data)
    gypfile_(file, data, files, cb)
  })
}
+
// Apply the gyp default once the glob results are in: any match sets
// scripts.install and flags data.gypfile.
function gypfile_ (file, data, files, cb) {
  if (files.length === 0) return cb(null, data)
  var scripts = data.scripts || {}
  scripts.install = 'node-gyp rebuild'
  data.scripts = scripts
  data.gypfile = true
  return cb(null, data)
}
+
// If a server.js file exists and no start script is defined, default
// scripts.start to "node server.js".
function serverjs (file, data, cb) {
  var dir = path.dirname(file)
  var scripts = data.scripts || {}
  if (scripts.start) return cb(null, data)
  glob('server.js', { cwd: dir }, function (er, files) {
    if (er) return cb(er)
    serverjs_(file, data, files, cb)
  })
}
+
// Apply the server.js default once the glob results are in.
function serverjs_ (file, data, files, cb) {
  if (files.length === 0) return cb(null, data)
  var scripts = data.scripts || {}
  scripts.start = 'node server.js'
  data.scripts = scripts
  return cb(null, data)
}
+
// Fill data.contributors from a sibling AUTHORS file, unless the field
// is already set. A missing/unreadable AUTHORS file is not an error.
function authors (file, data, cb) {
  if (data.contributors) return cb(null, data)
  var authorsFile = path.resolve(path.dirname(file), 'AUTHORS')
  fs.readFile(authorsFile, 'utf8', function (er, contents) {
    // ignore error. just checking it.
    if (er) return cb(null, data)
    authors_(file, data, contents, cb)
  })
}
+
// Parse AUTHORS file contents: split on line breaks, blank out "#"
// comment lines, trim whitespace, and drop empty lines. The surviving
// lines become data.contributors.
function authors_ (file, data, ad, cb) {
  var lines = ad.split(/\r?\n/g)
  var cleaned = []
  for (var i = 0; i < lines.length; i++) {
    var line = lines[i].replace(/^\s*#.*$/, '').trim()
    if (line) cleaned.push(line)
  }
  data.contributors = cleaned
  return cb(null, data)
}
+
// Attach data.readme (and readmeFilename) from a README file found
// next to the package.json, unless data.readme is already set.
function readme (file, data, cb) {
  if (data.readme) return cb(null, data)
  var dir = path.dirname(file)
  var globOpts = { cwd: dir, nocase: true, mark: true }
  glob('{README,README.*}', globOpts, function (er, files) {
    if (er) return cb(er)
    // `mark: true` suffixes directories with "/"; don't accept those.
    var regular = files.filter(function (f) {
      return !f.match(/\/$/)
    })
    // NOTE: when nothing is found the callback gets no data argument.
    if (!regular.length) return cb()
    var chosen = preferMarkdownReadme(regular)
    readme_(file, data, path.resolve(dir, chosen), cb)
  })
}
+
// Pick the best README candidate: the first markdown-looking one
// (".md", ".markdown", and friends — each letter of "markdown" is
// optional in order), else a bare "README", else the first match.
function preferMarkdownReadme (files) {
  var markdownish = /\.m?a?r?k?d?o?w?n?$/i
  var fallback = 0
  for (var i = 0; i < files.length; i++) {
    if (markdownish.test(files[i])) {
      return files[i]
    }
    if (/README$/.test(files[i])) {
      fallback = i
    }
  }
  return files[fallback]
}
+
// Load the chosen README file and record its contents plus basename.
// An unreadable README is silently skipped (callback gets no data).
function readme_ (file, data, rm, cb) {
  var readmeFilename = path.basename(rm)
  fs.readFile(rm, 'utf8', function (er, contents) {
    // maybe not readable, or something.
    if (er) return cb()
    data.readme = contents
    data.readmeFilename = readmeFilename
    return cb(er, data)
  })
}
+
// Populate data.man by scanning directories.man for manpage files
// (anything ending in a single digit), unless data.man already exists
// or no man directory is declared.
function mans (file, data, cb) {
  var manDir = data.directories && data.directories.man
  if (data.man || !manDir) return cb(null, data)
  manDir = path.resolve(path.dirname(file), manDir)
  glob('**/*.[0-9]', { cwd: manDir }, function (er, mans) {
    if (er) return cb(er)
    mans_(file, data, mans, cb)
  })
}
+
// Resolve each discovered manpage path against the package directory
// and the declared man directory, storing the result on data.man.
function mans_ (file, data, mans, cb) {
  var manDir = data.directories && data.directories.man
  var pkgDir = path.dirname(file)
  data.man = mans.map(function (mf) {
    return path.resolve(pkgDir, manDir, mf)
  })
  return cb(null, data)
}
+
// Normalize any existing bin entries, then — if no bin field exists
// but directories.bin is declared — scan that directory for
// executables to link.
function bins (file, data, cb) {
  data = normalizePackageBin(data)

  var binDir = data.directories && data.directories.bin
  if (data.bin || !binDir) return cb(null, data)

  binDir = path.resolve(path.dirname(file), binDir)
  glob('**', { cwd: binDir }, function (er, bins) {
    if (er) return cb(er)
    bins_(file, data, bins, cb)
  })
}
+
// Build the bin map from the directory scan: every non-hidden file
// becomes { basename: relative-path }, then the whole data object is
// re-normalized.
function bins_ (file, data, bins, cb) {
  var binDir = (data.directories && data.directories.bin) || '.'
  var binHash = {}
  bins.forEach(function (mf) {
    // Skip empty entries and dotfiles.
    if (mf && mf.charAt(0) !== '.') {
      binHash[path.basename(mf)] = path.join(binDir, mf)
    }
  })
  data.bin = binHash
  return cb(null, normalizePackageBin(data))
}
+
// Canonicalize bundled-dependency declarations onto the
// "bundleDependencies" key. The misspelled "bundledDependencies" is
// accepted as an alias and removed. `true` expands to every key of
// `dependencies`, `false` and any non-array value are dropped.
function bundleDependencies (file, data, cb) {
  var canonical = 'bundleDependencies'
  var alias = 'bundledDependencies'
  // normalize key name
  if (data[alias] !== undefined) {
    if (data[canonical] === undefined) data[canonical] = data[alias]
    delete data[alias]
  }
  if (data[canonical] === false) {
    delete data[canonical]
  } else if (data[canonical] === true) {
    data[canonical] = Object.keys(data.dependencies || {})
  } else if (data[canonical] !== undefined && !Array.isArray(data[canonical])) {
    delete data[canonical]
  }
  return cb(null, data)
}
+
// Record the current git commit hash as data.gitHead by reading
// .git/HEAD next to the package. A missing or unreadable HEAD is not
// an error; the field is simply left unset.
function githead (file, data, cb) {
  if (data.gitHead) return cb(null, data)
  var dir = path.dirname(file)
  var headFile = path.resolve(dir, '.git/HEAD')
  fs.readFile(headFile, 'utf8', function (er, head) {
    if (er) return cb(null, data)
    githead_(file, data, dir, head, cb)
  })
}
+
// Resolve the commit hash that HEAD points to. Handles three cases:
// a detached HEAD (hash directly in the file), a symbolic ref whose
// loose ref file exists under .git/, and finally .git/packed-refs.
function githead_ (file, data, dir, head, cb) {
  if (!head.match(/^ref: /)) {
    // Detached HEAD: the file contains the hash itself.
    data.gitHead = head.trim()
    return cb(null, data)
  }
  var headRef = head.replace(/^ref: /, '').trim()
  var headFile = path.resolve(dir, '.git', headRef)
  fs.readFile(headFile, 'utf8', function (er, head) {
    if (er || !head) {
      // Loose ref file missing: the ref may live in packed-refs.
      var packFile = path.resolve(dir, '.git/packed-refs')
      return fs.readFile(packFile, 'utf8', function (er, refs) {
        if (er || !refs) {
          return cb(null, data)
        }
        refs = refs.split('\n')
        for (var i = 0; i < refs.length; i++) {
          // Each packed-refs line is "<40-hex-char sha> <ref name>".
          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
          if (match && match[2].trim() === headRef) {
            data.gitHead = match[1]
            break
          }
        }
        return cb(null, data)
      })
    }
    head = head.replace(/^ref: /, '').trim()
    data.gitHead = head
    return cb(null, data)
  })
}
+
/**
 * Warn if the bin references don't point to anything. This might be better in
 * normalize-package-data if it had access to the file path.
 *
 * Calls `warn(msg)` for every bin entry whose file does not exist (or
 * whose value is not a string), then invokes `cb()` once all entries
 * have been checked. Never fails: existence is checked best-effort.
 */
function checkBinReferences_ (file, data, warn, cb) {
  // Bins are normalized to { name: path } object form before this runs.
  if (!(data.bin instanceof Object)) return cb()

  var keys = Object.keys(data.bin)
  var keysLeft = keys.length
  if (!keysLeft) return cb()

  // Countdown: invoked once per key; fires cb after the last one.
  function handleExists (relName, result) {
    keysLeft--
    if (!result) warn('No bin file found at ' + relName)
    if (!keysLeft) cb()
  }

  keys.forEach(function (key) {
    var dirName = path.dirname(file)
    var relName = data.bin[key]
    /* istanbul ignore if - impossible, bins have been normalized */
    if (typeof relName !== 'string') {
      var msg = 'Bin filename for ' + key +
        ' is not a string: ' + util.inspect(relName)
      warn(msg)
      delete data.bin[key]
      // Pass true so no extra "not found" warning is emitted.
      handleExists(relName, true)
      return
    }
    var binPath = path.resolve(dirName, relName)
    fs.stat(binPath, (err) => handleExists(relName, !err))
  })
}
+
// Final step of the extras pipeline: normalize the data via
// normalize-package-data, verify bin references exist on disk, and
// mark the package as warned-about so repeat reads stay quiet.
function final (file, data, log, strict, cb) {
  var pId = makePackageId(data)

  // Forward warnings through `log`, at most once per "name@version".
  function warn (msg) {
    if (typoWarned[pId]) return
    if (log) log('package.json', pId, msg)
  }

  try {
    normalizeData(data, warn, strict)
  } catch (error) {
    // Normalization failures (e.g. invalid fields in strict mode)
    // are reported to the caller.
    return cb(error)
  }

  checkBinReferences_(file, data, warn, function () {
    typoWarned[pId] = true
    cb(null, data)
  })
}
+
// Build the "name@version" identifier used to dedupe warnings.
// Missing or non-string parts contribute an empty string (the
// cleanString logic is inlined here).
function makePackageId (data) {
  var name = data.name
  var version = data.version
  var cleanName = (!name || typeof name !== 'string') ? '' : name.trim()
  var cleanVersion = (!version || typeof version !== 'string') ? '' : version.trim()
  return cleanName + '@' + cleanVersion
}
+
// Trim a string; anything falsy or non-string becomes ''.
function cleanString (str) {
  if (!str || typeof str !== 'string') return ''
  return str.trim()
}
+
// /**package { "name": "foo", "version": "1.2.3", ... } **/
// Extract a /**package ... **/ comment from index.js source text and
// parse the JSON inside it. Returns null when the comment is absent,
// unterminated, or unparseable.
function parseIndex (data) {
  var afterOpen = data.split(/^\/\*\*package(?:\s|$)/m)
  if (afterOpen.length < 2) return null
  var beforeClose = afterOpen[1].split(/\*\*\/$/m)
  if (beforeClose.length < 2) return null
  // Strip the leading "*" decoration from each comment line.
  var json = beforeClose[0].replace(/^\s*\*/mg, '')

  try {
    return safeJSON(json)
  } catch (er) {
    return null
  }
}
+
// Wrap a JSON parse exception in the error shape npm expects
// (code EJSONPARSE, with the offending file attached).
function parseError (ex, file) {
  var wrapped = new Error('Failed to parse json\n' + ex.message)
  wrapped.code = 'EJSONPARSE'
  wrapped.file = file
  return wrapped
}