
github.com/npm/cli.git
commit    ef6a53a50425b47ff227172ba7b621a1594e7f06
tree      3423520447b91170cbbf36e4f141e9835281cdf3 /node_modules/tar
parent    9a8bc875e93371f2ffc5b5365d2f5d477377ca18
author    isaacs <i@izs.me>  2013-07-24 07:46:54 +0400
committer isaacs <i@izs.me>  2013-07-24 07:46:54 +0400

bump all deps to use inherits@2
Diffstat (limited to 'node_modules/tar')
-rw-r--r--  node_modules/tar/README.md                      27
-rw-r--r--  node_modules/tar/lib/buffer-entry.js             4
-rw-r--r--  node_modules/tar/lib/entry.js                  273
-rw-r--r--  node_modules/tar/lib/extended-header-writer.js   1
-rw-r--r--  node_modules/tar/lib/extended-header.js          3
-rw-r--r--  node_modules/tar/package.json                   15
6 files changed, 160 insertions, 163 deletions
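
Every file in this diff follows the same migration: inherits@1 accepted a third argument of prototype properties, while inherits@2 mirrors Node's util.inherits and takes only the constructor and the super constructor, so methods are now assigned on the prototype explicitly after the call. A minimal sketch of the before/after shape, using a hypothetical Child class rather than the real tar classes:

```js
var inherits = require("inherits")
var Stream = require("stream").Stream

function Child () {
  Stream.call(this)
}

// inherits@1 style (what this commit removes): prototype methods were
// passed as a third argument.
//   inherits(Child, Stream, { write: function (c) { /* ... */ } })

// inherits@2 style (what this commit switches to): two arguments only,
// followed by explicit prototype assignments.
inherits(Child, Stream)

Child.prototype.write = function (c) {
  this.emit("data", c)
  return true
}
```

The entry.js and extended-header.js hunks below are this same transformation applied to larger method bags.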
diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md
index 7cfe3bbca..c6b0f829e 100644
--- a/node_modules/tar/README.md
+++ b/node_modules/tar/README.md
@@ -7,28 +7,25 @@ Tar for Node.js.
1. Be able to parse and reasonably extract the contents of any tar file
created by any program that creates tar files, period.
- At least, this includes every version of:
+ At least, this includes every version of:
- * bsdtar
- * gnutar
- * solaris posix tar
- * Joerg Schilling's star ("Schilly tar")
+ * bsdtar
+ * gnutar
+ * solaris posix tar
+ * Joerg Schilling's star ("Schilly tar")
-2. Create tar files that can be extracted by any of the following tar
- programs:
+2. Create tar files that can be extracted by any of the following tar programs:
- * bsdtar/libarchive version 2.6.2
- * gnutar 1.15 and above
- * SunOS Posix tar
- * Joerg Schilling's star ("Schilly tar")
+ * bsdtar/libarchive version 2.6.2
+ * gnutar 1.15 and above
+ * SunOS Posix tar
+ * Joerg Schilling's star ("Schilly tar")
-3. 100% test coverage. Speed is important. Correctness is slightly
- more important.
+3. 100% test coverage. Speed is important. Correctness is slightly more important.
4. Create the kind of tar interface that Node users would want to use.
-5. Satisfy npm's needs for a portable tar implementation with a
- JavaScript interface.
+5. Satisfy npm's needs for a portable tar implementation with a JavaScript interface.
6. No excuses. No complaining. No tolerance for failure.
diff --git a/node_modules/tar/lib/buffer-entry.js b/node_modules/tar/lib/buffer-entry.js
index 5322a28ab..6c1da2373 100644
--- a/node_modules/tar/lib/buffer-entry.js
+++ b/node_modules/tar/lib/buffer-entry.js
@@ -20,11 +20,11 @@ function BufferEntry () {
})
}
+inherits(BufferEntry, Entry)
+
// collect the bytes as they come in.
BufferEntry.prototype.write = function (c) {
c.copy(this._buffer, this._offset)
this._offset += c.length
Entry.prototype.write.call(this, c)
}
-
-inherits(BufferEntry, Entry)
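
The reordering above is not cosmetic. At the time of this commit, inherits@2 delegated to Node's util.inherits, which replaces the constructor's prototype with a fresh object created from the super constructor's prototype, so any methods assigned before the inherits() call are silently lost. That is why inherits(BufferEntry, Entry) now comes before the write() definition. A small hypothetical Parent/Child sketch of the failure mode:

```js
var inherits = require("inherits")

function Parent () {}
Parent.prototype.greet = function () { return "parent" }

function Child () {}

// Wrong order under inherits@2: the method lands on the original
// prototype object, which inherits() then throws away.
//   Child.prototype.greet = function () { return "child" }
//   inherits(Child, Parent)   // greet now resolves to Parent's again
//
// Right order, matching the hunk above: establish inheritance first,
// then attach methods to the new prototype.
inherits(Child, Parent)
Child.prototype.greet = function () { return "child" }

console.log(new Child().greet())  // "child"
```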
diff --git a/node_modules/tar/lib/entry.js b/node_modules/tar/lib/entry.js
index df72313bc..4af5c4108 100644
--- a/node_modules/tar/lib/entry.js
+++ b/node_modules/tar/lib/entry.js
@@ -45,168 +45,169 @@ function Entry (header, extended, global) {
this._setProps()
}
-inherits(Entry, Stream,
-{ write: function (c) {
- if (this._ending) this.error("write() after end()", null, true)
- if (this._remaining === 0) {
- this.error("invalid bytes past eof")
- }
+inherits(Entry, Stream)
- // often we'll get a bunch of \0 at the end of the last write,
- // since chunks will always be 512 bytes when reading a tarball.
- if (c.length > this._remaining) {
- c = c.slice(0, this._remaining)
- }
- this._remaining -= c.length
+Entry.prototype.write = function (c) {
+ if (this._ending) this.error("write() after end()", null, true)
+ if (this._remaining === 0) {
+ this.error("invalid bytes past eof")
+ }
- // put it on the stack.
- var ql = this._queueLen
- this._queue.push(c)
- this._queueLen ++
+ // often we'll get a bunch of \0 at the end of the last write,
+ // since chunks will always be 512 bytes when reading a tarball.
+ if (c.length > this._remaining) {
+ c = c.slice(0, this._remaining)
+ }
+ this._remaining -= c.length
- this._read()
+ // put it on the stack.
+ var ql = this._queueLen
+ this._queue.push(c)
+ this._queueLen ++
- // either paused, or buffered
- if (this._paused || ql > 0) {
- this._needDrain = true
- return false
- }
+ this._read()
- return true
+ // either paused, or buffered
+ if (this._paused || ql > 0) {
+ this._needDrain = true
+ return false
}
-, end: function (c) {
- if (c) this.write(c)
- this._ending = true
- this._read()
- }
+ return true
+}
-, pause: function () {
- this._paused = true
- this.emit("pause")
- }
+Entry.prototype.end = function (c) {
+ if (c) this.write(c)
+ this._ending = true
+ this._read()
+}
-, resume: function () {
- // console.error(" Tar Entry resume", this.path)
- this.emit("resume")
- this._paused = false
- this._read()
- return this._queueLen - this._index > 1
- }
+Entry.prototype.pause = function () {
+ this._paused = true
+ this.emit("pause")
+}
+
+Entry.prototype.resume = function () {
+ // console.error(" Tar Entry resume", this.path)
+ this.emit("resume")
+ this._paused = false
+ this._read()
+ return this._queueLen - this._index > 1
+}
// This is bound to the instance
-, _read: function () {
- // console.error(" Tar Entry _read", this.path)
+Entry.prototype._read = function () {
+ // console.error(" Tar Entry _read", this.path)
- if (this._paused || this._reading || this._ended) return
+ if (this._paused || this._reading || this._ended) return
- // set this flag so that event handlers don't inadvertently
- // get multiple _read() calls running.
- this._reading = true
+ // set this flag so that event handlers don't inadvertently
+ // get multiple _read() calls running.
+ this._reading = true
- // have any data to emit?
- while (this._index < this._queueLen && !this._paused) {
- var chunk = this._queue[this._index ++]
- this.emit("data", chunk)
- }
+ // have any data to emit?
+ while (this._index < this._queueLen && !this._paused) {
+ var chunk = this._queue[this._index ++]
+ this.emit("data", chunk)
+ }
- // check if we're drained
- if (this._index >= this._queueLen) {
- this._queue.length = this._queueLen = this._index = 0
- if (this._needDrain) {
- this._needDrain = false
- this.emit("drain")
- }
- if (this._ending) {
- this._ended = true
- this.emit("end")
- }
+ // check if we're drained
+ if (this._index >= this._queueLen) {
+ this._queue.length = this._queueLen = this._index = 0
+ if (this._needDrain) {
+ this._needDrain = false
+ this.emit("drain")
}
-
- // if the queue gets too big, then pluck off whatever we can.
- // this should be fairly rare.
- var mql = this._maxQueueLen
- if (this._queueLen > mql && this._index > 0) {
- mql = Math.min(this._index, mql)
- this._index -= mql
- this._queueLen -= mql
- this._queue = this._queue.slice(mql)
+ if (this._ending) {
+ this._ended = true
+ this.emit("end")
}
+ }
- this._reading = false
+ // if the queue gets too big, then pluck off whatever we can.
+ // this should be fairly rare.
+ var mql = this._maxQueueLen
+ if (this._queueLen > mql && this._index > 0) {
+ mql = Math.min(this._index, mql)
+ this._index -= mql
+ this._queueLen -= mql
+ this._queue = this._queue.slice(mql)
}
-, _setProps: function () {
- // props = extended->global->header->{}
- var header = this._header
- , extended = this._extended
- , global = this._global
- , props = this.props
-
- // first get the values from the normal header.
- var fields = tar.fields
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , val = header[field]
- if (typeof val !== "undefined") props[field] = val
- }
+ this._reading = false
+}
- // next, the global header for this file.
- // numeric values, etc, will have already been parsed.
- ;[global, extended].forEach(function (p) {
- Object.keys(p).forEach(function (f) {
- if (typeof p[f] !== "undefined") props[f] = p[f]
- })
- })
+Entry.prototype._setProps = function () {
+ // props = extended->global->header->{}
+ var header = this._header
+ , extended = this._extended
+ , global = this._global
+ , props = this.props
+
+ // first get the values from the normal header.
+ var fields = tar.fields
+ for (var f = 0; fields[f] !== null; f ++) {
+ var field = fields[f]
+ , val = header[field]
+ if (typeof val !== "undefined") props[field] = val
+ }
- // no nulls allowed in path or linkpath
- ;["path", "linkpath"].forEach(function (p) {
- if (props.hasOwnProperty(p)) {
- props[p] = props[p].split("\0")[0]
- }
+ // next, the global header for this file.
+ // numeric values, etc, will have already been parsed.
+ ;[global, extended].forEach(function (p) {
+ Object.keys(p).forEach(function (f) {
+ if (typeof p[f] !== "undefined") props[f] = p[f]
})
+ })
+ // no nulls allowed in path or linkpath
+ ;["path", "linkpath"].forEach(function (p) {
+ if (props.hasOwnProperty(p)) {
+ props[p] = props[p].split("\0")[0]
+ }
+ })
- // set date fields to be a proper date
- ;["mtime", "ctime", "atime"].forEach(function (p) {
- if (props.hasOwnProperty(p)) {
- props[p] = new Date(props[p] * 1000)
- }
- })
- // set the type so that we know what kind of file to create
- var type
- switch (tar.types[props.type]) {
- case "OldFile":
- case "ContiguousFile":
- type = "File"
- break
-
- case "GNUDumpDir":
- type = "Directory"
- break
-
- case undefined:
- type = "Unknown"
- break
-
- case "Link":
- case "SymbolicLink":
- case "CharacterDevice":
- case "BlockDevice":
- case "Directory":
- case "FIFO":
- default:
- type = tar.types[props.type]
+ // set date fields to be a proper date
+ ;["mtime", "ctime", "atime"].forEach(function (p) {
+ if (props.hasOwnProperty(p)) {
+ props[p] = new Date(props[p] * 1000)
}
+ })
- this.type = type
- this.path = props.path
- this.size = props.size
-
- // size is special, since it signals when the file needs to end.
- this._remaining = props.size
+ // set the type so that we know what kind of file to create
+ var type
+ switch (tar.types[props.type]) {
+ case "OldFile":
+ case "ContiguousFile":
+ type = "File"
+ break
+
+ case "GNUDumpDir":
+ type = "Directory"
+ break
+
+ case undefined:
+ type = "Unknown"
+ break
+
+ case "Link":
+ case "SymbolicLink":
+ case "CharacterDevice":
+ case "BlockDevice":
+ case "Directory":
+ case "FIFO":
+ default:
+ type = tar.types[props.type]
}
-, warn: fstream.warn
-, error: fstream.error
-})
+
+ this.type = type
+ this.path = props.path
+ this.size = props.size
+
+ // size is special, since it signals when the file needs to end.
+ this._remaining = props.size
+}
+
+Entry.prototype.warn = fstream.warn
+Entry.prototype.error = fstream.error
diff --git a/node_modules/tar/lib/extended-header-writer.js b/node_modules/tar/lib/extended-header-writer.js
index a130c5b16..1728c4583 100644
--- a/node_modules/tar/lib/extended-header-writer.js
+++ b/node_modules/tar/lib/extended-header-writer.js
@@ -8,7 +8,6 @@ inherits(ExtendedHeaderWriter, EntryWriter)
var tar = require("../tar.js")
, path = require("path")
- , inherits = require("inherits")
, TarHeader = require("./header.js")
// props is the props of the thing we need to write an
diff --git a/node_modules/tar/lib/extended-header.js b/node_modules/tar/lib/extended-header.js
index 4346d6c59..74f432cee 100644
--- a/node_modules/tar/lib/extended-header.js
+++ b/node_modules/tar/lib/extended-header.js
@@ -30,7 +30,8 @@ function ExtendedHeader () {
this._key = ""
}
-inherits(ExtendedHeader, Entry, { _parse: parse })
+inherits(ExtendedHeader, Entry)
+ExtendedHeader.prototype._parse = parse
var s = 0
, states = ExtendedHeader.states = {}
diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json
index 36eae3de6..b31410019 100644
--- a/node_modules/tar/package.json
+++ b/node_modules/tar/package.json
@@ -6,7 +6,7 @@
},
"name": "tar",
"description": "tar for node",
- "version": "0.1.17",
+ "version": "0.1.18",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-tar.git"
@@ -16,7 +16,7 @@
"test": "tap test/*.js"
},
"dependencies": {
- "inherits": "1.x",
+ "inherits": "2",
"block-stream": "*",
"fstream": "~0.1.8"
},
@@ -25,12 +25,11 @@
"rimraf": "1.x"
},
"license": "BSD",
- "readme": "# node-tar\n\nTar for Node.js.\n\n## Goals of this project\n\n1. Be able to parse and reasonably extract the contents of any tar file\n created by any program that creates tar files, period.\n\n At least, this includes every version of:\n\n * bsdtar\n * gnutar\n * solaris posix tar\n * Joerg Schilling's star (\"Schilly tar\")\n\n2. Create tar files that can be extracted by any of the following tar\n programs:\n\n * bsdtar/libarchive version 2.6.2\n * gnutar 1.15 and above\n * SunOS Posix tar\n * Joerg Schilling's star (\"Schilly tar\")\n\n3. 100% test coverage. Speed is important. Correctness is slightly\n more important.\n\n4. Create the kind of tar interface that Node users would want to use.\n\n5. Satisfy npm's needs for a portable tar implementation with a\n JavaScript interface.\n\n6. No excuses. No complaining. No tolerance for failure.\n\n## But isn't there already a tar.js?\n\nYes, there are a few. This one is going to be better, and it will be\nfanatically maintained, because npm will depend on it.\n\nThat's why I need to write it from scratch. Creating and extracting\ntarballs is such a large part of what npm does, I simply can't have it\nbe a black box any longer.\n\n## Didn't you have something already? Where'd it go?\n\nIt's in the \"old\" folder. It's not functional. Don't use it.\n\nIt was a useful exploration to learn the issues involved, but like most\nsoftware of any reasonable complexity, node-tar won't be useful until\nit's been written at least 3 times.\n",
+ "readme": "# node-tar\n\nTar for Node.js.\n\n## Goals of this project\n\n1. Be able to parse and reasonably extract the contents of any tar file\n created by any program that creates tar files, period.\n\n At least, this includes every version of:\n\n * bsdtar\n * gnutar\n * solaris posix tar\n * Joerg Schilling's star (\"Schilly tar\")\n\n2. Create tar files that can be extracted by any of the following tar programs:\n\n * bsdtar/libarchive version 2.6.2\n * gnutar 1.15 and above\n * SunOS Posix tar\n * Joerg Schilling's star (\"Schilly tar\")\n\n3. 100% test coverage. Speed is important. Correctness is slightly more important.\n\n4. Create the kind of tar interface that Node users would want to use.\n\n5. Satisfy npm's needs for a portable tar implementation with a JavaScript interface.\n\n6. No excuses. No complaining. No tolerance for failure.\n\n## But isn't there already a tar.js?\n\nYes, there are a few. This one is going to be better, and it will be\nfanatically maintained, because npm will depend on it.\n\nThat's why I need to write it from scratch. Creating and extracting\ntarballs is such a large part of what npm does, I simply can't have it\nbe a black box any longer.\n\n## Didn't you have something already? Where'd it go?\n\nIt's in the \"old\" folder. It's not functional. Don't use it.\n\nIt was a useful exploration to learn the issues involved, but like most\nsoftware of any reasonable complexity, node-tar won't be useful until\nit's been written at least 3 times.\n",
"readmeFilename": "README.md",
- "_id": "tar@0.1.17",
- "dist": {
- "shasum": "408c8a95deb8e78a65b59b1a51a333183a32badc"
+ "bugs": {
+ "url": "https://github.com/isaacs/node-tar/issues"
},
- "_from": "tar@0.1.17",
- "_resolved": "https://registry.npmjs.org/tar/-/tar-0.1.17.tgz"
+ "_id": "tar@0.1.18",
+ "_from": "tar@latest"
}
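
One note on the dependency change itself: the range "2" in package.json is semver shorthand for "2.x", i.e. any 2.y.z release of inherits. A quick check with the semver package (used here only for illustration; it is not part of this diff):

```js
var semver = require("semver")

// "2" and "2.x" both accept any release on the 2.y.z line and nothing else.
console.log(semver.satisfies("2.0.1", "2"))    // true
console.log(semver.satisfies("1.3.2", "2"))    // false
console.log(semver.satisfies("3.0.0", "2.x"))  // false
```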