github.com/npm/cli.git
author     isaacs <i@izs.me>  2021-01-07 23:11:21 +0300
committer  isaacs <i@izs.me>  2021-01-07 23:11:21 +0300
commit     7a49fd4afc8cd24db40aee008031ea648583d0bc (patch)
tree       16e0371f2fd0752e9d9ea45c61a8b2b8bda5b179 /node_modules/tar
parent     7282329512a729d05c630583c52a085bc9ecc03b (diff)
tar@6.1.0 pacote@11.1.14
Diffstat (limited to 'node_modules/tar')
-rw-r--r--  node_modules/tar/README.md                  17
-rw-r--r--  node_modules/tar/lib/create.js              19
-rw-r--r--  node_modules/tar/lib/extract.js             20
-rw-r--r--  node_modules/tar/lib/header.js              26
-rw-r--r--  node_modules/tar/lib/high-level-opt.js       6
-rw-r--r--  node_modules/tar/lib/large-numbers.js       30
-rw-r--r--  node_modules/tar/lib/list.js                21
-rw-r--r--  node_modules/tar/lib/mkdir.js               20
-rw-r--r--  node_modules/tar/lib/mode-fix.js             5
-rw-r--r--  node_modules/tar/lib/pack.js                24
-rw-r--r--  node_modules/tar/lib/parse.js               16
-rw-r--r--  node_modules/tar/lib/path-reservations.js   12
-rw-r--r--  node_modules/tar/lib/pax.js                 12
-rw-r--r--  node_modules/tar/lib/read-entry.js           9
-rw-r--r--  node_modules/tar/lib/replace.js             33
-rw-r--r--  node_modules/tar/lib/types.js                2
-rw-r--r--  node_modules/tar/lib/unpack.js              44
-rw-r--r--  node_modules/tar/lib/update.js               2
-rw-r--r--  node_modules/tar/lib/warn-mixin.js           4
-rw-r--r--  node_modules/tar/lib/winchars.js             4
-rw-r--r--  node_modules/tar/lib/write-entry.js         33
-rw-r--r--  node_modules/tar/package.json               11
22 files changed, 195 insertions(+), 175 deletions(-)
diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md
index 1d6969405..42afb1aa7 100644
--- a/node_modules/tar/README.md
+++ b/node_modules/tar/README.md
@@ -1,7 +1,5 @@
# node-tar
-[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar)
-
[Fast](./benchmarks) and full-featured Tar for Node.js
The API is designed to mimic the behavior of `tar(1)` on unix systems.
@@ -304,7 +302,6 @@ The following options are supported:
- `mtime` Set to a `Date` object to force a specific `mtime` for
everything added to the archive. Overridden by `noMtime`.
-
The following options are mostly internal, but can be modified in some
advanced use cases, such as re-using caches between runs.
@@ -398,6 +395,13 @@ The following options are supported:
the `filter` option described above.)
- `onentry` A function that gets called with `(entry)` for each entry
that passes the filter.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
+- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the
+ extracted file matches the entry mode. This also suppresses the call to
+ `process.umask()` to determine the default umask value, since tar will
+ extract with whatever mode is provided, and let the process `umask` apply
+ normally.
The following options are mostly internal, but can be modified in some
advanced use cases, such as re-using caches between runs.
@@ -453,6 +457,8 @@ The following options are supported:
the call to `onentry`. Set `noResume: true` to suppress this
behavior. Note that by opting into this, the stream will never
complete until the entry data is consumed.
+- `onwarn` A function that will get called with `(code, message, data)` for
+ any warnings encountered. (See "Warnings and Errors")
### tar.u(options, fileList, callback) [alias: tar.update]
@@ -710,6 +716,11 @@ Most unpack errors will cause a `warn` event to be emitted. If the
that passes the filter.
- `onwarn` A function that will get called with `(code, message, data)` for
any warnings encountered. (See "Warnings and Errors")
+- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the
+ extracted file matches the entry mode. This also suppresses the call to
+ `process.umask()` to determine the default umask value, since tar will
+ extract with whatever mode is provided, and let the process `umask` apply
+ normally.
### class tar.Unpack.Sync
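
A minimal usage sketch of the two options documented above, per the tar 6.1.0 README; the archive and destination paths are placeholders:

```js
// Sketch: extract with the new noChmod option and a warning handler.
const tar = require('tar')

tar.x({
  file: 'archive.tgz',   // placeholder path
  cwd: 'dest',           // placeholder path
  // skip fs.chmod() on extracted entries and the process.umask() call;
  // modes are written as-is and the process umask applies normally
  noChmod: true,
  // called with (code, message, data) for each warning
  // (see "Warnings and Errors" in the README)
  onwarn: (code, message, data) => console.error(`${code}: ${message}`),
}).then(() => console.log('done'))
```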
diff --git a/node_modules/tar/lib/create.js b/node_modules/tar/lib/create.js
index a37aa52e6..d033640ac 100644
--- a/node_modules/tar/lib/create.js
+++ b/node_modules/tar/lib/create.js
@@ -4,12 +4,11 @@
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
-const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
-const c = module.exports = (opt_, files, cb) => {
+module.exports = (opt_, files, cb) => {
if (typeof files === 'function')
cb = files
@@ -38,7 +37,7 @@ const c = module.exports = (opt_, files, cb) => {
const createFileSync = (opt, files) => {
const p = new Pack.Sync(opt)
const stream = new fsm.WriteStreamSync(opt.file, {
- mode: opt.mode || 0o666
+ mode: opt.mode || 0o666,
})
p.pipe(stream)
addFilesSync(p, files)
@@ -47,7 +46,7 @@ const createFileSync = (opt, files) => {
const createFile = (opt, files, cb) => {
const p = new Pack(opt)
const stream = new fsm.WriteStream(opt.file, {
- mode: opt.mode || 0o666
+ mode: opt.mode || 0o666,
})
p.pipe(stream)
@@ -64,14 +63,14 @@ const createFile = (opt, files, cb) => {
const addFilesSync = (p, files) => {
files.forEach(file => {
- if (file.charAt(0) === '@')
+ if (file.charAt(0) === '@') {
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
- onentry: entry => p.add(entry)
+ onentry: entry => p.add(entry),
})
- else
+ } else
p.add(file)
})
p.end()
@@ -80,13 +79,13 @@ const addFilesSync = (p, files) => {
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
- if (file.charAt(0) === '@')
+ if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
- onentry: entry => p.add(entry)
+ onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
- else
+ } else
p.add(file)
}
p.end()
diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js
index cbb458a0a..f269145ed 100644
--- a/node_modules/tar/lib/extract.js
+++ b/node_modules/tar/lib/extract.js
@@ -7,7 +7,7 @@ const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
-const x = module.exports = (opt_, files, cb) => {
+module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
@@ -63,22 +63,20 @@ const extractFileSync = opt => {
const u = new Unpack.Sync(opt)
const file = opt.file
- let threw = true
- let fd
const stat = fs.statSync(file)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
- const readSize = opt.maxReadSize || 16*1024*1024
+ const readSize = opt.maxReadSize || 16 * 1024 * 1024
const stream = new fsm.ReadStreamSync(file, {
readSize: readSize,
- size: stat.size
+ size: stat.size,
})
stream.pipe(u)
}
const extractFile = (opt, cb) => {
const u = new Unpack(opt)
- const readSize = opt.maxReadSize || 16*1024*1024
+ const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file
const p = new Promise((resolve, reject) => {
@@ -93,7 +91,7 @@ const extractFile = (opt, cb) => {
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
- size: stat.size
+ size: stat.size,
})
stream.on('error', reject)
stream.pipe(u)
@@ -103,10 +101,6 @@ const extractFile = (opt, cb) => {
return cb ? p.then(cb, cb) : p
}
-const extractSync = opt => {
- return new Unpack.Sync(opt)
-}
+const extractSync = opt => new Unpack.Sync(opt)
-const extract = opt => {
- return new Unpack(opt)
-}
+const extract = opt => new Unpack(opt)
diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js
index 5d88f6cf8..129504048 100644
--- a/node_modules/tar/lib/header.js
+++ b/node_modules/tar/lib/header.js
@@ -95,19 +95,19 @@ class Header {
}
let sum = 8 * 0x20
- for (let i = off; i < off + 148; i++) {
+ for (let i = off; i < off + 148; i++)
sum += buf[i]
- }
- for (let i = off + 156; i < off + 512; i++) {
+
+ for (let i = off + 156; i < off + 512; i++)
sum += buf[i]
- }
+
this.cksumValid = sum === this.cksum
if (this.cksum === null && sum === 8 * 0x20)
this.nullBlock = true
}
[SLURP] (ex, global) {
- for (let k in ex) {
+ for (const k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
@@ -157,12 +157,12 @@ class Header {
}
let sum = 8 * 0x20
- for (let i = off; i < off + 148; i++) {
+ for (let i = off; i < off + 148; i++)
sum += buf[i]
- }
- for (let i = off + 156; i < off + 512; i++) {
+
+ for (let i = off + 156; i < off + 512; i++)
sum += buf[i]
- }
+
this.cksum = sum
encNumber(buf, off + 148, 8, this.cksum)
this.cksumValid = true
@@ -171,7 +171,7 @@ class Header {
}
set (data) {
- for (let i in data) {
+ for (const i in data) {
if (data[i] !== null && data[i] !== undefined)
this[i] = data[i]
}
@@ -242,7 +242,7 @@ const numToDate = num => num === null ? null : new Date(num * 1000)
const decNumber = (buf, off, size) =>
buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
- : decSmallNumber(buf, off, size)
+ : decSmallNumber(buf, off, size)
const nanNull = value => isNaN(value) ? null : value
@@ -254,7 +254,7 @@ const decSmallNumber = (buf, off, size) =>
// the maximum encodable as a null-terminated octal, by field size
const MAXNUM = {
12: 0o77777777777,
- 8 : 0o7777777
+ 8: 0o7777777,
}
const encNumber = (buf, off, size, number) =>
@@ -283,6 +283,6 @@ const NULLS = new Array(156).join('\0')
const encString = (buf, off, size, string) =>
string === null ? false :
(buf.write(string + NULLS, off, size, 'utf8'),
- string.length !== Buffer.byteLength(string) || string.length > size)
+ string.length !== Buffer.byteLength(string) || string.length > size)
module.exports = Header
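
To make the checksum loops above concrete: the 8-byte cksum field (offsets 148-155) is treated as if it held ASCII spaces, which is where the `8 * 0x20` seed comes from. A standalone sketch of the same computation:

```js
// Sum every byte of the 512-byte header block, substituting eight
// spaces (0x20) for the checksum field itself at offsets 148-155.
const headerChecksum = (buf, off = 0) => {
  let sum = 8 * 0x20
  for (let i = off; i < off + 148; i++)
    sum += buf[i]
  for (let i = off + 156; i < off + 512; i++)
    sum += buf[i]
  return sum
}
// A block of all zeroes yields 8 * 0x20, the same value the
// nullBlock check above compares against.
```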
diff --git a/node_modules/tar/lib/high-level-opt.js b/node_modules/tar/lib/high-level-opt.js
index 7333db915..40e44180e 100644
--- a/node_modules/tar/lib/high-level-opt.js
+++ b/node_modules/tar/lib/high-level-opt.js
@@ -21,9 +21,9 @@ const argmap = new Map([
['no-mtime', 'noMtime'],
['p', 'preserveOwner'],
['L', 'follow'],
- ['h', 'follow']
+ ['h', 'follow'],
])
-const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
- argmap.has(k) ? argmap.get(k) : k, opt[k]
+module.exports = opt => opt ? Object.keys(opt).map(k => [
+ argmap.has(k) ? argmap.get(k) : k, opt[k],
]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
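
A usage sketch of the normalizer above, using only mappings visible in `argmap` here; the `require` path assumes node-tar's internal file layout and is not public API:

```js
const hlo = require('tar/lib/high-level-opt.js')

// short flags are renamed per argmap; unknown keys pass through
console.log(hlo({ p: true, L: true, cwd: '/tmp' }))
// => { preserveOwner: true, follow: true, cwd: '/tmp' }

// a missing options object yields an empty options bag
console.log(hlo(undefined)) // => {}
```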
diff --git a/node_modules/tar/lib/large-numbers.js b/node_modules/tar/lib/large-numbers.js
index ad30bc350..dd6f690b9 100644
--- a/node_modules/tar/lib/large-numbers.js
+++ b/node_modules/tar/lib/large-numbers.js
@@ -2,7 +2,7 @@
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive.
-const encode = exports.encode = (num, buf) => {
+const encode = (num, buf) => {
if (!Number.isSafeInteger(num))
// The number is so large that javascript cannot represent it with integer
// precision.
@@ -18,7 +18,7 @@ const encodePositive = (num, buf) => {
buf[0] = 0x80
for (var i = buf.length; i > 1; i--) {
- buf[i-1] = num & 0xff
+ buf[i - 1] = num & 0xff
num = Math.floor(num / 0x100)
}
}
@@ -31,25 +31,22 @@ const encodeNegative = (num, buf) => {
var byte = num & 0xff
num = Math.floor(num / 0x100)
if (flipped)
- buf[i-1] = onesComp(byte)
+ buf[i - 1] = onesComp(byte)
else if (byte === 0)
- buf[i-1] = 0
+ buf[i - 1] = 0
else {
flipped = true
- buf[i-1] = twosComp(byte)
+ buf[i - 1] = twosComp(byte)
}
}
}
-const parse = exports.parse = (buf) => {
- var post = buf[buf.length - 1]
- var pre = buf[0]
- var value;
- if (pre === 0x80)
- value = pos(buf.slice(1, buf.length))
- else if (pre === 0xff)
- value = twos(buf)
- else
+const parse = (buf) => {
+ const pre = buf[0]
+ const value = pre === 0x80 ? pos(buf.slice(1, buf.length))
+ : pre === 0xff ? twos(buf)
+ : null
+ if (value === null)
throw Error('invalid base256 encoding')
if (!Number.isSafeInteger(value))
@@ -95,3 +92,8 @@ const pos = (buf) => {
const onesComp = byte => (0xff ^ byte) & 0xff
const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
+
+module.exports = {
+ encode,
+ parse,
+}
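
A round-trip sketch of the `encode`/`parse` pair now exported above. The `require` path assumes node-tar's internal layout; `2 ** 33` is just an example value one past the 11-digit octal maximum:

```js
const { encode, parse } = require('tar/lib/large-numbers.js')

const buf = Buffer.alloc(12)         // a 12-byte numeric header field
encode(2 ** 33, buf)
console.log(buf[0].toString(16))     // '80' -- leading byte marks positive base-256
console.log(parse(buf) === 2 ** 33)  // true

// parse() rejects fields that are neither 0x80- nor 0xff-prefixed
try { parse(Buffer.alloc(12)) } catch (er) { console.error(er.message) }
// => 'invalid base256 encoding'
```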
diff --git a/node_modules/tar/lib/list.js b/node_modules/tar/lib/list.js
index 9da3f812c..702cfea80 100644
--- a/node_modules/tar/lib/list.js
+++ b/node_modules/tar/lib/list.js
@@ -10,7 +10,7 @@ const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
-const t = module.exports = (opt_, files, cb) => {
+module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
@@ -79,15 +79,15 @@ const listFileSync = opt => {
let fd
try {
const stat = fs.statSync(file)
- const readSize = opt.maxReadSize || 16*1024*1024
- if (stat.size < readSize) {
+ const readSize = opt.maxReadSize || 16 * 1024 * 1024
+ if (stat.size < readSize)
p.end(fs.readFileSync(file))
- } else {
+ else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
- let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
@@ -95,14 +95,17 @@ const listFileSync = opt => {
}
threw = false
} finally {
- if (threw && fd)
- try { fs.closeSync(fd) } catch (er) {}
+ if (threw && fd) {
+ try {
+ fs.closeSync(fd)
+ } catch (er) {}
+ }
}
}
const listFile = (opt, cb) => {
const parse = new Parser(opt)
- const readSize = opt.maxReadSize || 16*1024*1024
+ const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file
const p = new Promise((resolve, reject) => {
@@ -115,7 +118,7 @@ const listFile = (opt, cb) => {
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
- size: stat.size
+ size: stat.size,
})
stream.on('error', reject)
stream.pipe(parse)
diff --git a/node_modules/tar/lib/mkdir.js b/node_modules/tar/lib/mkdir.js
index 381d0e1b3..aed398fcd 100644
--- a/node_modules/tar/lib/mkdir.js
+++ b/node_modules/tar/lib/mkdir.js
@@ -33,7 +33,7 @@ class CwdError extends Error {
}
}
-const mkdir = module.exports = (dir, opt, cb) => {
+module.exports = (dir, opt, cb) => {
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
@@ -44,7 +44,7 @@ const mkdir = module.exports = (dir, opt, cb) => {
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
- ( uid !== opt.processUid || gid !== opt.processGid )
+ (uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve
const unlink = opt.unlink
@@ -68,12 +68,13 @@ const mkdir = module.exports = (dir, opt, cb) => {
if (cache && cache.get(dir) === true)
return done()
- if (dir === cwd)
+ if (dir === cwd) {
return fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
done(er)
})
+ }
if (preserve)
return mkdirp(dir, {mode}).then(made => done(null, made), done)
@@ -104,13 +105,13 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
cb(statEr)
else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
- else if (unlink)
+ else if (unlink) {
fs.unlink(part, er => {
if (er)
return cb(er)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
})
- else if (st.isSymbolicLink())
+ } else if (st.isSymbolicLink())
return cb(new SymlinkError(part, part + '/' + parts.join('/')))
else
cb(er)
@@ -121,7 +122,7 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
}
}
-const mkdirSync = module.exports.sync = (dir, opt) => {
+module.exports.sync = (dir, opt) => {
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
@@ -132,7 +133,7 @@ const mkdirSync = module.exports.sync = (dir, opt) => {
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
- ( uid !== opt.processUid || gid !== opt.processGid )
+ (uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve
const unlink = opt.unlink
@@ -172,9 +173,8 @@ const mkdirSync = module.exports.sync = (dir, opt) => {
const parts = sub.split(/\/|\\/)
let created = null
for (let p = parts.shift(), part = cwd;
- p && (part += '/' + p);
- p = parts.shift()) {
-
+ p && (part += '/' + p);
+ p = parts.shift()) {
if (cache.get(part))
continue
diff --git a/node_modules/tar/lib/mode-fix.js b/node_modules/tar/lib/mode-fix.js
index c3758741c..6a045ffca 100644
--- a/node_modules/tar/lib/mode-fix.js
+++ b/node_modules/tar/lib/mode-fix.js
@@ -7,9 +7,8 @@ module.exports = (mode, isDir, portable) => {
// (as some linux distros do), then we'll write the
// archive with 0o644 instead. Also, don't ever create
// a file that is not readable/writable by the owner.
- if (portable) {
- mode = (mode | 0o600) &~0o22
- }
+ if (portable)
+ mode = (mode | 0o600) & ~0o22
// if dirs are readable, then they should be listable
if (isDir) {
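
Worked through, the portable branch above forces owner read/write and strips group/world write bits:

```js
// (mode | 0o600) & ~0o22, as in the portable branch above
const portableFix = mode => (mode | 0o600) & ~0o22

console.log(portableFix(0o777).toString(8)) // '755' -- group/world write cleared
console.log(portableFix(0o444).toString(8)) // '644' -- owner read/write forced on
```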
diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js
index 0fca4ae25..492fe18ec 100644
--- a/node_modules/tar/lib/pack.js
+++ b/node_modules/tar/lib/pack.js
@@ -97,7 +97,7 @@ const Pack = warner(class Pack extends MiniPass {
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
- this[QUEUE] = new Yallist
+ this[QUEUE] = new Yallist()
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
@@ -209,8 +209,8 @@ const Pack = warner(class Pack extends MiniPass {
this[PROCESSING] = true
for (let w = this[QUEUE].head;
- w !== null && this[JOBS] < this.jobs;
- w = w.next) {
+ w !== null && this[JOBS] < this.jobs;
+ w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
@@ -297,7 +297,7 @@ const Pack = warner(class Pack extends MiniPass {
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
- mtime: this.mtime
+ mtime: this.mtime,
}
}
@@ -321,7 +321,7 @@ const Pack = warner(class Pack extends MiniPass {
[PIPE] (job) {
job.piped = true
- if (job.readdir)
+ if (job.readdir) {
job.readdir.forEach(entry => {
const p = this.prefix ?
job.path.slice(this.prefix.length + 1) || './'
@@ -330,20 +330,22 @@ const Pack = warner(class Pack extends MiniPass {
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
+ }
const source = job.entry
const zip = this.zip
- if (zip)
+ if (zip) {
source.on('data', chunk => {
if (!zip.write(chunk))
source.pause()
})
- else
+ } else {
source.on('data', chunk => {
if (!super.write(chunk))
source.pause()
})
+ }
}
pause () {
@@ -377,7 +379,7 @@ class PackSync extends Pack {
const source = job.entry
const zip = this.zip
- if (job.readdir)
+ if (job.readdir) {
job.readdir.forEach(entry => {
const p = this.prefix ?
job.path.slice(this.prefix.length + 1) || './'
@@ -386,15 +388,17 @@ class PackSync extends Pack {
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
+ }
- if (zip)
+ if (zip) {
source.on('data', chunk => {
zip.write(chunk)
})
- else
+ } else {
source.on('data', chunk => {
super[WRITE](chunk)
})
+ }
}
}
diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js
index d9a49ad1f..b1b4e7e47 100644
--- a/node_modules/tar/lib/parse.js
+++ b/node_modules/tar/lib/parse.js
@@ -21,7 +21,6 @@
// ignored entries get .resume() called on them straight away
const warner = require('./warn-mixin.js')
-const path = require('path')
const Header = require('./header.js')
const EE = require('events')
const Yallist = require('yallist')
@@ -85,13 +84,14 @@ module.exports = warner(class Parser extends EE {
if (opt.ondone)
this.on(DONE, opt.ondone)
- else
+ else {
this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
})
+ }
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
@@ -166,9 +166,8 @@ module.exports = warner(class Parser extends EE {
this[SAW_VALID_ENTRY] = true
}
entry.on('end', onend)
- } else {
+ } else
this[SAW_VALID_ENTRY] = true
- }
}
if (entry.meta) {
@@ -249,7 +248,7 @@ module.exports = warner(class Parser extends EE {
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
- }
+ }
}
[CONSUMEBODY] (chunk, position) {
@@ -352,7 +351,7 @@ module.exports = warner(class Parser extends EE {
this[CONSUMECHUNK]()
})
this[WRITING] = true
- const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk)
+ const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
@@ -415,9 +414,8 @@ module.exports = warner(class Parser extends EE {
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
- } else {
+ } else
this[CONSUMECHUNKSUB](chunk)
- }
while (this[BUFFER] &&
this[BUFFER].length >= 512 &&
@@ -438,7 +436,7 @@ module.exports = warner(class Parser extends EE {
// we know that we are in CONSUMING mode, so anything written goes into
// the buffer. Advance the position and put any remainder in the buffer.
let position = 0
- let length = chunk.length
+ const length = chunk.length
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
diff --git a/node_modules/tar/lib/path-reservations.js b/node_modules/tar/lib/path-reservations.js
index 3cf0c2c12..c0a16b0a1 100644
--- a/node_modules/tar/lib/path-reservations.js
+++ b/node_modules/tar/lib/path-reservations.js
@@ -20,8 +20,8 @@ module.exports = () => {
// return a set of parent dirs for a given path
const { join } = require('path')
const getDirs = path =>
- join(path).split(/[\\\/]/).slice(0, -1).reduce((set, path) =>
- set.length ? set.concat(join(set[set.length-1], path)) : [path], [])
+ join(path).split(/[\\/]/).slice(0, -1).reduce((set, path) =>
+ set.length ? set.concat(join(set[set.length - 1], path)) : [path], [])
// functions currently running
const running = new Set()
@@ -80,9 +80,9 @@ module.exports = () => {
dirs.forEach(dir => {
const q = queues.get(dir)
assert(q[0] instanceof Set)
- if (q[0].size === 1 && q.length === 1) {
+ if (q[0].size === 1 && q.length === 1)
queues.delete(dir)
- } else if (q[0].size === 1) {
+ else if (q[0].size === 1) {
q.shift()
// must be a function or else the Set would've been reused
@@ -112,8 +112,8 @@ module.exports = () => {
const q = queues.get(dir)
if (!q)
queues.set(dir, [new Set([fn])])
- else if (q[q.length-1] instanceof Set)
- q[q.length-1].add(fn)
+ else if (q[q.length - 1] instanceof Set)
+ q[q.length - 1].add(fn)
else
q.push(new Set([fn]))
})
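
For reference, `getDirs` above reserves every ancestor directory of a path, not the path itself; a quick sketch of its behavior (POSIX-style output shown):

```js
const { join } = require('path')
const getDirs = path =>
  join(path).split(/[\\/]/).slice(0, -1).reduce((set, path) =>
    set.length ? set.concat(join(set[set.length - 1], path)) : [path], [])

console.log(getDirs('a/b/c/d.txt'))
// => [ 'a', 'a/b', 'a/b/c' ]
```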
diff --git a/node_modules/tar/lib/pax.js b/node_modules/tar/lib/pax.js
index 214a459f3..7768c7b45 100644
--- a/node_modules/tar/lib/pax.js
+++ b/node_modules/tar/lib/pax.js
@@ -34,9 +34,8 @@ class Pax {
const buf = Buffer.allocUnsafe(bufLen)
// 0-fill the header section, it might not hit every field
- for (let i = 0; i < 512; i++) {
+ for (let i = 0; i < 512; i++)
buf[i] = 0
- }
new Header({
// XXX split the path
@@ -55,15 +54,14 @@ class Pax {
devmaj: 0,
devmin: 0,
atime: this.atime || null,
- ctime: this.ctime || null
+ ctime: this.ctime || null,
}).encode(buf)
buf.write(body, 512, bodyLen, 'utf8')
// null pad after the body
- for (let i = bodyLen + 512; i < buf.length; i++) {
+ for (let i = bodyLen + 512; i < buf.length; i++)
buf[i] = 0
- }
return buf
}
@@ -95,7 +93,7 @@ class Pax {
: this[field]
const s = ' ' +
(field === 'dev' || field === 'ino' || field === 'nlink'
- ? 'SCHILY.' : '') +
+ ? 'SCHILY.' : '') +
field + '=' + v + '\n'
const byteLen = Buffer.byteLength(s)
// the digits includes the length of the digits in ascii base-10
@@ -136,7 +134,7 @@ const parseKVLine = (set, line) => {
const v = kv.join('=')
set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
- ? new Date(v * 1000)
+ ? new Date(v * 1000)
: /^[0-9]+$/.test(v) ? +v
: v
return set
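
The length prefix described above ("the digits includes the length of the digits in ascii base-10") is a small fixed-point computation; a standalone sketch, with `paxRecord` as a hypothetical helper name:

```js
// A pax extended-header record is "<length> <key>=<value>\n", where
// <length> counts the entire line -- including its own ASCII digits.
const paxRecord = (key, value) => {
  const body = ` ${key}=${value}\n`
  let digits = String(Buffer.byteLength(body) + 1).length
  // adding the digits can lengthen the number itself, so settle it
  while (String(Buffer.byteLength(body) + digits).length > digits)
    digits++
  return (Buffer.byteLength(body) + digits) + body
}

console.log(JSON.stringify(paxRecord('path', 'a/b.txt')))
// => "16 path=a/b.txt\n"
```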
diff --git a/node_modules/tar/lib/read-entry.js b/node_modules/tar/lib/read-entry.js
index 8acee94ba..6661cba5f 100644
--- a/node_modules/tar/lib/read-entry.js
+++ b/node_modules/tar/lib/read-entry.js
@@ -1,5 +1,4 @@
'use strict'
-const types = require('./types.js')
const MiniPass = require('minipass')
const SLURP = Symbol('slurp')
@@ -63,8 +62,10 @@ module.exports = class ReadEntry extends MiniPass {
this.uname = header.uname
this.gname = header.gname
- if (ex) this[SLURP](ex)
- if (gex) this[SLURP](gex, true)
+ if (ex)
+ this[SLURP](ex)
+ if (gex)
+ this[SLURP](gex, true)
}
write (data) {
@@ -87,7 +88,7 @@ module.exports = class ReadEntry extends MiniPass {
}
[SLURP] (ex, global) {
- for (let k in ex) {
+ for (const k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
diff --git a/node_modules/tar/lib/replace.js b/node_modules/tar/lib/replace.js
index 44126d1f8..e5e2a4255 100644
--- a/node_modules/tar/lib/replace.js
+++ b/node_modules/tar/lib/replace.js
@@ -3,7 +3,6 @@
// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
-const Parse = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
@@ -17,7 +16,7 @@ const path = require('path')
const Header = require('./header.js')
-const r = module.exports = (opt_, files, cb) => {
+module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file)
@@ -68,10 +67,10 @@ const replaceSync = (opt, files) => {
break POSITION
}
- let h = new Header(headBuf)
+ const h = new Header(headBuf)
if (!h.cksumValid)
break
- let entryBlockSize = 512 * Math.ceil(h.size / 512)
+ const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > st.size)
break
// the 512 for the header we just parsed will be added as well
@@ -84,15 +83,18 @@ const replaceSync = (opt, files) => {
streamSync(opt, p, position, fd, files)
} finally {
- if (threw)
- try { fs.closeSync(fd) } catch (er) {}
+ if (threw) {
+ try {
+ fs.closeSync(fd)
+ } catch (er) {}
+ }
}
}
const streamSync = (opt, p, position, fd, files) => {
const stream = new fsm.WriteStreamSync(opt.file, {
fd: fd,
- start: position
+ start: position,
})
p.pipe(stream)
addFilesSync(p, files)
@@ -120,11 +122,12 @@ const replace = (opt, files, cb) => {
if (er)
return cb(er)
bufPos += bytes
- if (bufPos < 512 && bytes)
+ if (bufPos < 512 && bytes) {
return fs.read(
fd, headBuf, bufPos, headBuf.length - bufPos,
position + bufPos, onread
)
+ }
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
return cb(new Error('cannot append to compressed archives'))
@@ -173,7 +176,7 @@ const replace = (opt, files, cb) => {
return reject(er)
const stream = new fsm.WriteStream(opt.file, {
fd: fd,
- start: position
+ start: position,
})
p.pipe(stream)
stream.on('error', reject)
@@ -190,14 +193,14 @@ const replace = (opt, files, cb) => {
const addFilesSync = (p, files) => {
files.forEach(file => {
- if (file.charAt(0) === '@')
+ if (file.charAt(0) === '@') {
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
- onentry: entry => p.add(entry)
+ onentry: entry => p.add(entry),
})
- else
+ } else
p.add(file)
})
p.end()
@@ -206,13 +209,13 @@ const addFilesSync = (p, files) => {
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
- if (file.charAt(0) === '@')
+ if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
- onentry: entry => p.add(entry)
+ onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
- else
+ } else
p.add(file)
}
p.end()
diff --git a/node_modules/tar/lib/types.js b/node_modules/tar/lib/types.js
index df425652b..7bfc25465 100644
--- a/node_modules/tar/lib/types.js
+++ b/node_modules/tar/lib/types.js
@@ -37,7 +37,7 @@ exports.name = new Map([
// skip
['V', 'TapeVolumeHeader'],
// like x
- ['X', 'OldExtendedHeader']
+ ['X', 'OldExtendedHeader'],
])
// map the other direction
diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js
index af0e0ffa0..7d4b79d9e 100644
--- a/node_modules/tar/lib/unpack.js
+++ b/node_modules/tar/lib/unpack.js
@@ -7,13 +7,11 @@
// clobbering an fs object to create one of a different type.)
const assert = require('assert')
-const EE = require('events').EventEmitter
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const mkdir = require('./mkdir.js')
-const mkdirSync = mkdir.sync
const wc = require('./winchars.js')
const pathReservations = require('./path-reservations.js')
@@ -28,7 +26,6 @@ const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
-const UNKNOWN = Symbol('unknown')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
@@ -121,9 +118,10 @@ class Unpack extends Parser {
// need both or neither
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
throw new TypeError('cannot set owner without number uid and gid')
- if (opt.preserveOwner)
+ if (opt.preserveOwner) {
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
+ }
this.uid = opt.uid
this.gid = opt.gid
this.setOwner = true
@@ -171,11 +169,14 @@ class Unpack extends Parser {
this.cwd = path.resolve(opt.cwd || process.cwd())
this.strip = +opt.strip || 0
- this.processUmask = process.umask()
+ // if we're not chmodding, then we don't need the process umask
+ this.processUmask = opt.noChmod ? 0 : process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
+
// default mode for dirs created as parents
this.dmode = opt.dmode || (0o0777 & (~this.umask))
this.fmode = opt.fmode || (0o0666 & (~this.umask))
+
this.on('entry', entry => this[ONENTRY](entry))
}
@@ -271,6 +272,7 @@ class Unpack extends Parser {
case 'CharacterDevice':
case 'BlockDevice':
case 'FIFO':
+ default:
return this[UNSUPPORTED](entry)
}
}
@@ -299,7 +301,8 @@ class Unpack extends Parser {
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
- mode: mode
+ mode: mode,
+ noChmod: this.noChmod,
}, cb)
}
@@ -308,11 +311,11 @@ class Unpack extends Parser {
// in set owner mode, chown if setting doesn't match process
return this.forceChown ||
this.preserveOwner &&
- ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
- typeof entry.gid === 'number' && entry.gid !== this.processGid )
+ (typeof entry.uid === 'number' && entry.uid !== this.processUid ||
+ typeof entry.gid === 'number' && entry.gid !== this.processGid)
||
- ( typeof this.uid === 'number' && this.uid !== this.processUid ||
- typeof this.gid === 'number' && this.gid !== this.processGid )
+ (typeof this.uid === 'number' && this.uid !== this.processUid ||
+ typeof this.gid === 'number' && this.gid !== this.processGid)
}
[UID] (entry) {
@@ -328,7 +331,7 @@ class Unpack extends Parser {
const stream = new fsm.WriteStream(entry.absolute, {
flags: getFlag(entry.size),
mode: mode,
- autoClose: false
+ autoClose: false,
})
stream.on('error', er => this[ONERROR](er, entry))
@@ -460,6 +463,7 @@ class Unpack extends Parser {
paths.push(entry.linkpath)
this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
}
+
[CHECKFS2] (entry, done) {
this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
if (er) {
@@ -470,16 +474,17 @@ class Unpack extends Parser {
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
this[SKIP](entry)
done()
- } else if (er || this[ISREUSABLE](entry, st)) {
+ } else if (er || this[ISREUSABLE](entry, st))
this[MAKEFS](null, entry, done)
- }
+
else if (st.isDirectory()) {
if (entry.type === 'Directory') {
- if (!entry.mode || (st.mode & 0o7777) === entry.mode)
+ if (!this.noChmod && (!entry.mode || (st.mode & 0o7777) === entry.mode))
this[MAKEFS](null, entry, done)
- else
+ else {
fs.chmod(entry.absolute, entry.mode,
er => this[MAKEFS](er, entry, done))
+ }
} else
fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry, done))
} else
@@ -523,10 +528,6 @@ class Unpack extends Parser {
}
class UnpackSync extends Unpack {
- constructor (opt) {
- super(opt)
- }
-
[CHECKFS] (entry) {
const er = this[MKDIR](path.dirname(entry.absolute), this.dmode, neverCalled)
if (er)
@@ -541,7 +542,7 @@ class UnpackSync extends Unpack {
try {
if (st.isDirectory()) {
if (entry.type === 'Directory') {
- if (entry.mode && (st.mode & 0o7777) !== entry.mode)
+ if (!this.noChmod && entry.mode && (st.mode & 0o7777) !== entry.mode)
fs.chmodSync(entry.absolute, entry.mode)
} else
fs.rmdirSync(entry.absolute)
@@ -571,7 +572,6 @@ class UnpackSync extends Unpack {
this[ONERROR](er || closeError, entry)
}
- let stream
let fd
try {
fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
@@ -659,7 +659,7 @@ class UnpackSync extends Unpack {
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
- mode: mode
+ mode: mode,
})
} catch (er) {
return er
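
The umask change above means the process-global `process.umask()` is only consulted when tar may actually chmod. A sketch of the default-mode derivation as written in the constructor:

```js
// Default-mode logic as in the Unpack constructor above: with noChmod
// set, process.umask() is never called.
const opts = { noChmod: true }

const processUmask = opts.noChmod ? 0 : process.umask()
const umask = typeof opts.umask === 'number' ? opts.umask : processUmask
const dmode = opts.dmode || (0o0777 & ~umask) // parent dirs created on demand
const fmode = opts.fmode || (0o0666 & ~umask) // regular files

console.log(dmode.toString(8), fmode.toString(8)) // '777 666' when noChmod
```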
diff --git a/node_modules/tar/lib/update.js b/node_modules/tar/lib/update.js
index 16c3e93ed..a5784b73f 100644
--- a/node_modules/tar/lib/update.js
+++ b/node_modules/tar/lib/update.js
@@ -6,7 +6,7 @@ const hlo = require('./high-level-opt.js')
const r = require('./replace.js')
// just call tar.r with the filter and mtimeCache
-const u = module.exports = (opt_, files, cb) => {
+module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file)
diff --git a/node_modules/tar/lib/warn-mixin.js b/node_modules/tar/lib/warn-mixin.js
index 11eb52cc6..aeebb531b 100644
--- a/node_modules/tar/lib/warn-mixin.js
+++ b/node_modules/tar/lib/warn-mixin.js
@@ -13,9 +13,9 @@ module.exports = Base => class extends Base {
message = message.message
}
this.emit('warn', data.tarCode, message, data)
- } else if (message instanceof Error) {
+ } else if (message instanceof Error)
this.emit('error', Object.assign(message, data))
- } else
+ else
this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
}
}
diff --git a/node_modules/tar/lib/winchars.js b/node_modules/tar/lib/winchars.js
index cf6ea0606..ebcab4aed 100644
--- a/node_modules/tar/lib/winchars.js
+++ b/node_modules/tar/lib/winchars.js
@@ -8,7 +8,7 @@ const raw = [
'<',
'>',
'?',
- ':'
+ ':',
]
const win = raw.map(char =>
@@ -19,5 +19,5 @@ const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
module.exports = {
encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
- decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
+ decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s),
}
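
A usage sketch for the module above: `encode` swaps Windows-reserved filename characters for substitute code points and `decode` reverses it (the exact substitute characters are an implementation detail not shown in this hunk; the require path is internal, not public API):

```js
const wc = require('tar/lib/winchars.js')

const original = 'con<out>:alt?stream'
const safe = wc.encode(original)          // reserved chars replaced
console.log(safe !== original)            // true
console.log(wc.decode(safe) === original) // true -- lossless round trip
```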
diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js
index 0e33cb59d..1d0b746cd 100644
--- a/node_modules/tar/lib/write-entry.js
+++ b/node_modules/tar/lib/write-entry.js
@@ -2,11 +2,9 @@
const MiniPass = require('minipass')
const Pax = require('./pax.js')
const Header = require('./header.js')
-const ReadEntry = require('./read-entry.js')
const fs = require('fs')
const path = require('path')
-const types = require('./types.js')
const maxReadSize = 16 * 1024 * 1024
const PROCESS = Symbol('process')
const FILE = Symbol('file')
@@ -134,12 +132,12 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
- this.stat.uid === this.myuid ? this.myuser : '',
+ this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
- ctime: this.portable ? null : this.stat.ctime
+ ctime: this.portable ? null : this.stat.ctime,
})
- if (this.header.encode() && !this.noPax)
+ if (this.header.encode() && !this.noPax) {
this.write(new Pax({
atime: this.portable ? null : this.header.atime,
ctime: this.portable ? null : this.header.ctime,
@@ -152,8 +150,9 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
- nlink: this.portable ? null : this.stat.nlink
+ nlink: this.portable ? null : this.stat.nlink,
}).encode())
+ }
this.write(this.header.block)
}
@@ -256,8 +255,8 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
if (bytesRead === remain) {
for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
buf[i + offset] = 0
- bytesRead ++
- remain ++
+ bytesRead++
+ remain++
}
}
@@ -286,10 +285,6 @@ const WriteEntry = warner(class WriteEntry extends MiniPass {
})
class WriteEntrySync extends WriteEntry {
- constructor (path, opt) {
- super(path, opt)
- }
-
[LSTAT] () {
this[ONLSTAT](fs.lstatSync(this.absolute))
}
@@ -311,8 +306,11 @@ class WriteEntrySync extends WriteEntry {
} finally {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
- if (threw)
- try { this[CLOSE](fd, () => {}) } catch (er) {}
+ if (threw) {
+ try {
+ this[CLOSE](fd, () => {})
+ } catch (er) {}
+ }
}
}
@@ -375,7 +373,7 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
- ctime: this.portable ? null : this.ctime
+ ctime: this.portable ? null : this.ctime,
})
if (pathWarn) {
@@ -385,7 +383,7 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
})
}
- if (this.header.encode() && !this.noPax)
+ if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
@@ -398,8 +396,9 @@ const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
- nlink: this.portable ? null : this.readEntry.nlink
+ nlink: this.portable ? null : this.readEntry.nlink,
}).encode())
+ }
super.write(this.header.block)
readEntry.pipe(this)
diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json
index 9c388c57c..9b8b96ec6 100644
--- a/node_modules/tar/package.json
+++ b/node_modules/tar/package.json
@@ -2,13 +2,17 @@
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"name": "tar",
"description": "tar for node",
- "version": "6.0.5",
+ "version": "6.1.0",
"repository": {
"type": "git",
"url": "https://github.com/npm/node-tar.git"
},
"scripts": {
"test": "tap",
+ "posttest": "npm run lint",
+ "eslint": "eslint",
+ "lint": "npm run eslint -- test lib",
+ "lintfix": "npm run lint -- --fix",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
@@ -26,6 +30,11 @@
"devDependencies": {
"chmodr": "^1.2.0",
"end-of-stream": "^1.4.3",
+ "eslint": "^7.17.0",
+ "eslint-plugin-import": "^2.22.1",
+ "eslint-plugin-node": "^11.1.0",
+ "eslint-plugin-promise": "^4.2.1",
+ "eslint-plugin-standard": "^5.0.0",
"events-to-array": "^1.1.2",
"mutate-fs": "^2.1.1",
"rimraf": "^2.7.1",