mirror of https://github.com/musix-org/musix-oss synced 2025-06-17 04:26:00 +00:00
MatteZ02
2020-03-04 13:43:21 +02:00
parent 79f8a18164
commit da84fcfed1
1292 changed files with 93623 additions and 35760 deletions

node_modules/tar/lib/buffer.js generated vendored Normal file

@@ -0,0 +1,11 @@
'use strict'
// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from
// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.
// .M, this is fine .\^/M..
let B = Buffer
/* istanbul ignore next */
if (!B.alloc) {
B = require('safe-buffer').Buffer
}
module.exports = B
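
For illustration, a minimal sketch of how this shim is consumed elsewhere in the package; the call sites here are hypothetical:

// the shim guarantees alloc/from exist, even on node 4.x < 4.5.0
const B = require('./buffer.js')
const block = B.alloc(512)            // zero-filled 512-byte header block
const magic = B.from('ustar', 'utf8') // safe alternative to new Buffer()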

node_modules/tar/lib/create.js generated vendored Normal file

@@ -0,0 +1,105 @@
'use strict'
// tar -c
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
const c = module.exports = (opt_, files, cb) => {
if (typeof files === 'function')
cb = files
if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
return opt.file && opt.sync ? createFileSync(opt, files)
: opt.file ? createFile(opt, files, cb)
: opt.sync ? createSync(opt, files)
: create(opt, files)
}
const createFileSync = (opt, files) => {
const p = new Pack.Sync(opt)
const stream = new fsm.WriteStreamSync(opt.file, {
mode: opt.mode || 0o666
})
p.pipe(stream)
addFilesSync(p, files)
}
const createFile = (opt, files, cb) => {
const p = new Pack(opt)
const stream = new fsm.WriteStream(opt.file, {
mode: opt.mode || 0o666
})
p.pipe(stream)
const promise = new Promise((res, rej) => {
stream.on('error', rej)
stream.on('close', res)
p.on('error', rej)
})
addFilesAsync(p, files)
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@')
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry)
})
else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@')
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry)
}).then(_ => addFilesAsync(p, files))
else
p.add(file)
}
p.end()
}
const createSync = (opt, files) => {
const p = new Pack.Sync(opt)
addFilesSync(p, files)
return p
}
const create = (opt, files) => {
const p = new Pack(opt)
addFilesAsync(p, files)
return p
}
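
A hedged usage sketch of this entry point (tar -c); 'out.tgz' and 'some-dir' are hypothetical names:

const c = require('./create.js')

// async with a file target: the promise resolves once the archive is written
c({ file: 'out.tgz', gzip: true, cwd: '/tmp' }, ['some-dir'])
  .then(() => console.log('archive written'))

// no file target: returns a Pack stream to pipe wherever you like
c({}, ['some-dir']).pipe(process.stdout)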

node_modules/tar/lib/extract.js generated vendored Normal file

@@ -0,0 +1,112 @@
'use strict'
// tar -x
const hlo = require('./high-level-opt.js')
const Unpack = require('./unpack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const x = module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
return opt.file && opt.sync ? extractFileSync(opt)
: opt.file ? extractFile(opt, cb)
: opt.sync ? extractSync(opt)
: extract(opt)
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
: file => mapHas(file.replace(/\/+$/, ''))
}
const extractFileSync = opt => {
const u = new Unpack.Sync(opt)
const file = opt.file
let threw = true
let fd
const stat = fs.statSync(file)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
const readSize = opt.maxReadSize || 16*1024*1024
const stream = new fsm.ReadStreamSync(file, {
readSize: readSize,
size: stat.size
})
stream.pipe(u)
}
const extractFile = (opt, cb) => {
const u = new Unpack(opt)
const readSize = opt.maxReadSize || 16*1024*1024
const file = opt.file
const p = new Promise((resolve, reject) => {
u.on('error', reject)
u.on('close', resolve)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size
})
stream.on('error', reject)
stream.pipe(u)
}
})
})
return cb ? p.then(cb, cb) : p
}
const extractSync = opt => {
return new Unpack.Sync(opt)
}
const extract = opt => {
return new Unpack(opt)
}
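
A hedged usage sketch of this entry point (tar -x); paths are hypothetical. Gzip input is detected automatically by the parser, so no flag is needed:

const x = require('./extract.js')

// extract only the named entries (and anything under them) into cwd
x({ file: 'out.tgz', cwd: '/tmp/dest' }, ['some-dir'])
  .then(() => console.log('extracted'))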

node_modules/tar/lib/header.js generated vendored Normal file

@@ -0,0 +1,289 @@
'use strict'
// parse a 512-byte header block to a data object, or vice-versa
// encode returns `true` if a pax extended header is needed, because
// the data could not be faithfully encoded in a simple header.
// (Also, check header.needPax to see if it needs a pax header.)
const Buffer = require('./buffer.js')
const types = require('./types.js')
const pathModule = require('path').posix
const large = require('./large-numbers.js')
const SLURP = Symbol('slurp')
const TYPE = Symbol('type')
class Header {
constructor (data, off, ex, gex) {
this.cksumValid = false
this.needPax = false
this.nullBlock = false
this.block = null
this.path = null
this.mode = null
this.uid = null
this.gid = null
this.size = null
this.mtime = null
this.cksum = null
this[TYPE] = '0'
this.linkpath = null
this.uname = null
this.gname = null
this.devmaj = 0
this.devmin = 0
this.atime = null
this.ctime = null
if (Buffer.isBuffer(data))
this.decode(data, off || 0, ex, gex)
else if (data)
this.set(data)
}
decode (buf, off, ex, gex) {
if (!off)
off = 0
if (!buf || !(buf.length >= off + 512))
throw new Error('need 512 bytes for header')
this.path = decString(buf, off, 100)
this.mode = decNumber(buf, off + 100, 8)
this.uid = decNumber(buf, off + 108, 8)
this.gid = decNumber(buf, off + 116, 8)
this.size = decNumber(buf, off + 124, 12)
this.mtime = decDate(buf, off + 136, 12)
this.cksum = decNumber(buf, off + 148, 12)
// if we have extended or global extended headers, apply them now
// See https://github.com/npm/node-tar/pull/187
this[SLURP](ex)
this[SLURP](gex, true)
// old tar versions marked dirs as a file with a trailing /
this[TYPE] = decString(buf, off + 156, 1)
if (this[TYPE] === '')
this[TYPE] = '0'
if (this[TYPE] === '0' && this.path.substr(-1) === '/')
this[TYPE] = '5'
// tar implementations sometimes incorrectly put the stat(dir).size
// as the size in the tarball, even though Directory entries are
// not able to have any body at all. In the very rare chance that
// it actually DOES have a body, we weren't going to do anything with
// it anyway, and it'll just be a warning about an invalid header.
if (this[TYPE] === '5')
this.size = 0
this.linkpath = decString(buf, off + 157, 100)
if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
this.uname = decString(buf, off + 265, 32)
this.gname = decString(buf, off + 297, 32)
this.devmaj = decNumber(buf, off + 329, 8)
this.devmin = decNumber(buf, off + 337, 8)
if (buf[off + 475] !== 0) {
// definitely a prefix, definitely >130 chars.
const prefix = decString(buf, off + 345, 155)
this.path = prefix + '/' + this.path
} else {
const prefix = decString(buf, off + 345, 130)
if (prefix)
this.path = prefix + '/' + this.path
this.atime = decDate(buf, off + 476, 12)
this.ctime = decDate(buf, off + 488, 12)
}
}
let sum = 8 * 0x20
for (let i = off; i < off + 148; i++) {
sum += buf[i]
}
for (let i = off + 156; i < off + 512; i++) {
sum += buf[i]
}
this.cksumValid = sum === this.cksum
if (this.cksum === null && sum === 8 * 0x20)
this.nullBlock = true
}
[SLURP] (ex, global) {
for (let k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path'))
this[k] = ex[k]
}
}
encode (buf, off) {
if (!buf) {
buf = this.block = Buffer.alloc(512)
off = 0
}
if (!off)
off = 0
if (!(buf.length >= off + 512))
throw new Error('need 512 bytes for header')
const prefixSize = this.ctime || this.atime ? 130 : 155
const split = splitPrefix(this.path || '', prefixSize)
const path = split[0]
const prefix = split[1]
this.needPax = split[2]
this.needPax = encString(buf, off, 100, path) || this.needPax
this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
buf[off + 156] = this[TYPE].charCodeAt(0)
this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
buf.write('ustar\u000000', off + 257, 8)
this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
if (buf[off + 475] !== 0)
this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
else {
this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
}
let sum = 8 * 0x20
for (let i = off; i < off + 148; i++) {
sum += buf[i]
}
for (let i = off + 156; i < off + 512; i++) {
sum += buf[i]
}
this.cksum = sum
encNumber(buf, off + 148, 8, this.cksum)
this.cksumValid = true
return this.needPax
}
set (data) {
for (let i in data) {
if (data[i] !== null && data[i] !== undefined)
this[i] = data[i]
}
}
get type () {
return types.name.get(this[TYPE]) || this[TYPE]
}
get typeKey () {
return this[TYPE]
}
set type (type) {
if (types.code.has(type))
this[TYPE] = types.code.get(type)
else
this[TYPE] = type
}
}
const splitPrefix = (p, prefixSize) => {
const pathSize = 100
let pp = p
let prefix = ''
let ret
const root = pathModule.parse(p).root || '.'
if (Buffer.byteLength(pp) < pathSize)
ret = [pp, prefix, false]
else {
// first set prefix to the dir, and path to the base
prefix = pathModule.dirname(pp)
pp = pathModule.basename(pp)
do {
// both fit!
if (Buffer.byteLength(pp) <= pathSize &&
Buffer.byteLength(prefix) <= prefixSize)
ret = [pp, prefix, false]
// prefix fits in prefix, but path doesn't fit in path
else if (Buffer.byteLength(pp) > pathSize &&
Buffer.byteLength(prefix) <= prefixSize)
ret = [pp.substr(0, pathSize - 1), prefix, true]
else {
// make path take a bit from prefix
pp = pathModule.join(pathModule.basename(prefix), pp)
prefix = pathModule.dirname(prefix)
}
} while (prefix !== root && !ret)
// at this point, found no resolution, just truncate
if (!ret)
ret = [p.substr(0, pathSize - 1), '', true]
}
return ret
}
const decString = (buf, off, size) =>
buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
const decDate = (buf, off, size) =>
numToDate(decNumber(buf, off, size))
const numToDate = num => num === null ? null : new Date(num * 1000)
const decNumber = (buf, off, size) =>
buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
: decSmallNumber(buf, off, size)
const nanNull = value => isNaN(value) ? null : value
const decSmallNumber = (buf, off, size) =>
nanNull(parseInt(
buf.slice(off, off + size)
.toString('utf8').replace(/\0.*$/, '').trim(), 8))
// the maximum encodable as a null-terminated octal, by field size
const MAXNUM = {
12: 0o77777777777,
8 : 0o7777777
}
const encNumber = (buf, off, size, number) =>
number === null ? false :
number > MAXNUM[size] || number < 0
? (large.encode(number, buf.slice(off, off + size)), true)
: (encSmallNumber(buf, off, size, number), false)
const encSmallNumber = (buf, off, size, number) =>
buf.write(octalString(number, size), off, size, 'ascii')
const octalString = (number, size) =>
padOctal(Math.floor(number).toString(8), size)
const padOctal = (string, size) =>
(string.length === size - 1 ? string
: new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
const encDate = (buf, off, size, date) =>
date === null ? false :
encNumber(buf, off, size, date.getTime() / 1000)
// enough to fill the longest string we've got
const NULLS = new Array(156).join('\0')
// pad with nulls, return true if it's longer or non-ascii
const encString = (buf, off, size, string) =>
string === null ? false :
(buf.write(string + NULLS, off, size, 'utf8'),
string.length !== Buffer.byteLength(string) || string.length > size)
module.exports = Header
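
A hedged round-trip sketch using the class above: encode a File entry into a 512-byte block, then decode it back. The field values are illustrative:

const Header = require('./header.js')
const h = new Header({
  path: 'hello.txt',
  type: 'File',
  mode: 0o644,
  size: 5,
  mtime: new Date()
})
const block = Buffer.alloc(512)
h.encode(block, 0) // returns true only if a pax header would be needed
const h2 = new Header(block)
console.log(h2.path, h2.size, h2.cksumValid) // hello.txt 5 true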

node_modules/tar/lib/high-level-opt.js generated vendored Normal file

@@ -0,0 +1,29 @@
'use strict'
// turn tar(1) style args like `C` into the more verbose things like `cwd`
const argmap = new Map([
['C', 'cwd'],
['f', 'file'],
['z', 'gzip'],
['P', 'preservePaths'],
['U', 'unlink'],
['strip-components', 'strip'],
['stripComponents', 'strip'],
['keep-newer', 'newer'],
['keepNewer', 'newer'],
['keep-newer-files', 'newer'],
['keepNewerFiles', 'newer'],
['k', 'keep'],
['keep-existing', 'keep'],
['keepExisting', 'keep'],
['m', 'noMtime'],
['no-mtime', 'noMtime'],
['p', 'preserveOwner'],
['L', 'follow'],
['h', 'follow']
])
const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
argmap.has(k) ? argmap.get(k) : k, opt[k]
]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
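
A hedged sketch of the mapping in action:

const hlo = require('./high-level-opt.js')
// terse tar(1)-style keys come out verbose; unknown keys pass through
const opt = hlo({ C: '/tmp', z: true, f: 'out.tgz' })
// opt.cwd === '/tmp', opt.gzip === true, opt.file === 'out.tgz'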

node_modules/tar/lib/large-numbers.js generated vendored Normal file

@@ -0,0 +1,97 @@
'use strict'
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive.
const encode = exports.encode = (num, buf) => {
if (!Number.isSafeInteger(num))
// The number is so large that javascript cannot represent it with integer
// precision.
throw TypeError('cannot encode number outside of javascript safe integer range')
else if (num < 0)
encodeNegative(num, buf)
else
encodePositive(num, buf)
return buf
}
const encodePositive = (num, buf) => {
buf[0] = 0x80
for (var i = buf.length; i > 1; i--) {
buf[i-1] = num & 0xff
num = Math.floor(num / 0x100)
}
}
const encodeNegative = (num, buf) => {
buf[0] = 0xff
var flipped = false
num = num * -1
for (var i = buf.length; i > 1; i--) {
var byte = num & 0xff
num = Math.floor(num / 0x100)
if (flipped)
buf[i-1] = onesComp(byte)
else if (byte === 0)
buf[i-1] = 0
else {
flipped = true
buf[i-1] = twosComp(byte)
}
}
}
const parse = exports.parse = (buf) => {
var post = buf[buf.length - 1]
var pre = buf[0]
var value
if (pre === 0x80)
value = pos(buf.slice(1, buf.length))
else if (pre === 0xff)
value = twos(buf)
else
throw TypeError('invalid base256 encoding')
if (!Number.isSafeInteger(value))
// The number is so large that javascript cannot represent it with integer
// precision.
throw TypeError('parsed number outside of javascript safe integer range')
return value
}
const twos = (buf) => {
var len = buf.length
var sum = 0
var flipped = false
for (var i = len - 1; i > -1; i--) {
var byte = buf[i]
var f
if (flipped)
f = onesComp(byte)
else if (byte === 0)
f = byte
else {
flipped = true
f = twosComp(byte)
}
if (f !== 0)
sum -= f * Math.pow(256, len - i - 1)
}
return sum
}
const pos = (buf) => {
var len = buf.length
var sum = 0
for (var i = len - 1; i > -1; i--) {
var byte = buf[i]
if (byte !== 0)
sum += byte * Math.pow(256, len - i - 1)
}
return sum
}
const onesComp = byte => (0xff ^ byte) & 0xff
const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
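
A hedged sketch of the base-256 round trip, using a value one past the largest that fits an 11-digit octal size field:

const large = require('./large-numbers.js')
const buf = Buffer.alloc(12)     // a 12-byte size field
large.encode(8589934592, buf)    // 8 GiB: 0o77777777777 + 1
console.log(buf[0])              // 128 (0x80 marker: positive base-256)
console.log(large.parse(buf))    // 8589934592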

node_modules/tar/lib/list.js generated vendored Normal file

@@ -0,0 +1,130 @@
'use strict'
const Buffer = require('./buffer.js')
// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?
// tar -t
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const t = module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
if (!opt.noResume)
onentryFunction(opt)
return opt.file && opt.sync ? listFileSync(opt)
: opt.file ? listFile(opt, cb)
: list(opt)
}
const onentryFunction = opt => {
const onentry = opt.onentry
opt.onentry = onentry ? e => {
onentry(e)
e.resume()
} : e => e.resume()
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
: file => mapHas(file.replace(/\/+$/, ''))
}
const listFileSync = opt => {
const p = list(opt)
const file = opt.file
let threw = true
let fd
try {
const stat = fs.statSync(file)
const readSize = opt.maxReadSize || 16*1024*1024
if (stat.size < readSize) {
p.end(fs.readFileSync(file))
} else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
p.end()
}
threw = false
} finally {
if (threw && fd)
try { fs.closeSync(fd) } catch (er) {}
}
}
const listFile = (opt, cb) => {
const parse = new Parser(opt)
const readSize = opt.maxReadSize || 16*1024*1024
const file = opt.file
const p = new Promise((resolve, reject) => {
parse.on('error', reject)
parse.on('end', resolve)
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size
})
stream.on('error', reject)
stream.pipe(parse)
}
})
})
return cb ? p.then(cb, cb) : p
}
const list = opt => new Parser(opt)
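
A hedged usage sketch of this entry point (tar -t); the file name is hypothetical:

const t = require('./list.js')
t({
  file: 'out.tgz',
  onentry: entry => console.log(entry.path, entry.size)
}).then(() => console.log('listing complete'))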

node_modules/tar/lib/mkdir.js generated vendored Normal file

@@ -0,0 +1,206 @@
'use strict'
// wrapper around mkdirp for tar's needs.
// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.
const mkdirp = require('mkdirp')
const fs = require('fs')
const path = require('path')
const chownr = require('chownr')
class SymlinkError extends Error {
constructor (symlink, path) {
super('Cannot extract through symbolic link')
this.path = path
this.symlink = symlink
}
get name () {
return 'SymlinkError'
}
}
class CwdError extends Error {
constructor (path, code) {
super(code + ': Cannot cd into \'' + path + '\'')
this.path = path
this.code = code
}
get name () {
return 'CwdError'
}
}
const mkdir = module.exports = (dir, opt, cb) => {
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
( uid !== opt.processUid || gid !== opt.processGid )
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = opt.cwd
const done = (er, created) => {
if (er)
cb(er)
else {
cache.set(dir, true)
if (created && doChown)
chownr(created, uid, gid, er => done(er))
else if (needChmod)
fs.chmod(dir, mode, cb)
else
cb()
}
}
if (cache && cache.get(dir) === true)
return done()
if (dir === cwd)
return fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
done(er)
})
if (preserve)
return mkdirp(dir, mode, done)
const sub = path.relative(cwd, dir)
const parts = sub.split(/\/|\\/)
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
}
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
if (!parts.length)
return cb(null, created)
const p = parts.shift()
const part = base + '/' + p
if (cache.get(part))
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
}
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
if (er) {
if (er.path && path.dirname(er.path) === cwd &&
(er.code === 'ENOTDIR' || er.code === 'ENOENT'))
return cb(new CwdError(cwd, er.code))
fs.lstat(part, (statEr, st) => {
if (statEr)
cb(statEr)
else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
else if (unlink)
fs.unlink(part, er => {
if (er)
return cb(er)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
})
else if (st.isSymbolicLink())
return cb(new SymlinkError(part, part + '/' + parts.join('/')))
else
cb(er)
})
} else {
created = created || part
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
}
}
const mkdirSync = module.exports.sync = (dir, opt) => {
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
( uid !== opt.processUid || gid !== opt.processGid )
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = opt.cwd
const done = (created) => {
cache.set(dir, true)
if (created && doChown)
chownr.sync(created, uid, gid)
if (needChmod)
fs.chmodSync(dir, mode)
}
if (cache && cache.get(dir) === true)
return done()
if (dir === cwd) {
let ok = false
let code = 'ENOTDIR'
try {
ok = fs.statSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
}
done()
return
}
if (preserve)
return done(mkdirp.sync(dir, mode))
const sub = path.relative(cwd, dir)
const parts = sub.split(/\/|\\/)
let created = null
for (let p = parts.shift(), part = cwd;
p && (part += '/' + p);
p = parts.shift()) {
if (cache.get(part))
continue
try {
fs.mkdirSync(part, mode)
created = created || part
cache.set(part, true)
} catch (er) {
if (er.path && path.dirname(er.path) === cwd &&
(er.code === 'ENOTDIR' || er.code === 'ENOENT'))
return new CwdError(cwd, er.code)
const st = fs.lstatSync(part)
if (st.isDirectory()) {
cache.set(part, true)
continue
} else if (unlink) {
fs.unlinkSync(part)
fs.mkdirSync(part, mode)
created = created || part
cache.set(part, true)
continue
} else if (st.isSymbolicLink())
return new SymlinkError(part, part + '/' + parts.join('/'))
}
}
return done(created)
}
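
A hedged sketch of a call with the kind of option bag unpack.js supplies; every value here is illustrative:

const mkdir = require('./mkdir.js')
mkdir('/tmp/dest/a/b', {
  cwd: '/tmp/dest',
  mode: 0o755,
  umask: 0o22,
  preserve: false,
  unlink: false,
  cache: new Map() // dir cache shared across entries
}, er => {
  if (er) throw er
  console.log('created')
})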

node_modules/tar/lib/mode-fix.js generated vendored Normal file

@@ -0,0 +1,14 @@
'use strict'
module.exports = (mode, isDir) => {
mode &= 0o7777
// if dirs are readable, then they should be listable
if (isDir) {
if (mode & 0o400)
mode |= 0o100
if (mode & 0o40)
mode |= 0o10
if (mode & 0o4)
mode |= 0o1
}
return mode
}
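
A hedged sketch of the rule: any readable bit on a directory implies the matching execute (list) bit:

const modeFix = require('./mode-fix.js')
console.log(modeFix(0o644, false).toString(8)) // 644, files untouched
console.log(modeFix(0o644, true).toString(8))  // 755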

node_modules/tar/lib/pack.js generated vendored Normal file

@@ -0,0 +1,404 @@
'use strict'
const Buffer = require('./buffer.js')
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
this.ignore = false
this.piped = false
}
}
const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const Pack = warner(class Pack extends MiniPass {
constructor (opt) {
super(opt)
opt = opt || Object.create(null)
this.opt = opt
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.portable = !!opt.portable
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
this[ENDED] = false
}
[WRITE] (chunk) {
return super.write(chunk)
}
add (path) {
this.write(path)
return this
}
end (path) {
if (path)
this.write(path)
this[ENDED] = true
this[PROCESS]()
return this
}
write (path) {
if (this[ENDED])
throw new Error('write after end')
if (path instanceof ReadEntry)
this[ADDTARENTRY](path)
else
this[ADDFSENTRY](path)
return this.flowing
}
[ADDTARENTRY] (p) {
const absolute = path.resolve(this.cwd, p.path)
if (this.prefix)
p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p))
p.resume()
else {
const job = new PackJob(p.path, absolute, false)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
}
this[PROCESS]()
}
[ADDFSENTRY] (p) {
const absolute = path.resolve(this.cwd, p)
if (this.prefix)
p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]()
}
[STAT] (job) {
job.pending = true
this[JOBS] += 1
const stat = this.follow ? 'stat' : 'lstat'
fs[stat](job.absolute, (er, stat) => {
job.pending = false
this[JOBS] -= 1
if (er)
this.emit('error', er)
else
this[ONSTAT](job, stat)
})
}
[ONSTAT] (job, stat) {
this.statCache.set(job.absolute, stat)
job.stat = stat
// now we have the stat, we can filter it.
if (!this.filter(job.path, stat))
job.ignore = true
this[PROCESS]()
}
[READDIR] (job) {
job.pending = true
this[JOBS] += 1
fs.readdir(job.absolute, (er, entries) => {
job.pending = false
this[JOBS] -= 1
if (er)
return this.emit('error', er)
this[ONREADDIR](job, entries)
})
}
[ONREADDIR] (job, entries) {
this.readdirCache.set(job.absolute, entries)
job.readdir = entries
this[PROCESS]()
}
[PROCESS] () {
if (this[PROCESSING])
return
this[PROCESSING] = true
for (let w = this[QUEUE].head;
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
this[PROCESSING] = false
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
if (this.zip)
this.zip.end(EOF)
else {
super.write(EOF)
super.end()
}
}
}
get [CURRENT] () {
return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
}
[JOBDONE] (job) {
this[QUEUE].shift()
this[JOBS] -= 1
this[PROCESS]()
}
[PROCESSJOB] (job) {
if (job.pending)
return
if (job.entry) {
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
return
}
if (!job.stat) {
if (this.statCache.has(job.absolute))
this[ONSTAT](job, this.statCache.get(job.absolute))
else
this[STAT](job)
}
if (!job.stat)
return
// filtered out!
if (job.ignore)
return
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
if (this.readdirCache.has(job.absolute))
this[ONREADDIR](job, this.readdirCache.get(job.absolute))
else
this[READDIR](job)
if (!job.readdir)
return
}
// we know it doesn't have an entry, because that got checked above
job.entry = this[ENTRY](job)
if (!job.entry) {
job.ignore = true
return
}
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
}
[ENTRYOPT] (job) {
return {
onwarn: (msg, data) => {
this.warn(msg, data)
},
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime
}
}
[ENTRY] (job) {
this[JOBS] += 1
try {
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
.on('end', () => this[JOBDONE](job))
.on('error', er => this.emit('error', er))
} catch (er) {
this.emit('error', er)
}
}
[ONDRAIN] () {
if (this[CURRENT] && this[CURRENT].entry)
this[CURRENT].entry.resume()
}
// like .pipe() but using super, because our write() is special
[PIPE] (job) {
job.piped = true
if (job.readdir)
job.readdir.forEach(entry => {
const p = this.prefix ?
job.path.slice(this.prefix.length + 1) || './'
: job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
const source = job.entry
const zip = this.zip
if (zip)
source.on('data', chunk => {
if (!zip.write(chunk))
source.pause()
})
else
source.on('data', chunk => {
if (!super.write(chunk))
source.pause()
})
}
pause () {
if (this.zip)
this.zip.pause()
return super.pause()
}
})
class PackSync extends Pack {
constructor (opt) {
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
}
// pause/resume are no-ops in sync streams.
pause () {}
resume () {}
[STAT] (job) {
const stat = this.follow ? 'statSync' : 'lstatSync'
this[ONSTAT](job, fs[stat](job.absolute))
}
[READDIR] (job, stat) {
this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
// gotta get it all in this tick
[PIPE] (job) {
const source = job.entry
const zip = this.zip
if (job.readdir)
job.readdir.forEach(entry => {
const p = this.prefix ?
job.path.slice(this.prefix.length + 1) || './'
: job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
if (zip)
source.on('data', chunk => {
zip.write(chunk)
})
else
source.on('data', chunk => {
super[WRITE](chunk)
})
}
}
Pack.Sync = PackSync
module.exports = Pack
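
A hedged sketch of driving Pack directly, in the chained style the header comment describes; paths are hypothetical:

const fs = require('fs')
const Pack = require('./pack.js')
new Pack({ cwd: '/tmp/src', gzip: true })
  .add('some-dir')
  .end()
  .pipe(fs.createWriteStream('/tmp/out.tgz'))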

node_modules/tar/lib/parse.js generated vendored Normal file

@@ -0,0 +1,428 @@
'use strict'
// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted
// yet this can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
//
// ignored entries get .resume() called on them straight away
const warner = require('./warn-mixin.js')
const path = require('path')
const Header = require('./header.js')
const EE = require('events')
const Yallist = require('yallist')
const maxMetaEntrySize = 1024 * 1024
const Entry = require('./read-entry.js')
const Pax = require('./pax.js')
const zlib = require('minizlib')
const Buffer = require('./buffer.js')
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const noop = _ => true
module.exports = warner(class Parser extends EE {
constructor (opt) {
opt = opt || {}
super(opt)
if (opt.ondone)
this.on(DONE, opt.ondone)
else
this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
})
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// have to set this so that streams are ok piping into it
this.writable = true
this.readable = false
this[QUEUE] = new Yallist()
this[BUFFER] = null
this[READENTRY] = null
this[WRITEENTRY] = null
this[STATE] = 'begin'
this[META] = ''
this[EX] = null
this[GEX] = null
this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (typeof opt.onentry === 'function')
this.on('entry', opt.onentry)
}
[CONSUMEHEADER] (chunk, position) {
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('invalid entry', er)
}
if (header.nullBlock)
this[EMIT]('nullBlock')
else if (!header.cksumValid)
this.warn('invalid entry', header)
else if (!header.path)
this.warn('invalid: path is required', header)
else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
this.warn('invalid: linkpath required', header)
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
this.warn('invalid: linkpath forbidden', header)
else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
if (entry.meta) {
if (entry.size > this.maxMetaEntrySize) {
entry.ignore = true
this[EMIT]('ignoredEntry', entry)
this[STATE] = 'ignore'
} else if (entry.size > 0) {
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (entry.ignore) {
this[EMIT]('ignoredEntry', entry)
this[STATE] = entry.remain ? 'ignore' : 'begin'
} else {
if (entry.remain)
this[STATE] = 'body'
else {
this[STATE] = 'begin'
entry.end()
}
if (!this[READENTRY]) {
this[QUEUE].push(entry)
this[NEXTENTRY]()
} else
this[QUEUE].push(entry)
}
}
}
}
}
[PROCESSENTRY] (entry) {
let go = true
if (!entry) {
this[READENTRY] = null
go = false
} else if (Array.isArray(entry))
this.emit.apply(this, entry)
else {
this[READENTRY] = entry
this.emit('entry', entry)
if (!entry.emittedEnd) {
entry.on('end', _ => this[NEXTENTRY]())
go = false
}
}
return go
}
[NEXTENTRY] () {
do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY]
const drainNow = !re || re.flowing || re.size === re.remain
if (drainNow) {
if (!this[WRITING])
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
}
}
[CONSUMEBODY] (chunk, position) {
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
entry.write(c)
if (!entry.blockRemain) {
this[STATE] = 'begin'
this[WRITEENTRY] = null
entry.end()
}
return c.length
}
[CONSUMEMETA] (chunk, position) {
const entry = this[WRITEENTRY]
const ret = this[CONSUMEBODY](chunk, position)
// if we finished, then the entry is reset
if (!this[WRITEENTRY])
this[EMITMETA](entry)
return ret
}
[EMIT] (ev, data, extra) {
if (!this[QUEUE].length && !this[READENTRY])
this.emit(ev, data, extra)
else
this[QUEUE].push([ev, data, extra])
}
[EMITMETA] (entry) {
this[EMIT]('meta', this[META])
switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = Pax.parse(this[META], this[EX], false)
break
case 'GlobalExtendedHeader':
this[GEX] = Pax.parse(this[META], this[GEX], true)
break
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
case 'NextFileHasLongLinkpath':
this[EX] = this[EX] || Object.create(null)
this[EX].linkpath = this[META].replace(/\0.*/, '')
break
/* istanbul ignore next */
default: throw new Error('unknown meta: ' + entry.type)
}
}
abort (msg, error) {
this[ABORTED] = true
this.warn(msg, error)
this.emit('abort', error)
this.emit('error', error)
}
write (chunk) {
if (this[ABORTED])
return
// first write, might be gzipped
if (this[UNZIP] === null && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
}
if (chunk.length < gzipHeader.length) {
this[BUFFER] = chunk
return true
}
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i])
this[UNZIP] = false
}
if (this[UNZIP] === null) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er =>
this.abort(er.message, er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk)
this[WRITING] = false
return ret
}
}
this[WRITING] = true
if (this[UNZIP])
this[UNZIP].write(chunk)
else
this[CONSUMECHUNK](chunk)
this[WRITING] = false
// return false if there's a queue, or if the current entry isn't flowing
const ret =
this[QUEUE].length ? false :
this[READENTRY] ? this[READENTRY].flowing :
true
// if we have no queue, then that means a clogged READENTRY
if (!ret && !this[QUEUE].length)
this[READENTRY].once('drain', _ => this.emit('drain'))
return ret
}
[BUFFERCONCAT] (c) {
if (c && !this[ABORTED])
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
[MAYBEEND] () {
if (this[ENDED] &&
!this[EMITTEDEND] &&
!this[ABORTED] &&
!this[CONSUMING]) {
this[EMITTEDEND] = true
const entry = this[WRITEENTRY]
if (entry && entry.blockRemain) {
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('Truncated input (needed ' + entry.blockRemain +
' more bytes, only ' + have + ' available)', entry)
if (this[BUFFER])
entry.write(this[BUFFER])
entry.end()
}
this[EMIT](DONE)
}
}
[CONSUMECHUNK] (chunk) {
if (this[CONSUMING]) {
this[BUFFERCONCAT](chunk)
} else if (!chunk && !this[BUFFER]) {
this[MAYBEEND]()
} else {
this[CONSUMING] = true
if (this[BUFFER]) {
this[BUFFERCONCAT](chunk)
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
} else {
this[CONSUMECHUNKSUB](chunk)
}
while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
}
this[CONSUMING] = false
}
if (!this[BUFFER] || this[ENDED])
this[MAYBEEND]()
}
[CONSUMECHUNKSUB] (chunk) {
// we know that we are in CONSUMING mode, so anything written goes into
// the buffer. Advance the position and put any remainder in the buffer.
let position = 0
let length = chunk.length
while (position + 512 <= length && !this[ABORTED]) {
switch (this[STATE]) {
case 'begin':
this[CONSUMEHEADER](chunk, position)
position += 512
break
case 'ignore':
case 'body':
position += this[CONSUMEBODY](chunk, position)
break
case 'meta':
position += this[CONSUMEMETA](chunk, position)
break
/* istanbul ignore next */
default:
throw new Error('invalid state: ' + this[STATE])
}
}
if (position < length) {
if (this[BUFFER])
this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
else
this[BUFFER] = chunk.slice(position)
}
}
end (chunk) {
if (!this[ABORTED]) {
if (this[UNZIP])
this[UNZIP].end(chunk)
else {
this[ENDED] = true
this.write(chunk)
}
}
}
})
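
A hedged sketch of feeding the parser a (possibly gzipped) stream; note that ignored bodies must be resumed, the same pattern list.js wraps for you:

const fs = require('fs')
const Parser = require('./parse.js')
const p = new Parser({
  onentry: entry => {
    console.log(entry.path)
    entry.resume() // drain the body so parsing can continue
  }
})
fs.createReadStream('archive.tar').pipe(p)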

node_modules/tar/lib/pax.js generated vendored Normal file

@@ -0,0 +1,146 @@
'use strict'
const Buffer = require('./buffer.js')
const Header = require('./header.js')
const path = require('path')
class Pax {
constructor (obj, global) {
this.atime = obj.atime || null
this.charset = obj.charset || null
this.comment = obj.comment || null
this.ctime = obj.ctime || null
this.gid = obj.gid || null
this.gname = obj.gname || null
this.linkpath = obj.linkpath || null
this.mtime = obj.mtime || null
this.path = obj.path || null
this.size = obj.size || null
this.uid = obj.uid || null
this.uname = obj.uname || null
this.dev = obj.dev || null
this.ino = obj.ino || null
this.nlink = obj.nlink || null
this.global = global || false
}
encode () {
const body = this.encodeBody()
if (body === '')
return null
const bodyLen = Buffer.byteLength(body)
// round up to 512 bytes
// add 512 for header
const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
const buf = Buffer.allocUnsafe(bufLen)
// 0-fill the header section, it might not hit every field
for (let i = 0; i < 512; i++) {
buf[i] = 0
}
new Header({
// XXX split the path
// then the path should be PaxHeader + basename, but less than 99,
// prepend with the dirname
path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
mode: this.mode || 0o644,
uid: this.uid || null,
gid: this.gid || null,
size: bodyLen,
mtime: this.mtime || null,
type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
linkpath: '',
uname: this.uname || '',
gname: this.gname || '',
devmaj: 0,
devmin: 0,
atime: this.atime || null,
ctime: this.ctime || null
}).encode(buf)
buf.write(body, 512, bodyLen, 'utf8')
// null pad after the body
for (let i = bodyLen + 512; i < buf.length; i++) {
buf[i] = 0
}
return buf
}
encodeBody () {
return (
this.encodeField('path') +
this.encodeField('ctime') +
this.encodeField('atime') +
this.encodeField('dev') +
this.encodeField('ino') +
this.encodeField('nlink') +
this.encodeField('charset') +
this.encodeField('comment') +
this.encodeField('gid') +
this.encodeField('gname') +
this.encodeField('linkpath') +
this.encodeField('mtime') +
this.encodeField('size') +
this.encodeField('uid') +
this.encodeField('uname')
)
}
encodeField (field) {
if (this[field] === null || this[field] === undefined)
return ''
const v = this[field] instanceof Date ? this[field].getTime() / 1000
: this[field]
const s = ' ' +
(field === 'dev' || field === 'ino' || field === 'nlink'
? 'SCHILY.' : '') +
field + '=' + v + '\n'
const byteLen = Buffer.byteLength(s)
// the length prefix counts its own digits in ascii base-10, so
// adding them can push the total into one more digit: a 9-byte
// record plus a 1-digit prefix is 10 bytes, which needs 2 digits,
// making the full line 11 bytes.
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
if (byteLen + digits >= Math.pow(10, digits))
digits += 1
const len = digits + byteLen
return len + s
}
}
Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
const merge = (a, b) =>
b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
const parseKV = string =>
string
.replace(/\n$/, '')
.split('\n')
.reduce(parseKVLine, Object.create(null))
const parseKVLine = (set, line) => {
const n = parseInt(line, 10)
// XXX Values with \n in them will fail this.
// Refactor to not be a naive line-by-line parse.
if (n !== Buffer.byteLength(line) + 1)
return set
line = line.substr((n + ' ').length)
const kv = line.split('=')
const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
if (!k)
return set
const v = kv.join('=')
set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
? new Date(v * 1000)
: /^[0-9]+$/.test(v) ? +v
: v
return set
}
module.exports = Pax
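
A hedged sketch of the record format: each line is "<len> <key>=<value>\n" where <len> counts its own digits, round-tripped through encodeBody and parse:

const Pax = require('./pax.js')
const body = new Pax({ path: 'a/very/long/path.txt' }).encodeBody()
console.log(JSON.stringify(body))
// "29 path=a/very/long/path.txt\n" -- 2 digits + 27-byte record = 29
console.log(Pax.parse(body, null, false).path) // a/very/long/path.txt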

node_modules/tar/lib/read-entry.js generated vendored Normal file

@@ -0,0 +1,98 @@
'use strict'
const types = require('./types.js')
const MiniPass = require('minipass')
const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends MiniPass {
constructor (header, ex, gex) {
super()
// read entries always start life paused. this is to avoid the
// situation where Minipass's auto-ending empty streams results
// in an entry ending before we're ready for it.
this.pause()
this.extended = ex
this.globalExtended = gex
this.header = header
this.startBlockSize = 512 * Math.ceil(header.size / 512)
this.blockRemain = this.startBlockSize
this.remain = header.size
this.type = header.type
this.meta = false
this.ignore = false
switch (this.type) {
case 'File':
case 'OldFile':
case 'Link':
case 'SymbolicLink':
case 'CharacterDevice':
case 'BlockDevice':
case 'Directory':
case 'FIFO':
case 'ContiguousFile':
case 'GNUDumpDir':
break
case 'NextFileHasLongLinkpath':
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
case 'GlobalExtendedHeader':
case 'ExtendedHeader':
case 'OldExtendedHeader':
this.meta = true
break
// NOTE: gnutar and bsdtar treat unrecognized types as 'File'
// it may be worth doing the same, but with a warning.
default:
this.ignore = true
}
this.path = header.path
this.mode = header.mode
if (this.mode)
this.mode = this.mode & 0o7777
this.uid = header.uid
this.gid = header.gid
this.uname = header.uname
this.gname = header.gname
this.size = header.size
this.mtime = header.mtime
this.atime = header.atime
this.ctime = header.ctime
this.linkpath = header.linkpath
this.uname = header.uname
this.gname = header.gname
if (ex) this[SLURP](ex)
if (gex) this[SLURP](gex, true)
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
const r = this.remain
const br = this.blockRemain
this.remain = Math.max(0, r - writeLen)
this.blockRemain = Math.max(0, br - writeLen)
if (this.ignore)
return true
if (r >= writeLen)
return super.write(data)
// r < writeLen
return super.write(data.slice(0, r))
}
[SLURP] (ex, global) {
for (let k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path'))
this[k] = ex[k]
}
}
}
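
A hedged sketch showing why the constructor pauses: nothing flows until the consumer resumes. The header fields are illustrative:

const Header = require('./header.js')
const ReadEntry = require('./read-entry.js')
const entry = new ReadEntry(new Header({
  path: 'hello.txt', type: 'File', size: 5, mtime: new Date()
}))
entry.on('data', c => console.log('body:', c.toString()))
entry.resume()                    // entries start life paused
entry.write(Buffer.from('hello')) // the 5 bytes of body
entry.end()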

node_modules/tar/lib/replace.js generated vendored Normal file

@@ -0,0 +1,220 @@
'use strict'
const Buffer = require('./buffer.js')
// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const Parse = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.
const Header = require('./header.js')
const r = module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file)
throw new TypeError('file is required')
if (opt.gzip)
throw new TypeError('cannot append to compressed archives')
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
return opt.sync ? replaceSync(opt, files)
: replace(opt, files, cb)
}
const replaceSync = (opt, files) => {
const p = new Pack.Sync(opt)
let threw = true
let fd
let position
try {
try {
fd = fs.openSync(opt.file, 'r+')
} catch (er) {
if (er.code === 'ENOENT')
fd = fs.openSync(opt.file, 'w+')
else
throw er
}
const st = fs.fstatSync(fd)
const headBuf = Buffer.alloc(512)
POSITION: for (position = 0; position < st.size; position += 512) {
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
bytes = fs.readSync(
fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
)
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
throw new Error('cannot append to compressed archives')
if (!bytes)
break POSITION
}
let h = new Header(headBuf)
if (!h.cksumValid)
break
let entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > st.size)
break
// the 512 for the header we just parsed will be added as well
// also jump ahead all the blocks for the body
position += entryBlockSize
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
}
threw = false
streamSync(opt, p, position, fd, files)
} finally {
if (threw)
try { fs.closeSync(fd) } catch (er) {}
}
}
const streamSync = (opt, p, position, fd, files) => {
const stream = new fsm.WriteStreamSync(opt.file, {
fd: fd,
start: position
})
p.pipe(stream)
addFilesSync(p, files)
}
const replace = (opt, files, cb) => {
files = Array.from(files)
const p = new Pack(opt)
const getPos = (fd, size, cb_) => {
const cb = (er, pos) => {
if (er)
fs.close(fd, _ => cb_(er))
else
cb_(null, pos)
}
let position = 0
if (size === 0)
return cb(null, 0)
let bufPos = 0
const headBuf = Buffer.alloc(512)
const onread = (er, bytes) => {
if (er)
return cb(er)
bufPos += bytes
if (bufPos < 512 && bytes)
return fs.read(
fd, headBuf, bufPos, headBuf.length - bufPos,
position + bufPos, onread
)
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
return cb(new Error('cannot append to compressed archives'))
// truncated header
if (bufPos < 512)
return cb(null, position)
const h = new Header(headBuf)
if (!h.cksumValid)
return cb(null, position)
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > size)
return cb(null, position)
position += entryBlockSize + 512
if (position >= size)
return cb(null, position)
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
bufPos = 0
fs.read(fd, headBuf, 0, 512, position, onread)
}
fs.read(fd, headBuf, 0, 512, position, onread)
}
const promise = new Promise((resolve, reject) => {
p.on('error', reject)
let flag = 'r+'
const onopen = (er, fd) => {
if (er && er.code === 'ENOENT' && flag === 'r+') {
flag = 'w+'
return fs.open(opt.file, flag, onopen)
}
if (er)
return reject(er)
fs.fstat(fd, (er, st) => {
if (er)
return reject(er)
getPos(fd, st.size, (er, position) => {
if (er)
return reject(er)
const stream = new fsm.WriteStream(opt.file, {
fd: fd,
start: position
})
p.pipe(stream)
stream.on('error', reject)
stream.on('close', resolve)
addFilesAsync(p, files)
})
})
}
fs.open(opt.file, flag, onopen)
})
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@')
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry)
})
else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@')
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry)
}).then(_ => addFilesAsync(p, files))
else
p.add(file)
}
p.end()
}
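
A hedged usage sketch of this entry point (tar -r); the archive must be an existing, uncompressed tar, and the names here are hypothetical:

const r = require('./replace.js')
r({ file: 'out.tar' }, ['extra.txt'])
  .then(() => console.log('appended'))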

node_modules/tar/lib/types.js generated vendored Normal file

@@ -0,0 +1,44 @@
'use strict'
// map types from key to human-friendly name
exports.name = new Map([
['0', 'File'],
// same as File
['', 'OldFile'],
['1', 'Link'],
['2', 'SymbolicLink'],
// Devices and FIFOs aren't fully supported
// they are parsed, but skipped when unpacking
['3', 'CharacterDevice'],
['4', 'BlockDevice'],
['5', 'Directory'],
['6', 'FIFO'],
// same as File
['7', 'ContiguousFile'],
// pax headers
['g', 'GlobalExtendedHeader'],
['x', 'ExtendedHeader'],
// vendor-specific stuff
// skip
['A', 'SolarisACL'],
// like 5, but with data, which should be skipped
['D', 'GNUDumpDir'],
// metadata only, skip
['I', 'Inode'],
// data = link path of next file
['K', 'NextFileHasLongLinkpath'],
// data = path of next file
['L', 'NextFileHasLongPath'],
// skip
['M', 'ContinuationFile'],
// like L
['N', 'OldGnuLongPath'],
// skip
['S', 'SparseFile'],
// skip
['V', 'TapeVolumeHeader'],
// like x
['X', 'OldExtendedHeader']
])
// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
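
A hedged sketch of the two maps translating in both directions:

const types = require('./types.js')
types.name.get('5')    // 'Directory'
types.code.get('File') // '0'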

node_modules/tar/lib/unpack.js generated vendored Normal file

@@ -0,0 +1,621 @@
'use strict'
const assert = require('assert')
const EE = require('events').EventEmitter
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const mkdir = require('./mkdir.js')
const mkdirSync = mkdir.sync
const wc = require('./winchars.js')
const ONENTRY = Symbol('onEntry')
const CHECKFS = Symbol('checkFs')
const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const UNKNOWN = Symbol('unknown')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const crypto = require('crypto')
// Unlinks on Windows are not atomic.
//
// This means that if you have a file entry, followed by another
// file entry with an identical name, and you cannot re-use the file
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file. It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
if (process.platform !== 'win32')
return fs.unlink(path, cb)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.rename(path, name, er => {
if (er)
return cb(er)
fs.unlink(name, cb)
})
}
/* istanbul ignore next */
const unlinkFileSync = path => {
if (process.platform !== 'win32')
return fs.unlinkSync(path)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.renameSync(path, name)
fs.unlinkSync(name)
}
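// e.g. on win32 the helpers above first rename foo.txt to something like
// foo.txt.DELETE.<32 hex chars> and only then unlink it, so a replacement
// foo.txt written in the meantime is never clobbered by the late unlink.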
// pick the first of (a, b, c) that is a valid uint32,
// e.g. (this.gid, entry.gid, this.processGid)
const uint32 = (a, b, c) =>
a === a >>> 0 ? a
: b === b >>> 0 ? b
: c
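// e.g. uint32(null, 1000, 0) === 1000, and uint32(-1, undefined, 0) === 0,
// since neither -1 nor undefined survives the x === x >>> 0 check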
class Unpack extends Parser {
constructor (opt) {
if (!opt)
opt = {}
opt.ondone = _ => {
this[ENDED] = true
this[MAYBECLOSE]()
}
super(opt)
this.transform = typeof opt.transform === 'function' ? opt.transform : null
this.writable = true
this.readable = false
this[PENDING] = 0
this[ENDED] = false
this.dirCache = opt.dirCache || new Map()
if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
// need both or neither
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
throw new TypeError('cannot set owner without number uid and gid')
if (opt.preserveOwner)
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
this.uid = opt.uid
this.gid = opt.gid
this.setOwner = true
} else {
this.uid = null
this.gid = null
this.setOwner = false
}
// default true for root
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
this.preserveOwner = process.getuid && process.getuid() === 0
else
this.preserveOwner = !!opt.preserveOwner
this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
process.getuid() : null
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null
// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true
// turn <>?|: in filenames into their 0xf000-higher encoded forms
this.win32 = !!opt.win32 || process.platform === 'win32'
// do not unpack over files that are newer than what's in the archive
this.newer = !!opt.newer
// do not unpack over ANY files
this.keep = !!opt.keep
// do not set mtime/atime of extracted entries
this.noMtime = !!opt.noMtime
// allow .., absolute path entries, and unpacking through symlinks
// without this, warn and skip .., relativize absolutes, and error
// on symlinks in extraction path
this.preservePaths = !!opt.preservePaths
// unlink files and links before writing. This breaks existing hard
// links, and removes symlink directories rather than erroring
this.unlink = !!opt.unlink
this.cwd = path.resolve(opt.cwd || process.cwd())
this.strip = +opt.strip || 0
this.processUmask = process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
// default mode for dirs created as parents
this.dmode = opt.dmode || (0o0777 & (~this.umask))
this.fmode = opt.fmode || (0o0666 & (~this.umask))
this.on('entry', entry => this[ONENTRY](entry))
}
[MAYBECLOSE] () {
if (this[ENDED] && this[PENDING] === 0) {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
}
}
[CHECKPATH] (entry) {
if (this.strip) {
const parts = entry.path.split(/\/|\\/)
if (parts.length < this.strip)
return false
entry.path = parts.slice(this.strip).join('/')
if (entry.type === 'Link') {
const linkparts = entry.linkpath.split(/\/|\\/)
if (linkparts.length >= this.strip)
entry.linkpath = linkparts.slice(this.strip).join('/')
}
}
if (!this.preservePaths) {
const p = entry.path
if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
this.warn('path contains \'..\'', p)
return false
}
// absolutes on posix are also absolutes on win32
// so we only need to test this one to get both
if (path.win32.isAbsolute(p)) {
const parsed = path.win32.parse(p)
this.warn('stripping ' + parsed.root + ' from absolute path', p)
entry.path = p.substr(parsed.root.length)
}
}
// leave a drive-letter root (e.g. 'C:\') unencoded; encode ':' everywhere else
if (this.win32) {
const parsed = path.win32.parse(entry.path)
entry.path = parsed.root === '' ? wc.encode(entry.path)
: parsed.root + wc.encode(entry.path.substr(parsed.root.length))
}
if (path.isAbsolute(entry.path))
entry.absolute = entry.path
else
entry.absolute = path.resolve(this.cwd, entry.path)
return true
}
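// e.g. with strip: 1, 'package/lib/index.js' comes out as 'lib/index.js';
// without preservePaths, '/etc/passwd' is relativized to 'etc/passwd'
// (with a warning) before being resolved against this.cwd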
[ONENTRY] (entry) {
if (!this[CHECKPATH](entry))
return entry.resume()
assert.equal(typeof entry.absolute, 'string')
switch (entry.type) {
case 'Directory':
case 'GNUDumpDir':
if (entry.mode)
entry.mode = entry.mode | 0o700
case 'File':
case 'OldFile':
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry)
case 'CharacterDevice':
case 'BlockDevice':
case 'FIFO':
return this[UNSUPPORTED](entry)
}
}
[ONERROR] (er, entry) {
// Cwd has to exist, or else nothing works. That's serious.
// Other errors are warnings, which raise the error in strict
// mode, but otherwise continue on.
if (er.name === 'CwdError')
this.emit('error', er)
else {
this.warn(er.message, er)
this[UNPEND]()
entry.resume()
}
}
[MKDIR] (dir, mode, cb) {
mkdir(dir, {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode
}, cb)
}
[DOCHOWN] (entry) {
// in preserve owner mode, chown if the entry doesn't match process
// in set owner mode, chown if setting doesn't match process
return this.forceChown ||
this.preserveOwner &&
( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
typeof entry.gid === 'number' && entry.gid !== this.processGid )
||
( typeof this.uid === 'number' && this.uid !== this.processUid ||
typeof this.gid === 'number' && this.gid !== this.processGid )
}
[UID] (entry) {
return uint32(this.uid, entry.uid, this.processUid)
}
[GID] (entry) {
return uint32(this.gid, entry.gid, this.processGid)
}
[FILE] (entry) {
const mode = entry.mode & 0o7777 || this.fmode
const stream = new fsm.WriteStream(entry.absolute, {
mode: mode,
autoClose: false
})
stream.on('error', er => this[ONERROR](er, entry))
let actions = 1
const done = er => {
if (er)
return this[ONERROR](er, entry)
if (--actions === 0)
fs.close(stream.fd, _ => this[UNPEND]())
}
stream.on('finish', _ => {
// if futimes fails, try utimes
// if utimes fails, fail with the original error
// same for fchown/chown
const abs = entry.absolute
const fd = stream.fd
if (entry.mtime && !this.noMtime) {
actions++
const atime = entry.atime || new Date()
const mtime = entry.mtime
fs.futimes(fd, atime, mtime, er =>
er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
: done())
}
if (this[DOCHOWN](entry)) {
actions++
const uid = this[UID](entry)
const gid = this[GID](entry)
fs.fchown(fd, uid, gid, er =>
er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
: done())
}
done()
})
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => this[ONERROR](er, entry))
entry.pipe(tx)
}
tx.pipe(stream)
}
[DIRECTORY] (entry) {
const mode = entry.mode & 0o7777 || this.dmode
this[MKDIR](entry.absolute, mode, er => {
if (er)
return this[ONERROR](er, entry)
let actions = 1
const done = _ => {
if (--actions === 0) {
this[UNPEND]()
entry.resume()
}
}
if (entry.mtime && !this.noMtime) {
actions++
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
}
if (this[DOCHOWN](entry)) {
actions++
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
}
done()
})
}
[UNSUPPORTED] (entry) {
this.warn('unsupported entry type: ' + entry.type, entry)
entry.resume()
}
[SYMLINK] (entry) {
this[LINK](entry, entry.linkpath, 'symlink')
}
[HARDLINK] (entry) {
this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
}
[PEND] () {
this[PENDING]++
}
[UNPEND] () {
this[PENDING]--
this[MAYBECLOSE]()
}
[SKIP] (entry) {
this[UNPEND]()
entry.resume()
}
// Check if we can reuse an existing filesystem entry safely and
// overwrite it, rather than unlinking and recreating
// Windows doesn't report a useful nlink, so we just never reuse entries
[ISREUSABLE] (entry, st) {
return entry.type === 'File' &&
!this.unlink &&
st.isFile() &&
st.nlink <= 1 &&
process.platform !== 'win32'
}
// check if a thing is there, and if so, try to clobber it
[CHECKFS] (entry) {
this[PEND]()
this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
if (er)
return this[ONERROR](er, entry)
fs.lstat(entry.absolute, (er, st) => {
if (st && (this.keep || this.newer && st.mtime > entry.mtime))
this[SKIP](entry)
else if (er || this[ISREUSABLE](entry, st))
this[MAKEFS](null, entry)
else if (st.isDirectory()) {
if (entry.type === 'Directory') {
if (!entry.mode || (st.mode & 0o7777) === entry.mode)
this[MAKEFS](null, entry)
else
fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
} else
fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
} else
unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))
})
})
}
[MAKEFS] (er, entry) {
if (er)
return this[ONERROR](er, entry)
switch (entry.type) {
case 'File':
case 'OldFile':
case 'ContiguousFile':
return this[FILE](entry)
case 'Link':
return this[HARDLINK](entry)
case 'SymbolicLink':
return this[SYMLINK](entry)
case 'Directory':
case 'GNUDumpDir':
return this[DIRECTORY](entry)
}
}
[LINK] (entry, linkpath, link) {
// XXX: get the type ('file' or 'dir') for windows
fs[link](linkpath, entry.absolute, er => {
if (er)
return this[ONERROR](er, entry)
this[UNPEND]()
entry.resume()
})
}
}
class UnpackSync extends Unpack {
constructor (opt) {
super(opt)
}
[CHECKFS] (entry) {
const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
if (er)
return this[ONERROR](er, entry)
try {
const st = fs.lstatSync(entry.absolute)
if (this.keep || this.newer && st.mtime > entry.mtime)
return this[SKIP](entry)
else if (this[ISREUSABLE](entry, st))
return this[MAKEFS](null, entry)
else {
try {
if (st.isDirectory()) {
if (entry.type === 'Directory') {
if (entry.mode && (st.mode & 0o7777) !== entry.mode)
fs.chmodSync(entry.absolute, entry.mode)
} else
fs.rmdirSync(entry.absolute)
} else
unlinkFileSync(entry.absolute)
return this[MAKEFS](null, entry)
} catch (er) {
return this[ONERROR](er, entry)
}
}
} catch (er) {
return this[MAKEFS](null, entry)
}
}
[FILE] (entry) {
const mode = entry.mode & 0o7777 || this.fmode
const oner = er => {
try { fs.closeSync(fd) } catch (_) {}
if (er)
this[ONERROR](er, entry)
}
let stream
let fd
try {
fd = fs.openSync(entry.absolute, 'w', mode)
} catch (er) {
return oner(er)
}
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => this[ONERROR](er, entry))
entry.pipe(tx)
}
tx.on('data', chunk => {
try {
fs.writeSync(fd, chunk, 0, chunk.length)
} catch (er) {
oner(er)
}
})
tx.on('end', _ => {
let er = null
// try futimes first, falling back to utimes;
// if both fail, report the original futimes error
if (entry.mtime && !this.noMtime) {
const atime = entry.atime || new Date()
const mtime = entry.mtime
try {
fs.futimesSync(fd, atime, mtime)
} catch (futimeser) {
try {
fs.utimesSync(entry.absolute, atime, mtime)
} catch (utimeser) {
er = futimeser
}
}
}
if (this[DOCHOWN](entry)) {
const uid = this[UID](entry)
const gid = this[GID](entry)
try {
fs.fchownSync(fd, uid, gid)
} catch (fchowner) {
try {
fs.chownSync(entry.absolute, uid, gid)
} catch (chowner) {
er = er || fchowner
}
}
}
oner(er)
})
}
[DIRECTORY] (entry) {
const mode = entry.mode & 0o7777 || this.dmode
const er = this[MKDIR](entry.absolute, mode)
if (er)
return this[ONERROR](er, entry)
if (entry.mtime && !this.noMtime) {
try {
fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
} catch (er) {}
}
if (this[DOCHOWN](entry)) {
try {
fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
} catch (er) {}
}
entry.resume()
}
[MKDIR] (dir, mode) {
try {
return mkdir.sync(dir, {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode
})
} catch (er) {
return er
}
}
[LINK] (entry, linkpath, link) {
try {
fs[link + 'Sync'](linkpath, entry.absolute)
entry.resume()
} catch (er) {
return this[ONERROR](er, entry)
}
}
}
Unpack.Sync = UnpackSync
module.exports = Unpack
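// Usage sketch (assuming an uncompressed tarball and an existing cwd):
//   const Unpack = require('./unpack.js')
//   const fs = require('fs')
//   fs.createReadStream('archive.tar')
//     .pipe(new Unpack({ cwd: 'dest', strip: 1 }))
//     .on('close', () => console.log('extracted'))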

36
node_modules/tar/lib/update.js generated vendored Normal file
View File

@ -0,0 +1,36 @@
'use strict'
// tar -u
const hlo = require('./high-level-opt.js')
const r = require('./replace.js')
// just call tar.r with the filter and mtimeCache
const u = module.exports = (opt_, files, cb) => {
const opt = hlo(opt_)
if (!opt.file)
throw new TypeError('file is required')
if (opt.gzip)
throw new TypeError('cannot append to compressed archives')
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
mtimeFilter(opt)
return r(opt, files, cb)
}
const mtimeFilter = opt => {
const filter = opt.filter
if (!opt.mtimeCache)
opt.mtimeCache = new Map()
opt.filter = filter ? (path, stat) =>
filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
: (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
}
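// Usage sketch (assuming the package index exposes this module as tar.u):
//   const tar = require('tar')
//   tar.u({ file: 'archive.tar' }, ['src/index.js'])
//     .then(() => console.log('updated'))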

14
node_modules/tar/lib/warn-mixin.js generated vendored Normal file
View File

@ -0,0 +1,14 @@
'use strict'
module.exports = Base => class extends Base {
warn (msg, data) {
if (!this.strict)
this.emit('warn', msg, data)
else if (data instanceof Error)
this.emit('error', data)
else {
const er = new Error(msg)
er.data = data
this.emit('error', er)
}
}
}
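// Usage sketch: any EventEmitter-ish class can be mixed in; with a falsy
// `strict` the message is emitted as a 'warn' event rather than an error:
//   const warner = require('./warn-mixin.js')
//   const EE = require('events')
//   const Noisy = warner(class extends EE {})
//   new Noisy().warn('heads up', { detail: 42 })  // emits 'warn'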

23
node_modules/tar/lib/winchars.js generated vendored Normal file
View File

@ -0,0 +1,23 @@
'use strict'
// When writing files on Windows, translate the characters to their
// 0xf000 higher-encoded versions.
const raw = [
'|',
'<',
'>',
'?',
':'
]
const win = raw.map(char =>
String.fromCharCode(0xf000 + char.charCodeAt(0)))
const toWin = new Map(raw.map((char, i) => [char, win[i]]))
const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
module.exports = {
encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
}
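// e.g. encode('a<b?c') swaps '<' (0x3c) and '?' (0x3f) for '\uf03c' and
// '\uf03f'; decode() reverses the mapping exactly, so
//   decode(encode('a<b?c')) === 'a<b?c'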

422
node_modules/tar/lib/write-entry.js generated vendored Normal file
View File

@ -0,0 +1,422 @@
'use strict'
const Buffer = require('./buffer.js')
const MiniPass = require('minipass')
const Pax = require('./pax.js')
const Header = require('./header.js')
const ReadEntry = require('./read-entry.js')
const fs = require('fs')
const path = require('path')
const types = require('./types.js')
const maxReadSize = 16 * 1024 * 1024
const PROCESS = Symbol('process')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const HEADER = Symbol('header')
const READ = Symbol('read')
const LSTAT = Symbol('lstat')
const ONLSTAT = Symbol('onlstat')
const ONREAD = Symbol('onread')
const ONREADLINK = Symbol('onreadlink')
const OPENFILE = Symbol('openfile')
const ONOPENFILE = Symbol('onopenfile')
const CLOSE = Symbol('close')
const MODE = Symbol('mode')
const warner = require('./warn-mixin.js')
const winchars = require('./winchars.js')
const modeFix = require('./mode-fix.js')
const WriteEntry = warner(class WriteEntry extends MiniPass {
constructor (p, opt) {
opt = opt || {}
super(opt)
if (typeof p !== 'string')
throw new TypeError('path is required')
this.path = p
// suppress atime, ctime, uid, gid, uname, gname
this.portable = !!opt.portable
// until node has builtin pwnam functions, this'll have to do
this.myuid = process.getuid && process.getuid()
this.myuser = process.env.USER || ''
this.maxReadSize = opt.maxReadSize || maxReadSize
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.preservePaths = !!opt.preservePaths
this.cwd = opt.cwd || process.cwd()
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (!this.preservePaths && path.win32.isAbsolute(p)) {
// absolutes on posix are also absolutes on win32
// so we only need to test this one to get both
const parsed = path.win32.parse(p)
this.warn('stripping ' + parsed.root + ' from absolute path', p)
this.path = p.substr(parsed.root.length)
}
this.win32 = !!opt.win32 || process.platform === 'win32'
if (this.win32) {
this.path = winchars.decode(this.path.replace(/\\/g, '/'))
p = p.replace(/\\/g, '/')
}
this.absolute = opt.absolute || path.resolve(this.cwd, p)
if (this.path === '')
this.path = './'
if (this.statCache.has(this.absolute))
this[ONLSTAT](this.statCache.get(this.absolute))
else
this[LSTAT]()
}
[LSTAT] () {
fs.lstat(this.absolute, (er, stat) => {
if (er)
return this.emit('error', er)
this[ONLSTAT](stat)
})
}
[ONLSTAT] (stat) {
this.statCache.set(this.absolute, stat)
this.stat = stat
if (!stat.isFile())
stat.size = 0
this.type = getType(stat)
this.emit('stat', stat)
this[PROCESS]()
}
[PROCESS] () {
switch (this.type) {
case 'File': return this[FILE]()
case 'Directory': return this[DIRECTORY]()
case 'SymbolicLink': return this[SYMLINK]()
// unsupported types are ignored.
default: return this.end()
}
}
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory')
}
[HEADER] () {
if (this.type === 'Directory' && this.portable)
this.noMtime = true
this.header = new Header({
path: this.path,
linkpath: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this[MODE](this.stat.mode),
uid: this.portable ? null : this.stat.uid,
gid: this.portable ? null : this.stat.gid,
size: this.stat.size,
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
ctime: this.portable ? null : this.stat.ctime
})
if (this.header.encode() && !this.noPax)
this.write(new Pax({
atime: this.portable ? null : this.header.atime,
ctime: this.portable ? null : this.header.ctime,
gid: this.portable ? null : this.header.gid,
mtime: this.noMtime ? null : this.mtime || this.header.mtime,
path: this.path,
linkpath: this.linkpath,
size: this.header.size,
uid: this.portable ? null : this.header.uid,
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
nlink: this.portable ? null : this.stat.nlink
}).encode())
this.write(this.header.block)
}
[DIRECTORY] () {
if (this.path.substr(-1) !== '/')
this.path += '/'
this.stat.size = 0
this[HEADER]()
this.end()
}
[SYMLINK] () {
fs.readlink(this.absolute, (er, linkpath) => {
if (er)
return this.emit('error', er)
this[ONREADLINK](linkpath)
})
}
[ONREADLINK] (linkpath) {
this.linkpath = linkpath
this[HEADER]()
this.end()
}
[HARDLINK] (linkpath) {
this.type = 'Link'
this.linkpath = path.relative(this.cwd, linkpath)
this.stat.size = 0
this[HEADER]()
this.end()
}
[FILE] () {
if (this.stat.nlink > 1) {
const linkKey = this.stat.dev + ':' + this.stat.ino
if (this.linkCache.has(linkKey)) {
const linkpath = this.linkCache.get(linkKey)
if (linkpath.indexOf(this.cwd) === 0)
return this[HARDLINK](linkpath)
}
this.linkCache.set(linkKey, this.absolute)
}
this[HEADER]()
if (this.stat.size === 0)
return this.end()
this[OPENFILE]()
}
[OPENFILE] () {
fs.open(this.absolute, 'r', (er, fd) => {
if (er)
return this.emit('error', er)
this[ONOPENFILE](fd)
})
}
[ONOPENFILE] (fd) {
const blockLen = 512 * Math.ceil(this.stat.size / 512)
const bufLen = Math.min(blockLen, this.maxReadSize)
const buf = Buffer.allocUnsafe(bufLen)
this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)
}
[READ] (fd, buf, offset, length, pos, remain, blockRemain) {
fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er)
return this[CLOSE](fd, _ => this.emit('error', er))
this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
})
}
[CLOSE] (fd, cb) {
fs.close(fd, cb)
}
[ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {
if (bytesRead <= 0 && remain > 0) {
const er = new Error('encountered unexpected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
this[CLOSE](fd, _ => _)
return this.emit('error', er)
}
if (bytesRead > remain) {
const er = new Error('did not encounter expected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
this[CLOSE](fd, _ => _)
return this.emit('error', er)
}
// null out the rest of the buffer, if we could fit the block padding
if (bytesRead === remain) {
for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
buf[i + offset] = 0
bytesRead++
remain++
}
}
const writeBuf = offset === 0 && bytesRead === buf.length ?
buf : buf.slice(offset, offset + bytesRead)
remain -= bytesRead
blockRemain -= bytesRead
pos += bytesRead
offset += bytesRead
this.write(writeBuf)
if (!remain) {
if (blockRemain)
this.write(Buffer.alloc(blockRemain))
this.end()
this[CLOSE](fd, _ => _)
return
}
if (offset >= length) {
buf = Buffer.allocUnsafe(length)
offset = 0
}
length = buf.length - offset
this[READ](fd, buf, offset, length, pos, remain, blockRemain)
}
})
class WriteEntrySync extends WriteEntry {
constructor (path, opt) {
super(path, opt)
}
[LSTAT] () {
this[ONLSTAT](fs.lstatSync(this.absolute))
}
[SYMLINK] () {
this[ONREADLINK](fs.readlinkSync(this.absolute))
}
[OPENFILE] () {
this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
}
[READ] (fd, buf, offset, length, pos, remain, blockRemain) {
let threw = true
try {
const bytesRead = fs.readSync(fd, buf, offset, length, pos)
this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
threw = false
} finally {
if (threw)
try { this[CLOSE](fd) } catch (er) {}
}
}
[CLOSE] (fd) {
fs.closeSync(fd)
}
}
const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
constructor (readEntry, opt) {
opt = opt || {}
super(opt)
this.preservePaths = !!opt.preservePaths
this.portable = !!opt.portable
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.readEntry = readEntry
this.type = readEntry.type
if (this.type === 'Directory' && this.portable)
this.noMtime = true
this.path = readEntry.path
this.mode = this[MODE](readEntry.mode)
this.uid = this.portable ? null : readEntry.uid
this.gid = this.portable ? null : readEntry.gid
this.uname = this.portable ? null : readEntry.uname
this.gname = this.portable ? null : readEntry.gname
this.size = readEntry.size
this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
this.atime = this.portable ? null : readEntry.atime
this.ctime = this.portable ? null : readEntry.ctime
this.linkpath = readEntry.linkpath
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (path.isAbsolute(this.path) && !this.preservePaths) {
const parsed = path.parse(this.path)
this.warn(
'stripping ' + parsed.root + ' from absolute path',
this.path
)
this.path = this.path.substr(parsed.root.length)
}
this.remain = readEntry.size
this.blockRemain = readEntry.startBlockSize
this.header = new Header({
path: this.path,
linkpath: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this.mode,
uid: this.portable ? null : this.uid,
gid: this.portable ? null : this.gid,
size: this.size,
mtime: this.noMtime ? null : this.mtime,
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime
})
if (this.header.encode() && !this.noPax)
super.write(new Pax({
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
gid: this.portable ? null : this.gid,
mtime: this.noMtime ? null : this.mtime,
path: this.path,
linkpath: this.linkpath,
size: this.size,
uid: this.portable ? null : this.uid,
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
nlink: this.portable ? null : this.readEntry.nlink
}).encode())
super.write(this.header.block)
readEntry.pipe(this)
}
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory')
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
this.blockRemain -= writeLen
return super.write(data)
}
end () {
if (this.blockRemain)
this.write(Buffer.alloc(this.blockRemain))
return super.end()
}
})
WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar
const getType = stat =>
stat.isFile() ? 'File'
: stat.isDirectory() ? 'Directory'
: stat.isSymbolicLink() ? 'SymbolicLink'
: 'Unsupported'
module.exports = WriteEntry
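// Usage sketch: a WriteEntry streams one path as raw 512-byte tar blocks
// (header, body, zero padding), e.g.
//   const fs = require('fs')
//   const WriteEntry = require('./write-entry.js')
//   new WriteEntry('README.md')
//     .on('error', console.error)
//     .pipe(fs.createWriteStream('entry.block'))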