
Updated "tar" and "graceful-fs" libraries

Dorian Niemiec 2024-02-22 02:50:46 +01:00
parent 38350597e5
commit c3aba19ca3
16 changed files with 379 additions and 454 deletions

node_modules/formidable/package.json (generated, vendored): 110 lines changed

@ -1,36 +1,33 @@
{ {
"_from": "formidable@2", "name": "formidable",
"_id": "formidable@2.1.2", "version": "2.1.2",
"_inBundle": false, "license": "MIT",
"_integrity": "sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g==", "description": "A node.js module for parsing form data, especially file uploads.",
"_location": "/formidable", "homepage": "https://github.com/node-formidable/formidable",
"_phantomChildren": {}, "funding": "https://ko-fi.com/tunnckoCore/commissions",
"_requested": { "repository": "node-formidable/formidable",
"type": "range", "main": "./src/index.js",
"registry": true, "files": [
"raw": "formidable@2", "src"
"name": "formidable",
"escapedName": "formidable",
"rawSpec": "2",
"saveSpec": null,
"fetchSpec": "2"
},
"_requiredBy": [
"#USER",
"/"
], ],
"_resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.2.tgz", "publishConfig": {
"_shasum": "fa973a2bec150e4ce7cac15589d7a25fc30ebd89", "access": "public",
"_spec": "formidable@2", "tag": "v2-latest"
"_where": "/home/ubuntu/forbidden",
"bugs": {
"url": "https://github.com/node-formidable/formidable/issues"
}, },
"bundleDependencies": false, "scripts": {
"commitlint": { "bench": "node benchmark",
"extends": [ "fmt": "yarn run fmt:prepare '**/*'",
"@commitlint/config-conventional" "fmt:prepare": "prettier --write",
] "lint": "yarn run lint:prepare .",
"lint:prepare": "eslint --cache --fix --quiet --format codeframe",
"reinstall": "del-cli ./node_modules ./yarn.lock",
"postreinstall": "yarn setup",
"setup": "yarn",
"pretest": "del-cli ./test/tmp && make-dir ./test/tmp",
"test": "jest --coverage",
"pretest:ci": "yarn run pretest",
"test:ci": "nyc jest --coverage",
"test:jest": "jest --coverage"
}, },
"dependencies": { "dependencies": {
"dezalgo": "^1.0.4", "dezalgo": "^1.0.4",
@ -38,8 +35,6 @@
"once": "^1.4.0", "once": "^1.4.0",
"qs": "^6.11.0" "qs": "^6.11.0"
}, },
"deprecated": false,
"description": "A node.js module for parsing form data, especially file uploads.",
"devDependencies": { "devDependencies": {
"@commitlint/cli": "8.3.5", "@commitlint/cli": "8.3.5",
"@commitlint/config-conventional": "8.3.4", "@commitlint/config-conventional": "8.3.4",
@ -62,17 +57,6 @@
"request": "2.88.2", "request": "2.88.2",
"supertest": "4.0.2" "supertest": "4.0.2"
}, },
"files": [
"src"
],
"funding": "https://ko-fi.com/tunnckoCore/commissions",
"homepage": "https://github.com/node-formidable/formidable",
"husky": {
"hooks": {
"pre-commit": "git status --porcelain && yarn lint-staged",
"commit-msg": "yarn commitlint -E HUSKY_GIT_PARAMS"
}
},
"jest": { "jest": {
"verbose": true "verbose": true
}, },
@ -86,7 +70,17 @@
"ulpoad", "ulpoad",
"file" "file"
], ],
"license": "MIT", "husky": {
"hooks": {
"pre-commit": "git status --porcelain && yarn lint-staged",
"commit-msg": "yarn commitlint -E HUSKY_GIT_PARAMS"
}
},
"commitlint": {
"extends": [
"@commitlint/config-conventional"
]
},
"lint-staged": { "lint-staged": {
"!*.{js,jsx,ts,tsx}": [ "!*.{js,jsx,ts,tsx}": [
"yarn run fmt:prepare" "yarn run fmt:prepare"
@ -95,36 +89,10 @@
"yarn run lint" "yarn run lint"
] ]
}, },
"main": "./src/index.js",
"name": "formidable",
"publishConfig": {
"access": "public",
"tag": "v2-latest"
},
"renovate": { "renovate": {
"extends": [ "extends": [
"@tunnckocore", "@tunnckocore",
":pinAllExceptPeerDependencies" ":pinAllExceptPeerDependencies"
] ]
}, }
"repository": {
"type": "git",
"url": "git+https://github.com/node-formidable/formidable.git"
},
"scripts": {
"bench": "node benchmark",
"fmt": "yarn run fmt:prepare '**/*'",
"fmt:prepare": "prettier --write",
"lint": "yarn run lint:prepare .",
"lint:prepare": "eslint --cache --fix --quiet --format codeframe",
"postreinstall": "yarn setup",
"pretest": "del-cli ./test/tmp && make-dir ./test/tmp",
"pretest:ci": "yarn run pretest",
"reinstall": "del-cli ./node_modules ./yarn.lock",
"setup": "yarn",
"test": "jest --coverage",
"test:ci": "nyc jest --coverage",
"test:jest": "jest --coverage"
},
"version": "2.1.2"
} }

node_modules/graceful-fs/LICENSE (generated, vendored, Executable file → Normal file): 2 lines changed

@@ -1,6 +1,6 @@
 The ISC License
 
-Copyright (c) Isaac Z. Schlueter, Ben Noordhuis, and Contributors
+Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above

node_modules/graceful-fs/README.md (generated, vendored, Executable file → Normal file): 12 lines changed

@@ -30,9 +30,19 @@ the directory.
 var fs = require('graceful-fs')
 
 // now go and do stuff with it...
-fs.readFileSync('some-file-or-whatever')
+fs.readFile('some-file-or-whatever', (err, data) => {
+  // Do stuff here.
+})
 ```
 
+## Sync methods
+
+This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync
+methods. If you use sync methods which open file descriptors then you are
+responsible for dealing with any errors.
+
+This is a known limitation, not a bug.
+
 ## Global Patching
 
 If you want to patch the global fs module (or any other fs-like
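The README change above swaps the sync example for an async one and documents that sync calls bypass the EMFILE queue. A minimal sketch of how the queueing behaves under file-descriptor pressure (the loop count of 10000 is made up for illustration):

```js
// Sketch only: graceful-fs queues EMFILE/ENFILE failures and retries them,
// so thousands of concurrent opens resolve instead of throwing.
var fs = require('graceful-fs')

for (var i = 0; i < 10000; i++) {
  // With plain require('fs') a loop like this would typically die with EMFILE;
  // graceful-fs parks the excess opens and replays them as fds free up.
  fs.readFile(__filename, function (err, data) {
    if (err) throw err
  })
}
```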

node_modules/graceful-fs/clone.js (generated, vendored, Executable file → Normal file): 6 lines changed

@@ -2,12 +2,16 @@
 
 module.exports = clone
 
+var getPrototypeOf = Object.getPrototypeOf || function (obj) {
+  return obj.__proto__
+}
+
 function clone (obj) {
   if (obj === null || typeof obj !== 'object')
     return obj
 
   if (obj instanceof Object)
-    var copy = { __proto__: obj.__proto__ }
+    var copy = { __proto__: getPrototypeOf(obj) }
   else
     var copy = Object.create(null)
 
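For context, `clone` copies the `fs` module object before patching it; the change above only swaps the direct `__proto__` read for `Object.getPrototypeOf` with a fallback. A rough standalone sketch of the equivalent behaviour (not the module's exact code):

```js
// Sketch: copy own properties onto a new object that shares obj's prototype,
// falling back to __proto__ where Object.getPrototypeOf is unavailable.
var getPrototypeOf = Object.getPrototypeOf || function (obj) {
  return obj.__proto__
}

function cloneLike (obj) {
  if (obj === null || typeof obj !== 'object')
    return obj
  var copy = obj instanceof Object
    ? { __proto__: getPrototypeOf(obj) }
    : Object.create(null)
  Object.keys(obj).forEach(function (key) {
    Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
  })
  return copy
}

console.log(cloneLike({ a: 1 }).a) // 1
```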

node_modules/graceful-fs/graceful-fs.js (generated, vendored, Executable file → Normal file): 170 lines changed

@@ -54,7 +54,7 @@ if (!fs[gracefulQueue]) {
       return fs$close.call(fs, fd, function (err) {
         // This function uses the graceful-fs shared queue
         if (!err) {
-          retry()
+          resetQueue()
         }
 
         if (typeof cb === 'function')
@@ -72,7 +72,7 @@ if (!fs[gracefulQueue]) {
     function closeSync (fd) {
       // This function uses the graceful-fs shared queue
       fs$closeSync.apply(fs, arguments)
-      retry()
+      resetQueue()
     }
 
     Object.defineProperty(closeSync, previousSymbol, {
@@ -114,14 +114,13 @@ function patch (fs) {
     return go$readFile(path, options, cb)
 
-    function go$readFile (path, options, cb) {
+    function go$readFile (path, options, cb, startTime) {
      return fs$readFile(path, options, function (err) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
-          enqueue([go$readFile, [path, options, cb]])
+          enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
-          retry()
        }
      })
     }
   }
@@ -135,14 +134,13 @@ function patch (fs) {
     return go$writeFile(path, data, options, cb)
 
-    function go$writeFile (path, data, options, cb) {
+    function go$writeFile (path, data, options, cb, startTime) {
      return fs$writeFile(path, data, options, function (err) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
-          enqueue([go$writeFile, [path, data, options, cb]])
+          enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
-          retry()
        }
      })
     }
   }
@@ -157,14 +155,35 @@ function patch (fs) {
     return go$appendFile(path, data, options, cb)
 
-    function go$appendFile (path, data, options, cb) {
+    function go$appendFile (path, data, options, cb, startTime) {
      return fs$appendFile(path, data, options, function (err) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
-          enqueue([go$appendFile, [path, data, options, cb]])
+          enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
-          retry()
        }
      })
     }
   }
+
+  var fs$copyFile = fs.copyFile
+  if (fs$copyFile)
+    fs.copyFile = copyFile
+  function copyFile (src, dest, flags, cb) {
+    if (typeof flags === 'function') {
+      cb = flags
+      flags = 0
+    }
+    return go$copyFile(src, dest, flags, cb)
+
+    function go$copyFile (src, dest, flags, cb, startTime) {
+      return fs$copyFile(src, dest, flags, function (err) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
+        else {
+          if (typeof cb === 'function')
+            cb.apply(this, arguments)
+        }
+      })
+    }
+  }
@@ -172,36 +191,46 @@ function patch (fs) {
   var fs$readdir = fs.readdir
   fs.readdir = readdir
+  var noReaddirOptionVersions = /^v[0-5]\./
   function readdir (path, options, cb) {
-    var args = [path]
-    if (typeof options !== 'function') {
-      args.push(options)
-    } else {
-      cb = options
-    }
-    args.push(go$readdir$cb)
+    if (typeof options === 'function')
+      cb = options, options = null
+
+    var go$readdir = noReaddirOptionVersions.test(process.version)
+      ? function go$readdir (path, options, cb, startTime) {
+        return fs$readdir(path, fs$readdirCallback(
+          path, options, cb, startTime
+        ))
+      }
+      : function go$readdir (path, options, cb, startTime) {
+        return fs$readdir(path, options, fs$readdirCallback(
+          path, options, cb, startTime
+        ))
+      }
 
-    return go$readdir(args)
+    return go$readdir(path, options, cb)
 
-    function go$readdir$cb (err, files) {
-      if (files && files.sort)
-        files.sort()
-
-      if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
-        enqueue([go$readdir, [args]])
-
-      else {
-        if (typeof cb === 'function')
-          cb.apply(this, arguments)
-        retry()
+    function fs$readdirCallback (path, options, cb, startTime) {
+      return function (err, files) {
+        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
+          enqueue([
+            go$readdir,
+            [path, options, cb],
+            err,
+            startTime || Date.now(),
+            Date.now()
+          ])
+        else {
+          if (files && files.sort)
+            files.sort()
+
+          if (typeof cb === 'function')
+            cb.call(this, err, files)
+        }
       }
     }
   }
 
-  function go$readdir (args) {
-    return fs$readdir.apply(fs, args)
-  }
-
   if (process.version.substr(0, 4) === 'v0.8') {
     var legStreams = legacy(fs)
     ReadStream = legStreams.ReadStream
@@ -324,14 +353,13 @@ function patch (fs) {
     return go$open(path, flags, mode, cb)
 
-    function go$open (path, flags, mode, cb) {
+    function go$open (path, flags, mode, cb, startTime) {
      return fs$open(path, flags, mode, function (err, fd) {
        if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
-          enqueue([go$open, [path, flags, mode, cb]])
+          enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
        else {
          if (typeof cb === 'function')
            cb.apply(this, arguments)
-          retry()
        }
      })
     }
   }
@@ -343,12 +371,78 @@ function patch (fs) {
 function enqueue (elem) {
   debug('ENQUEUE', elem[0].name, elem[1])
   fs[gracefulQueue].push(elem)
+  retry()
+}
+
+// keep track of the timeout between retry() calls
+var retryTimer
+
+// reset the startTime and lastTime to now
+// this resets the start of the 60 second overall timeout as well as the
+// delay between attempts so that we'll retry these jobs sooner
+function resetQueue () {
+  var now = Date.now()
+  for (var i = 0; i < fs[gracefulQueue].length; ++i) {
+    // entries that are only a length of 2 are from an older version, don't
+    // bother modifying those since they'll be retried anyway.
+    if (fs[gracefulQueue][i].length > 2) {
+      fs[gracefulQueue][i][3] = now // startTime
+      fs[gracefulQueue][i][4] = now // lastTime
+    }
+  }
+  // call retry to make sure we're actively processing the queue
+  retry()
 }
 
 function retry () {
+  // clear the timer and remove it to help prevent unintended concurrency
+  clearTimeout(retryTimer)
+  retryTimer = undefined
+
+  if (fs[gracefulQueue].length === 0)
+    return
+
   var elem = fs[gracefulQueue].shift()
-  if (elem) {
-    debug('RETRY', elem[0].name, elem[1])
-    elem[0].apply(null, elem[1])
+  var fn = elem[0]
+  var args = elem[1]
+  // these items may be unset if they were added by an older graceful-fs
+  var err = elem[2]
+  var startTime = elem[3]
+  var lastTime = elem[4]
+
+  // if we don't have a startTime we have no way of knowing if we've waited
+  // long enough, so go ahead and retry this item now
+  if (startTime === undefined) {
+    debug('RETRY', fn.name, args)
+    fn.apply(null, args)
+  } else if (Date.now() - startTime >= 60000) {
+    // it's been more than 60 seconds total, bail now
+    debug('TIMEOUT', fn.name, args)
+    var cb = args.pop()
+    if (typeof cb === 'function')
+      cb.call(null, err)
+  } else {
+    // the amount of time between the last attempt and right now
+    var sinceAttempt = Date.now() - lastTime
+    // the amount of time between when we first tried, and when we last tried
+    // rounded up to at least 1
+    var sinceStart = Math.max(lastTime - startTime, 1)
+    // backoff. wait longer than the total time we've been retrying, but only
+    // up to a maximum of 100ms
+    var desiredDelay = Math.min(sinceStart * 1.2, 100)
+    // it's been long enough since the last retry, do it again
+    if (sinceAttempt >= desiredDelay) {
+      debug('RETRY', fn.name, args)
+      fn.apply(null, args.concat([startTime]))
+    } else {
+      // if we can't do this job yet, push it to the end of the queue
+      // and let the next iteration check again
+      fs[gracefulQueue].push(elem)
+    }
+  }
+
+  // schedule our next run if one isn't already scheduled
+  if (retryTimer === undefined) {
+    retryTimer = setTimeout(retry, 0)
   }
 }
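The new retry loop stores `err`, `startTime`, and `lastTime` with each queued job and backs off between attempts: the delay grows with how long the job has been waiting, capped at 100 ms, and the job is abandoned with its original error after 60 seconds. A small standalone sketch of that delay rule (values chosen for illustration):

```js
// Sketch of the backoff rule above: wait a bit longer than the total time
// spent retrying so far, but never more than 100 ms between attempts.
function desiredDelay (startTime, lastTime) {
  var sinceStart = Math.max(lastTime - startTime, 1)
  return Math.min(sinceStart * 1.2, 100)
}

var start = Date.now()
console.log(desiredDelay(start, start))       // 1.2 ms after the first failure
console.log(desiredDelay(start, start + 50))  // 60 ms once it has waited 50 ms
console.log(desiredDelay(start, start + 500)) // capped at 100 ms
```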

node_modules/graceful-fs/legacy-streams.js (generated, vendored, Executable file → Normal file): 0 lines changed (mode change only)

node_modules/graceful-fs/package.json (generated, vendored, Executable file → Normal file): 83 lines changed

@ -1,52 +1,22 @@
{ {
"_from": "graceful-fs", "name": "graceful-fs",
"_id": "graceful-fs@4.2.4",
"_inBundle": false,
"_integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==",
"_location": "/graceful-fs",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "graceful-fs",
"name": "graceful-fs",
"escapedName": "graceful-fs",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz",
"_shasum": "2256bde14d3632958c465ebc96dc467ca07a29fb",
"_spec": "graceful-fs",
"_where": "/media/serveradmin/Server/developement",
"bugs": {
"url": "https://github.com/isaacs/node-graceful-fs/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "A drop-in replacement for fs, making various improvements.", "description": "A drop-in replacement for fs, making various improvements.",
"devDependencies": { "version": "4.2.11",
"import-fresh": "^2.0.0", "repository": {
"mkdirp": "^0.5.0", "type": "git",
"rimraf": "^2.2.8", "url": "https://github.com/isaacs/node-graceful-fs"
"tap": "^12.7.0"
}, },
"main": "graceful-fs.js",
"directories": { "directories": {
"test": "test" "test": "test"
}, },
"files": [ "scripts": {
"fs.js", "preversion": "npm test",
"graceful-fs.js", "postversion": "npm publish",
"legacy-streams.js", "postpublish": "git push origin --follow-tags",
"polyfills.js", "test": "nyc --silent node test.js | tap -c -",
"clone.js" "posttest": "nyc report"
], },
"homepage": "https://github.com/isaacs/node-graceful-fs#readme",
"keywords": [ "keywords": [
"fs", "fs",
"module", "module",
@ -64,17 +34,20 @@
"EACCESS" "EACCESS"
], ],
"license": "ISC", "license": "ISC",
"main": "graceful-fs.js", "devDependencies": {
"name": "graceful-fs", "import-fresh": "^2.0.0",
"repository": { "mkdirp": "^0.5.0",
"type": "git", "rimraf": "^2.2.8",
"url": "git+https://github.com/isaacs/node-graceful-fs.git" "tap": "^16.3.4"
}, },
"scripts": { "files": [
"postpublish": "git push origin --follow-tags", "fs.js",
"postversion": "npm publish", "graceful-fs.js",
"preversion": "npm test", "legacy-streams.js",
"test": "node test.js | tap -" "polyfills.js",
}, "clone.js"
"version": "4.2.4" ],
"tap": {
"reporter": "classic"
}
} }

node_modules/graceful-fs/polyfills.js (generated, vendored, Executable file → Normal file): 83 lines changed

@@ -14,10 +14,14 @@ try {
   process.cwd()
 } catch (er) {}
 
-var chdir = process.chdir
-process.chdir = function(d) {
-  cwd = null
-  chdir.call(process, d)
+// This check is needed until node.js 12 is required
+if (typeof process.chdir === 'function') {
+  var chdir = process.chdir
+  process.chdir = function (d) {
+    cwd = null
+    chdir.call(process, d)
+  }
+  if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
 }
 
 module.exports = patch
@@ -67,13 +71,13 @@ function patch (fs) {
   fs.lstatSync = statFixSync(fs.lstatSync)
 
   // if lchmod/lchown do not exist, then make them no-ops
-  if (!fs.lchmod) {
+  if (fs.chmod && !fs.lchmod) {
     fs.lchmod = function (path, mode, cb) {
       if (cb) process.nextTick(cb)
     }
     fs.lchmodSync = function () {}
   }
-  if (!fs.lchown) {
+  if (fs.chown && !fs.lchown) {
     fs.lchown = function (path, uid, gid, cb) {
       if (cb) process.nextTick(cb)
     }
@@ -90,32 +94,38 @@ function patch (fs) {
   // CPU to a busy looping process, which can cause the program causing the lock
   // contention to be starved of CPU by node, so the contention doesn't resolve.
   if (platform === "win32") {
-    fs.rename = (function (fs$rename) { return function (from, to, cb) {
-      var start = Date.now()
-      var backoff = 0;
-      fs$rename(from, to, function CB (er) {
-        if (er
-            && (er.code === "EACCES" || er.code === "EPERM")
-            && Date.now() - start < 60000) {
-          setTimeout(function() {
-            fs.stat(to, function (stater, st) {
-              if (stater && stater.code === "ENOENT")
-                fs$rename(from, to, CB);
-              else
-                cb(er)
-            })
-          }, backoff)
-          if (backoff < 100)
-            backoff += 10;
-          return;
-        }
-        if (cb) cb(er)
-      })
-    }})(fs.rename)
+    fs.rename = typeof fs.rename !== 'function' ? fs.rename
+    : (function (fs$rename) {
+      function rename (from, to, cb) {
+        var start = Date.now()
+        var backoff = 0;
+        fs$rename(from, to, function CB (er) {
+          if (er
+            && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY")
+            && Date.now() - start < 60000) {
+            setTimeout(function() {
+              fs.stat(to, function (stater, st) {
+                if (stater && stater.code === "ENOENT")
+                  fs$rename(from, to, CB);
+                else
+                  cb(er)
+              })
+            }, backoff)
+            if (backoff < 100)
+              backoff += 10;
+            return;
+          }
+          if (cb) cb(er)
+        })
+      }
+      if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
+      return rename
+    })(fs.rename)
   }
 
   // if read() returns EAGAIN, then just try it again.
-  fs.read = (function (fs$read) {
+  fs.read = typeof fs.read !== 'function' ? fs.read
+  : (function (fs$read) {
     function read (fd, buffer, offset, length, position, callback_) {
       var callback
       if (callback_ && typeof callback_ === 'function') {
@@ -132,11 +142,12 @@ function patch (fs) {
     }
 
     // This ensures `util.promisify` works as it does for native `fs.read`.
-    read.__proto__ = fs$read
+    if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
     return read
   })(fs.read)
 
-  fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
+  fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
+  : (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
     var eagCounter = 0
     while (true) {
       try {
@@ -195,7 +206,7 @@ function patch (fs) {
   }
 
   function patchLutimes (fs) {
-    if (constants.hasOwnProperty("O_SYMLINK")) {
+    if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
      fs.lutimes = function (path, at, mt, cb) {
        fs.open(path, constants.O_SYMLINK, function (er, fd) {
          if (er) {
@@ -229,7 +240,7 @@ function patch (fs) {
        return ret
      }
 
-    } else {
+    } else if (fs.futimes) {
      fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
      fs.lutimesSync = function () {}
     }
@@ -306,8 +317,10 @@ function patch (fs) {
     return function (target, options) {
      var stats = options ? orig.call(fs, target, options)
        : orig.call(fs, target)
-      if (stats.uid < 0) stats.uid += 0x100000000
-      if (stats.gid < 0) stats.gid += 0x100000000
+      if (stats) {
+        if (stats.uid < 0) stats.uid += 0x100000000
+        if (stats.gid < 0) stats.gid += 0x100000000
+      }
      return stats;
     }
   }
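The last hunk guards the uid/gid fix-up against an undefined stats object; the fix-up itself compensates for older Node versions that reported large uids as negative 32-bit values by adding 2^32. A standalone sketch of that arithmetic (the function name is illustrative, not from the module):

```js
// Sketch of the stat fix-up above: wrap negative 32-bit uid/gid values back
// into the unsigned range, and skip stats that are undefined.
function fixStats (stats) {
  if (stats) {
    if (stats.uid < 0) stats.uid += 0x100000000
    if (stats.gid < 0) stats.gid += 0x100000000
  }
  return stats
}

console.log(fixStats({ uid: -2, gid: 1000 })) // { uid: 4294967294, gid: 1000 }
console.log(fixStats(undefined))              // undefined
```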

node_modules/mime-db/package.json (generated, vendored): 85 lines changed

@ -1,49 +1,23 @@
{ {
"_from": "mime-db@1.52.0", "name": "mime-db",
"_id": "mime-db@1.52.0",
"_inBundle": false,
"_integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"_location": "/mime-db",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "mime-db@1.52.0",
"name": "mime-db",
"escapedName": "mime-db",
"rawSpec": "1.52.0",
"saveSpec": null,
"fetchSpec": "1.52.0"
},
"_requiredBy": [
"/mime-types"
],
"_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"_shasum": "bbabcdc02859f4987301c856e3387ce5ec43bf70",
"_spec": "mime-db@1.52.0",
"_where": "/home/ubuntu/formidable/node_modules/mime-types",
"bugs": {
"url": "https://github.com/jshttp/mime-db/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Douglas Christopher Wilson",
"email": "doug@somethingdoug.com"
},
{
"name": "Jonathan Ong",
"email": "me@jongleberry.com",
"url": "http://jongleberry.com"
},
{
"name": "Robert Kieffer",
"email": "robert@broofa.com",
"url": "http://github.com/broofa"
}
],
"deprecated": false,
"description": "Media Type Database", "description": "Media Type Database",
"version": "1.52.0",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)",
"Robert Kieffer <robert@broofa.com> (http://github.com/broofa)"
],
"license": "MIT",
"keywords": [
"mime",
"db",
"type",
"types",
"database",
"charset",
"charsets"
],
"repository": "jshttp/mime-db",
"devDependencies": { "devDependencies": {
"bluebird": "3.7.2", "bluebird": "3.7.2",
"co": "4.6.0", "co": "4.6.0",
@ -63,9 +37,6 @@
"raw-body": "2.5.0", "raw-body": "2.5.0",
"stream-to-array": "2.3.0" "stream-to-array": "2.3.0"
}, },
"engines": {
"node": ">= 0.6"
},
"files": [ "files": [
"HISTORY.md", "HISTORY.md",
"LICENSE", "LICENSE",
@ -73,21 +44,8 @@
"db.json", "db.json",
"index.js" "index.js"
], ],
"homepage": "https://github.com/jshttp/mime-db#readme", "engines": {
"keywords": [ "node": ">= 0.6"
"mime",
"db",
"type",
"types",
"database",
"charset",
"charsets"
],
"license": "MIT",
"name": "mime-db",
"repository": {
"type": "git",
"url": "git+https://github.com/jshttp/mime-db.git"
}, },
"scripts": { "scripts": {
"build": "node scripts/build", "build": "node scripts/build",
@ -98,6 +56,5 @@
"test-cov": "nyc --reporter=html --reporter=text npm test", "test-cov": "nyc --reporter=html --reporter=text npm test",
"update": "npm run fetch && npm run build", "update": "npm run fetch && npm run build",
"version": "node scripts/version-history.js && git add HISTORY.md" "version": "node scripts/version-history.js && git add HISTORY.md"
}, }
"version": "1.52.0"
} }

node_modules/mime-types/package.json (generated, vendored): 74 lines changed

@ -1,53 +1,21 @@
{ {
"_from": "mime-types", "name": "mime-types",
"_id": "mime-types@2.1.35", "description": "The ultimate javascript content-type utility.",
"_inBundle": false, "version": "2.1.35",
"_integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"_location": "/mime-types",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "mime-types",
"name": "mime-types",
"escapedName": "mime-types",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"_shasum": "381a871b62a734450660ae3deee44813f70d959a",
"_spec": "mime-types",
"_where": "/home/ubuntu/formidable/a",
"bugs": {
"url": "https://github.com/jshttp/mime-types/issues"
},
"bundleDependencies": false,
"contributors": [ "contributors": [
{ "Douglas Christopher Wilson <doug@somethingdoug.com>",
"name": "Douglas Christopher Wilson", "Jeremiah Senkpiel <fishrock123@rocketmail.com> (https://searchbeam.jit.su)",
"email": "doug@somethingdoug.com" "Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
},
{
"name": "Jeremiah Senkpiel",
"email": "fishrock123@rocketmail.com",
"url": "https://searchbeam.jit.su"
},
{
"name": "Jonathan Ong",
"email": "me@jongleberry.com",
"url": "http://jongleberry.com"
}
], ],
"license": "MIT",
"keywords": [
"mime",
"types"
],
"repository": "jshttp/mime-types",
"dependencies": { "dependencies": {
"mime-db": "1.52.0" "mime-db": "1.52.0"
}, },
"deprecated": false,
"description": "The ultimate javascript content-type utility.",
"devDependencies": { "devDependencies": {
"eslint": "7.32.0", "eslint": "7.32.0",
"eslint-config-standard": "14.1.1", "eslint-config-standard": "14.1.1",
@ -59,30 +27,18 @@
"mocha": "9.2.2", "mocha": "9.2.2",
"nyc": "15.1.0" "nyc": "15.1.0"
}, },
"engines": {
"node": ">= 0.6"
},
"files": [ "files": [
"HISTORY.md", "HISTORY.md",
"LICENSE", "LICENSE",
"index.js" "index.js"
], ],
"homepage": "https://github.com/jshttp/mime-types#readme", "engines": {
"keywords": [ "node": ">= 0.6"
"mime",
"types"
],
"license": "MIT",
"name": "mime-types",
"repository": {
"type": "git",
"url": "git+https://github.com/jshttp/mime-types.git"
}, },
"scripts": { "scripts": {
"lint": "eslint .", "lint": "eslint .",
"test": "mocha --reporter spec test/test.js", "test": "mocha --reporter spec test/test.js",
"test-ci": "nyc --reporter=lcov --reporter=text npm test", "test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test" "test-cov": "nyc --reporter=html --reporter=text npm test"
}, }
"version": "2.1.35"
} }

node_modules/tar/CHANGELOG.md (generated, vendored): 68 lines changed

@ -1,68 +0,0 @@
# Changelog
## 6.0
- Drop support for node 6 and 8
- fix symlinks and hardlinks on windows being packed with `\`-style path
targets
## 5.0
- Address unpack race conditions using path reservations
- Change large-numbers errors from TypeError to Error
- Add `TAR_*` error codes
- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries
found in an archive
- do not treat ignored entries as an invalid archive
- drop support for node v4
- unpack: conditionally use a file mapping to write files on Windows
- Set more portable 'mode' value in portable mode
- Set `portable` gzip option in portable mode
## 4.4
- Add 'mtime' option to tar creation to force mtime
- unpack: only reuse file fs entries if nlink = 1
- unpack: rename before unlinking files on Windows
- Fix encoding/decoding of base-256 numbers
- Use `stat` instead of `lstat` when checking CWD
- Always provide a callback to fs.close()
## 4.3
- Add 'transform' unpack option
## 4.2
- Fail when zlib fails
## 4.1
- Add noMtime flag for tar creation
## 4.0
- unpack: raise error if cwd is missing or not a dir
- pack: don't drop dots from dotfiles when prefixing
## 3.1
- Support `@file.tar` as an entry argument to copy entries from one tar
file to another.
- Add `noPax` option
- `noResume` option for tar.t
- win32: convert `>|<?:` chars to windows-friendly form
- Exclude mtime for dirs in portable mode
## 3.0
- Minipass-based implementation
- Entirely new API surface, `tar.c()`, `tar.x()` etc., much closer to
system tar semantics
- Massive performance improvement
- Require node 4.x and higher
## 0.x, 1.x, 2.x - 2011-2014
- fstream-based implementation
- slow and kinda bad, but better than npm shelling out to the system `tar`

node_modules/tar/lib/pack.js (generated, vendored): 24 lines changed

@@ -79,14 +79,26 @@ const Pack = warner(class Pack extends Minipass {
     this.portable = !!opt.portable
     this.zip = null
-    if (opt.gzip) {
-      if (typeof opt.gzip !== 'object') {
-        opt.gzip = {}
+
+    if (opt.gzip || opt.brotli) {
+      if (opt.gzip && opt.brotli) {
+        throw new TypeError('gzip and brotli are mutually exclusive')
       }
-      if (this.portable) {
-        opt.gzip.portable = true
+      if (opt.gzip) {
+        if (typeof opt.gzip !== 'object') {
+          opt.gzip = {}
+        }
+        if (this.portable) {
+          opt.gzip.portable = true
+        }
+        this.zip = new zlib.Gzip(opt.gzip)
+      }
+      if (opt.brotli) {
+        if (typeof opt.brotli !== 'object') {
+          opt.brotli = {}
+        }
+        this.zip = new zlib.BrotliCompress(opt.brotli)
       }
-      this.zip = new zlib.Gzip(opt.gzip)
       this.zip.on('data', chunk => super.write(chunk))
       this.zip.on('end', _ => super.end())
       this.zip.on('drain', _ => this[ONDRAIN]())
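With this change Pack accepts either `gzip` or `brotli` (but not both) and wires the chosen compressor into the same data/end/drain handlers. A hedged usage sketch with the high-level API, using the option names from the code above; the file and directory names are placeholders:

```js
// Sketch: creating a gzip archive vs. a brotli archive with node-tar.
// Passing both options would throw 'gzip and brotli are mutually exclusive'.
const tar = require('tar')

// gzip-compressed archive (the long-standing option)
tar.c({ gzip: true, file: 'out.tar.gz' }, ['some-dir'])
  .then(() => console.log('wrote out.tar.gz'))

// brotli-compressed archive (new in this tar release)
tar.c({ brotli: true, file: 'out.tar.br' }, ['some-dir'])
  .then(() => console.log('wrote out.tar.br'))
```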

node_modules/tar/lib/parse.js (generated, vendored): 49 lines changed

@@ -97,6 +97,16 @@ module.exports = warner(class Parser extends EE {
     this.strict = !!opt.strict
     this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
     this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+    // Unlike gzip, brotli doesn't have any magic bytes to identify it
+    // Users need to explicitly tell us they're extracting a brotli file
+    // Or we infer from the file extension
+    const isTBR = (opt.file && (
+        opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
+    // if it's a tbr file it MIGHT be brotli, but we don't know until
+    // we look at it and verify it's not a valid tar file.
+    this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
+      : isTBR ? undefined
+      : false
 
     // have to set this so that streams are ok piping into it
     this.writable = true
@@ -347,7 +357,9 @@ module.exports = warner(class Parser extends EE {
     }
 
     // first write, might be gzipped
-    if (this[UNZIP] === null && chunk) {
+    const needSniff = this[UNZIP] === null ||
+      this.brotli === undefined && this[UNZIP] === false
+    if (needSniff && chunk) {
       if (this[BUFFER]) {
         chunk = Buffer.concat([this[BUFFER], chunk])
         this[BUFFER] = null
@@ -356,15 +368,45 @@ module.exports = warner(class Parser extends EE {
         this[BUFFER] = chunk
         return true
       }
+
+      // look for gzip header
       for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
         if (chunk[i] !== gzipHeader[i]) {
           this[UNZIP] = false
         }
       }
-      if (this[UNZIP] === null) {
+
+      const maybeBrotli = this.brotli === undefined
+      if (this[UNZIP] === false && maybeBrotli) {
+        // read the first header to see if it's a valid tar file. If so,
+        // we can safely assume that it's not actually brotli, despite the
+        // .tbr or .tar.br file extension.
+        // if we ended before getting a full chunk, yes, def brotli
+        if (chunk.length < 512) {
+          if (this[ENDED]) {
+            this.brotli = true
+          } else {
+            this[BUFFER] = chunk
+            return true
+          }
+        } else {
+          // if it's tar, it's pretty reliably not brotli, chances of
+          // that happening are astronomical.
+          try {
+            new Header(chunk.slice(0, 512))
+            this.brotli = false
+          } catch (_) {
+            this.brotli = true
+          }
+        }
+      }
+
+      if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
         const ended = this[ENDED]
         this[ENDED] = false
-        this[UNZIP] = new zlib.Unzip()
+        this[UNZIP] = this[UNZIP] === null
+          ? new zlib.Unzip()
+          : new zlib.BrotliDecompress()
         this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
         this[UNZIP].on('error', er => this.abort(er))
         this[UNZIP].on('end', _ => {
@@ -502,6 +544,7 @@ module.exports = warner(class Parser extends EE {
       this[UNZIP].end(chunk)
     } else {
       this[ENDED] = true
+      if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
       this.write(chunk)
     }
   }
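On the read side the parser cannot sniff brotli (there are no magic bytes), so it either trusts an explicit `brotli` option or, for `.tar.br`/`.tbr` files, tries to parse the first 512 bytes as a tar header and falls back to `BrotliDecompress` when that fails. A hedged extraction sketch; the file and directory names are placeholders and the target directory is assumed to exist:

```js
// Sketch: extracting brotli-compressed tarballs with node-tar.
const tar = require('tar')

// With a .tbr/.tar.br file name, detection happens as described above.
tar.x({ file: 'backup.tar.br', cwd: 'restore-dir' })
  .then(() => console.log('extracted'))

// For other names, brotli must be requested explicitly.
tar.x({ file: 'backup.bin', brotli: true, cwd: 'restore-dir' })
  .then(() => console.log('extracted'))
```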

node_modules/tar/lib/replace.js (generated, vendored): 2 lines changed

@@ -23,7 +23,7 @@ module.exports = (opt_, files, cb) => {
     throw new TypeError('file is required')
   }
 
-  if (opt.gzip) {
+  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
     throw new TypeError('cannot append to compressed archives')
   }
 

node_modules/tar/lib/update.js (generated, vendored): 2 lines changed

@@ -13,7 +13,7 @@ module.exports = (opt_, files, cb) => {
     throw new TypeError('file is required')
   }
 
-  if (opt.gzip) {
+  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
     throw new TypeError('cannot append to compressed archives')
   }
 
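Both `replace` and `update` now refuse brotli archives the same way they refuse gzip ones, since appending to a compressed stream is not possible; `.br` and `.tbr` file names are rejected even without the option set. A small hedged sketch; the archive and entry names are placeholders, and the error is thrown before any file is opened:

```js
// Sketch: appending to a compressed archive is rejected up front.
const tar = require('tar')

try {
  tar.r({ file: 'logs.tbr', sync: true }, ['today.log'])
} catch (er) {
  console.log(er.message) // 'cannot append to compressed archives'
}
```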

node_modules/tar/package.json (generated, vendored): 63 lines changed

@ -1,35 +1,17 @@
{ {
"_from": "tar", "author": "GitHub Inc.",
"_id": "tar@6.1.15", "name": "tar",
"_inBundle": false, "description": "tar for node",
"_integrity": "sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==", "version": "6.2.0",
"_location": "/tar", "repository": {
"_phantomChildren": {}, "type": "git",
"_requested": { "url": "https://github.com/isaacs/node-tar.git"
"type": "tag",
"registry": true,
"raw": "tar",
"name": "tar",
"escapedName": "tar",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
}, },
"_requiredBy": [ "scripts": {
"#USER", "genparse": "node scripts/generate-parse-fixtures.js",
"/" "snap": "tap",
], "test": "tap"
"_resolved": "https://registry.npmjs.org/tar/-/tar-6.1.15.tgz",
"_shasum": "c9738b0b98845a3b344d334b8fa3041aaba53a69",
"_spec": "tar",
"_where": "/home/ubuntu/fix",
"author": {
"name": "GitHub Inc."
}, },
"bugs": {
"url": "https://github.com/isaacs/node-tar/issues"
},
"bundleDependencies": false,
"dependencies": { "dependencies": {
"chownr": "^2.0.0", "chownr": "^2.0.0",
"fs-minipass": "^2.0.0", "fs-minipass": "^2.0.0",
@ -38,8 +20,6 @@
"mkdirp": "^1.0.3", "mkdirp": "^1.0.3",
"yallist": "^4.0.0" "yallist": "^4.0.0"
}, },
"deprecated": false,
"description": "tar for node",
"devDependencies": { "devDependencies": {
"@npmcli/eslint-config": "^4.0.0", "@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.11.0", "@npmcli/template-oss": "4.11.0",
@ -51,6 +31,7 @@
"rimraf": "^3.0.2", "rimraf": "^3.0.2",
"tap": "^16.0.1" "tap": "^16.0.1"
}, },
"license": "ISC",
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },
@ -59,23 +40,6 @@
"lib/", "lib/",
"index.js" "index.js"
], ],
"homepage": "https://github.com/isaacs/node-tar#readme",
"license": "ISC",
"name": "tar",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/node-tar.git"
},
"scripts": {
"genparse": "node scripts/generate-parse-fixtures.js",
"lint": "eslint \"**/*.js\"",
"lintfix": "npm run lint -- --fix",
"postlint": "template-oss-check",
"posttest": "npm run lint",
"snap": "tap",
"template-oss-apply": "template-oss-apply --force",
"test": "tap"
},
"tap": { "tap": {
"coverage-map": "map.js", "coverage-map": "map.js",
"timeout": 0, "timeout": 0,
@ -102,6 +66,5 @@
"16.x", "16.x",
"18.x" "18.x"
] ]
}, }
"version": "6.1.15"
} }