forked from svrjs/svrjs

Updated "tar" and "graceful-fs" libraries

Dorian Niemiec 2024-02-22 02:50:46 +01:00
parent 38350597e5
commit c3aba19ca3
16 changed files with 379 additions and 454 deletions

node_modules/formidable/package.json (generated, vendored): 110 lines changed

@ -1,36 +1,33 @@
{
"_from": "formidable@2",
"_id": "formidable@2.1.2",
"_inBundle": false,
"_integrity": "sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g==",
"_location": "/formidable",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "formidable@2",
"name": "formidable",
"escapedName": "formidable",
"rawSpec": "2",
"saveSpec": null,
"fetchSpec": "2"
},
"_requiredBy": [
"#USER",
"/"
"name": "formidable",
"version": "2.1.2",
"license": "MIT",
"description": "A node.js module for parsing form data, especially file uploads.",
"homepage": "https://github.com/node-formidable/formidable",
"funding": "https://ko-fi.com/tunnckoCore/commissions",
"repository": "node-formidable/formidable",
"main": "./src/index.js",
"files": [
"src"
],
"_resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.2.tgz",
"_shasum": "fa973a2bec150e4ce7cac15589d7a25fc30ebd89",
"_spec": "formidable@2",
"_where": "/home/ubuntu/forbidden",
"bugs": {
"url": "https://github.com/node-formidable/formidable/issues"
"publishConfig": {
"access": "public",
"tag": "v2-latest"
},
"bundleDependencies": false,
"commitlint": {
"extends": [
"@commitlint/config-conventional"
]
"scripts": {
"bench": "node benchmark",
"fmt": "yarn run fmt:prepare '**/*'",
"fmt:prepare": "prettier --write",
"lint": "yarn run lint:prepare .",
"lint:prepare": "eslint --cache --fix --quiet --format codeframe",
"reinstall": "del-cli ./node_modules ./yarn.lock",
"postreinstall": "yarn setup",
"setup": "yarn",
"pretest": "del-cli ./test/tmp && make-dir ./test/tmp",
"test": "jest --coverage",
"pretest:ci": "yarn run pretest",
"test:ci": "nyc jest --coverage",
"test:jest": "jest --coverage"
},
"dependencies": {
"dezalgo": "^1.0.4",
@ -38,8 +35,6 @@
"once": "^1.4.0",
"qs": "^6.11.0"
},
"deprecated": false,
"description": "A node.js module for parsing form data, especially file uploads.",
"devDependencies": {
"@commitlint/cli": "8.3.5",
"@commitlint/config-conventional": "8.3.4",
@ -62,17 +57,6 @@
"request": "2.88.2",
"supertest": "4.0.2"
},
"files": [
"src"
],
"funding": "https://ko-fi.com/tunnckoCore/commissions",
"homepage": "https://github.com/node-formidable/formidable",
"husky": {
"hooks": {
"pre-commit": "git status --porcelain && yarn lint-staged",
"commit-msg": "yarn commitlint -E HUSKY_GIT_PARAMS"
}
},
"jest": {
"verbose": true
},
@ -86,7 +70,17 @@
"ulpoad",
"file"
],
"license": "MIT",
"husky": {
"hooks": {
"pre-commit": "git status --porcelain && yarn lint-staged",
"commit-msg": "yarn commitlint -E HUSKY_GIT_PARAMS"
}
},
"commitlint": {
"extends": [
"@commitlint/config-conventional"
]
},
"lint-staged": {
"!*.{js,jsx,ts,tsx}": [
"yarn run fmt:prepare"
@ -95,36 +89,10 @@
"yarn run lint"
]
},
"main": "./src/index.js",
"name": "formidable",
"publishConfig": {
"access": "public",
"tag": "v2-latest"
},
"renovate": {
"extends": [
"@tunnckocore",
":pinAllExceptPeerDependencies"
]
},
"repository": {
"type": "git",
"url": "git+https://github.com/node-formidable/formidable.git"
},
"scripts": {
"bench": "node benchmark",
"fmt": "yarn run fmt:prepare '**/*'",
"fmt:prepare": "prettier --write",
"lint": "yarn run lint:prepare .",
"lint:prepare": "eslint --cache --fix --quiet --format codeframe",
"postreinstall": "yarn setup",
"pretest": "del-cli ./test/tmp && make-dir ./test/tmp",
"pretest:ci": "yarn run pretest",
"reinstall": "del-cli ./node_modules ./yarn.lock",
"setup": "yarn",
"test": "jest --coverage",
"test:ci": "nyc jest --coverage",
"test:jest": "jest --coverage"
},
"version": "2.1.2"
}
}

node_modules/graceful-fs/LICENSE (generated, vendored, executable file → normal file): 2 lines changed

@ -1,6 +1,6 @@
The ISC License
Copyright (c) Isaac Z. Schlueter, Ben Noordhuis, and Contributors
Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above

node_modules/graceful-fs/README.md (generated, vendored, executable file → normal file): 12 lines changed

@ -30,9 +30,19 @@ the directory.
var fs = require('graceful-fs')
// now go and do stuff with it...
fs.readFileSync('some-file-or-whatever')
fs.readFile('some-file-or-whatever', (err, data) => {
// Do stuff here.
})
```
## Sync methods
This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync
methods. If you use sync methods which open file descriptors then you are
responsible for dealing with any errors.
This is a known limitation, not a bug.
## Global Patching
If you want to patch the global fs module (or any other fs-like
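
The README hunk above is cut off at the start of the new "Global Patching" section. For orientation, here is a minimal sketch of the global-patching pattern that section refers to; `gracefulify` is the function graceful-fs documents for this, while the file path is illustrative only.

```js
// Patch the built-in fs module in place so that every consumer in the
// process (including third-party code that requires 'fs' directly)
// picks up graceful-fs's EMFILE/ENFILE queueing behaviour.
var realFs = require('fs')
var gracefulFs = require('graceful-fs')
gracefulFs.gracefulify(realFs)

// From here on, plain require('fs') calls are patched as well.
require('fs').readFile('./some-file.txt', function (err, data) {
  if (err) throw err
  console.log(data.length)
})
```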

node_modules/graceful-fs/clone.js (generated, vendored, executable file → normal file): 6 lines changed

@ -2,12 +2,16 @@
module.exports = clone
var getPrototypeOf = Object.getPrototypeOf || function (obj) {
return obj.__proto__
}
function clone (obj) {
if (obj === null || typeof obj !== 'object')
return obj
if (obj instanceof Object)
var copy = { __proto__: obj.__proto__ }
var copy = { __proto__: getPrototypeOf(obj) }
else
var copy = Object.create(null)

node_modules/graceful-fs/graceful-fs.js (generated, vendored, executable file → normal file): 170 lines changed

@ -54,7 +54,7 @@ if (!fs[gracefulQueue]) {
return fs$close.call(fs, fd, function (err) {
// This function uses the graceful-fs shared queue
if (!err) {
retry()
resetQueue()
}
if (typeof cb === 'function')
@ -72,7 +72,7 @@ if (!fs[gracefulQueue]) {
function closeSync (fd) {
// This function uses the graceful-fs shared queue
fs$closeSync.apply(fs, arguments)
retry()
resetQueue()
}
Object.defineProperty(closeSync, previousSymbol, {
@ -114,14 +114,13 @@ function patch (fs) {
return go$readFile(path, options, cb)
function go$readFile (path, options, cb) {
function go$readFile (path, options, cb, startTime) {
return fs$readFile(path, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$readFile, [path, options, cb]])
enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
retry()
}
})
}
@ -135,14 +134,13 @@ function patch (fs) {
return go$writeFile(path, data, options, cb)
function go$writeFile (path, data, options, cb) {
function go$writeFile (path, data, options, cb, startTime) {
return fs$writeFile(path, data, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$writeFile, [path, data, options, cb]])
enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
retry()
}
})
}
@ -157,14 +155,35 @@ function patch (fs) {
return go$appendFile(path, data, options, cb)
function go$appendFile (path, data, options, cb) {
function go$appendFile (path, data, options, cb, startTime) {
return fs$appendFile(path, data, options, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$appendFile, [path, data, options, cb]])
enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
}
})
}
}
var fs$copyFile = fs.copyFile
if (fs$copyFile)
fs.copyFile = copyFile
function copyFile (src, dest, flags, cb) {
if (typeof flags === 'function') {
cb = flags
flags = 0
}
return go$copyFile(src, dest, flags, cb)
function go$copyFile (src, dest, flags, cb, startTime) {
return fs$copyFile(src, dest, flags, function (err) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
retry()
}
})
}
@ -172,36 +191,46 @@ function patch (fs) {
var fs$readdir = fs.readdir
fs.readdir = readdir
var noReaddirOptionVersions = /^v[0-5]\./
function readdir (path, options, cb) {
var args = [path]
if (typeof options !== 'function') {
args.push(options)
} else {
cb = options
}
args.push(go$readdir$cb)
if (typeof options === 'function')
cb = options, options = null
return go$readdir(args)
var go$readdir = noReaddirOptionVersions.test(process.version)
? function go$readdir (path, options, cb, startTime) {
return fs$readdir(path, fs$readdirCallback(
path, options, cb, startTime
))
}
: function go$readdir (path, options, cb, startTime) {
return fs$readdir(path, options, fs$readdirCallback(
path, options, cb, startTime
))
}
function go$readdir$cb (err, files) {
if (files && files.sort)
files.sort()
return go$readdir(path, options, cb)
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$readdir, [args]])
function fs$readdirCallback (path, options, cb, startTime) {
return function (err, files) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([
go$readdir,
[path, options, cb],
err,
startTime || Date.now(),
Date.now()
])
else {
if (files && files.sort)
files.sort()
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
retry()
if (typeof cb === 'function')
cb.call(this, err, files)
}
}
}
}
function go$readdir (args) {
return fs$readdir.apply(fs, args)
}
if (process.version.substr(0, 4) === 'v0.8') {
var legStreams = legacy(fs)
ReadStream = legStreams.ReadStream
@ -324,14 +353,13 @@ function patch (fs) {
return go$open(path, flags, mode, cb)
function go$open (path, flags, mode, cb) {
function go$open (path, flags, mode, cb, startTime) {
return fs$open(path, flags, mode, function (err, fd) {
if (err && (err.code === 'EMFILE' || err.code === 'ENFILE'))
enqueue([go$open, [path, flags, mode, cb]])
enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()])
else {
if (typeof cb === 'function')
cb.apply(this, arguments)
retry()
}
})
}
@ -343,12 +371,78 @@ function patch (fs) {
function enqueue (elem) {
debug('ENQUEUE', elem[0].name, elem[1])
fs[gracefulQueue].push(elem)
retry()
}
// keep track of the timeout between retry() calls
var retryTimer
// reset the startTime and lastTime to now
// this resets the start of the 60 second overall timeout as well as the
// delay between attempts so that we'll retry these jobs sooner
function resetQueue () {
var now = Date.now()
for (var i = 0; i < fs[gracefulQueue].length; ++i) {
// entries that are only a length of 2 are from an older version, don't
// bother modifying those since they'll be retried anyway.
if (fs[gracefulQueue][i].length > 2) {
fs[gracefulQueue][i][3] = now // startTime
fs[gracefulQueue][i][4] = now // lastTime
}
}
// call retry to make sure we're actively processing the queue
retry()
}
function retry () {
// clear the timer and remove it to help prevent unintended concurrency
clearTimeout(retryTimer)
retryTimer = undefined
if (fs[gracefulQueue].length === 0)
return
var elem = fs[gracefulQueue].shift()
if (elem) {
debug('RETRY', elem[0].name, elem[1])
elem[0].apply(null, elem[1])
var fn = elem[0]
var args = elem[1]
// these items may be unset if they were added by an older graceful-fs
var err = elem[2]
var startTime = elem[3]
var lastTime = elem[4]
// if we don't have a startTime we have no way of knowing if we've waited
// long enough, so go ahead and retry this item now
if (startTime === undefined) {
debug('RETRY', fn.name, args)
fn.apply(null, args)
} else if (Date.now() - startTime >= 60000) {
// it's been more than 60 seconds total, bail now
debug('TIMEOUT', fn.name, args)
var cb = args.pop()
if (typeof cb === 'function')
cb.call(null, err)
} else {
// the amount of time between the last attempt and right now
var sinceAttempt = Date.now() - lastTime
// the amount of time between when we first tried, and when we last tried
// rounded up to at least 1
var sinceStart = Math.max(lastTime - startTime, 1)
// backoff. wait longer than the total time we've been retrying, but only
// up to a maximum of 100ms
var desiredDelay = Math.min(sinceStart * 1.2, 100)
// it's been long enough since the last retry, do it again
if (sinceAttempt >= desiredDelay) {
debug('RETRY', fn.name, args)
fn.apply(null, args.concat([startTime]))
} else {
// if we can't do this job yet, push it to the end of the queue
// and let the next iteration check again
fs[gracefulQueue].push(elem)
}
}
// schedule our next run if one isn't already scheduled
if (retryTimer === undefined) {
retryTimer = setTimeout(retry, 0)
}
}
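
To make the new queue behaviour easier to follow, here is a small self-contained sketch of the backoff rule the reworked `retry()` applies. The constants (1.2, 100 ms, 60 s) are taken from the diff above; the helper name `shouldRetryNow` is invented for illustration.

```js
// Simplified model of the per-item decision in the patched retry():
// each queued job is stored as [fn, args, err, startTime, lastTime].
function shouldRetryNow (startTime, lastTime, now) {
  if (now - startTime >= 60000) {
    return 'timeout'  // give up and call back with the original EMFILE/ENFILE error
  }
  var sinceAttempt = now - lastTime
  var sinceStart = Math.max(lastTime - startTime, 1)
  // wait a bit longer than we have already been retrying, capped at 100 ms
  var desiredDelay = Math.min(sinceStart * 1.2, 100)
  return sinceAttempt >= desiredDelay ? 'retry' : 'requeue'
}

// Example: a job first queued 50 ms ago and last attempted 10 ms ago:
// sinceStart = 40, desiredDelay = min(40 * 1.2, 100) = 48 ms,
// sinceAttempt = 10 ms, so the job is pushed back onto the queue.
var now = Date.now()
console.log(shouldRetryNow(now - 50, now - 10, now))  // 'requeue'
```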

node_modules/graceful-fs/legacy-streams.js (generated, vendored, executable file → normal file): 0 lines changed (mode change only)

node_modules/graceful-fs/package.json (generated, vendored, executable file → normal file): 83 lines changed

@ -1,52 +1,22 @@
{
"_from": "graceful-fs",
"_id": "graceful-fs@4.2.4",
"_inBundle": false,
"_integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==",
"_location": "/graceful-fs",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "graceful-fs",
"name": "graceful-fs",
"escapedName": "graceful-fs",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz",
"_shasum": "2256bde14d3632958c465ebc96dc467ca07a29fb",
"_spec": "graceful-fs",
"_where": "/media/serveradmin/Server/developement",
"bugs": {
"url": "https://github.com/isaacs/node-graceful-fs/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"name": "graceful-fs",
"description": "A drop-in replacement for fs, making various improvements.",
"devDependencies": {
"import-fresh": "^2.0.0",
"mkdirp": "^0.5.0",
"rimraf": "^2.2.8",
"tap": "^12.7.0"
"version": "4.2.11",
"repository": {
"type": "git",
"url": "https://github.com/isaacs/node-graceful-fs"
},
"main": "graceful-fs.js",
"directories": {
"test": "test"
},
"files": [
"fs.js",
"graceful-fs.js",
"legacy-streams.js",
"polyfills.js",
"clone.js"
],
"homepage": "https://github.com/isaacs/node-graceful-fs#readme",
"scripts": {
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags",
"test": "nyc --silent node test.js | tap -c -",
"posttest": "nyc report"
},
"keywords": [
"fs",
"module",
@ -64,17 +34,20 @@
"EACCESS"
],
"license": "ISC",
"main": "graceful-fs.js",
"name": "graceful-fs",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/node-graceful-fs.git"
"devDependencies": {
"import-fresh": "^2.0.0",
"mkdirp": "^0.5.0",
"rimraf": "^2.2.8",
"tap": "^16.3.4"
},
"scripts": {
"postpublish": "git push origin --follow-tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "node test.js | tap -"
},
"version": "4.2.4"
"files": [
"fs.js",
"graceful-fs.js",
"legacy-streams.js",
"polyfills.js",
"clone.js"
],
"tap": {
"reporter": "classic"
}
}

node_modules/graceful-fs/polyfills.js (generated, vendored, executable file → normal file): 83 lines changed

@ -14,10 +14,14 @@ try {
process.cwd()
} catch (er) {}
var chdir = process.chdir
process.chdir = function(d) {
cwd = null
chdir.call(process, d)
// This check is needed until node.js 12 is required
if (typeof process.chdir === 'function') {
var chdir = process.chdir
process.chdir = function (d) {
cwd = null
chdir.call(process, d)
}
if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
}
module.exports = patch
@ -67,13 +71,13 @@ function patch (fs) {
fs.lstatSync = statFixSync(fs.lstatSync)
// if lchmod/lchown do not exist, then make them no-ops
if (!fs.lchmod) {
if (fs.chmod && !fs.lchmod) {
fs.lchmod = function (path, mode, cb) {
if (cb) process.nextTick(cb)
}
fs.lchmodSync = function () {}
}
if (!fs.lchown) {
if (fs.chown && !fs.lchown) {
fs.lchown = function (path, uid, gid, cb) {
if (cb) process.nextTick(cb)
}
@ -90,32 +94,38 @@ function patch (fs) {
// CPU to a busy looping process, which can cause the program causing the lock
// contention to be starved of CPU by node, so the contention doesn't resolve.
if (platform === "win32") {
fs.rename = (function (fs$rename) { return function (from, to, cb) {
var start = Date.now()
var backoff = 0;
fs$rename(from, to, function CB (er) {
if (er
&& (er.code === "EACCES" || er.code === "EPERM")
&& Date.now() - start < 60000) {
setTimeout(function() {
fs.stat(to, function (stater, st) {
if (stater && stater.code === "ENOENT")
fs$rename(from, to, CB);
else
cb(er)
})
}, backoff)
if (backoff < 100)
backoff += 10;
return;
}
if (cb) cb(er)
})
}})(fs.rename)
fs.rename = typeof fs.rename !== 'function' ? fs.rename
: (function (fs$rename) {
function rename (from, to, cb) {
var start = Date.now()
var backoff = 0;
fs$rename(from, to, function CB (er) {
if (er
&& (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY")
&& Date.now() - start < 60000) {
setTimeout(function() {
fs.stat(to, function (stater, st) {
if (stater && stater.code === "ENOENT")
fs$rename(from, to, CB);
else
cb(er)
})
}, backoff)
if (backoff < 100)
backoff += 10;
return;
}
if (cb) cb(er)
})
}
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename)
return rename
})(fs.rename)
}
// if read() returns EAGAIN, then just try it again.
fs.read = (function (fs$read) {
fs.read = typeof fs.read !== 'function' ? fs.read
: (function (fs$read) {
function read (fd, buffer, offset, length, position, callback_) {
var callback
if (callback_ && typeof callback_ === 'function') {
@ -132,11 +142,12 @@ function patch (fs) {
}
// This ensures `util.promisify` works as it does for native `fs.read`.
read.__proto__ = fs$read
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read)
return read
})(fs.read)
fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync
: (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
var eagCounter = 0
while (true) {
try {
@ -195,7 +206,7 @@ function patch (fs) {
}
function patchLutimes (fs) {
if (constants.hasOwnProperty("O_SYMLINK")) {
if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) {
fs.lutimes = function (path, at, mt, cb) {
fs.open(path, constants.O_SYMLINK, function (er, fd) {
if (er) {
@ -229,7 +240,7 @@ function patch (fs) {
return ret
}
} else {
} else if (fs.futimes) {
fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
fs.lutimesSync = function () {}
}
@ -306,8 +317,10 @@ function patch (fs) {
return function (target, options) {
var stats = options ? orig.call(fs, target, options)
: orig.call(fs, target)
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
if (stats) {
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
}
return stats;
}
}
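
One small piece of the polyfills diff that benefits from a worked example is the `statFix` wrapper at the end: it now guards against a missing stats object while still converting negative uid/gid values. A hedged sketch of that conversion; `toUnsignedId` is a hypothetical name used only here.

```js
// Some platforms report uid/gid as negative signed 32-bit integers;
// adding 2^32 (0x100000000) maps them back into the unsigned range,
// which is what the patched stat/fstat/lstat wrappers do.
function toUnsignedId (id) {
  return id < 0 ? id + 0x100000000 : id
}

console.log(toUnsignedId(-2))    // 4294967294
console.log(toUnsignedId(1000))  // 1000 (non-negative values are left alone)
```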

node_modules/mime-db/package.json (generated, vendored): 85 lines changed

@ -1,49 +1,23 @@
{
"_from": "mime-db@1.52.0",
"_id": "mime-db@1.52.0",
"_inBundle": false,
"_integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"_location": "/mime-db",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "mime-db@1.52.0",
"name": "mime-db",
"escapedName": "mime-db",
"rawSpec": "1.52.0",
"saveSpec": null,
"fetchSpec": "1.52.0"
},
"_requiredBy": [
"/mime-types"
],
"_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"_shasum": "bbabcdc02859f4987301c856e3387ce5ec43bf70",
"_spec": "mime-db@1.52.0",
"_where": "/home/ubuntu/formidable/node_modules/mime-types",
"bugs": {
"url": "https://github.com/jshttp/mime-db/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Douglas Christopher Wilson",
"email": "doug@somethingdoug.com"
},
{
"name": "Jonathan Ong",
"email": "me@jongleberry.com",
"url": "http://jongleberry.com"
},
{
"name": "Robert Kieffer",
"email": "robert@broofa.com",
"url": "http://github.com/broofa"
}
],
"deprecated": false,
"name": "mime-db",
"description": "Media Type Database",
"version": "1.52.0",
"contributors": [
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)",
"Robert Kieffer <robert@broofa.com> (http://github.com/broofa)"
],
"license": "MIT",
"keywords": [
"mime",
"db",
"type",
"types",
"database",
"charset",
"charsets"
],
"repository": "jshttp/mime-db",
"devDependencies": {
"bluebird": "3.7.2",
"co": "4.6.0",
@ -63,9 +37,6 @@
"raw-body": "2.5.0",
"stream-to-array": "2.3.0"
},
"engines": {
"node": ">= 0.6"
},
"files": [
"HISTORY.md",
"LICENSE",
@ -73,21 +44,8 @@
"db.json",
"index.js"
],
"homepage": "https://github.com/jshttp/mime-db#readme",
"keywords": [
"mime",
"db",
"type",
"types",
"database",
"charset",
"charsets"
],
"license": "MIT",
"name": "mime-db",
"repository": {
"type": "git",
"url": "git+https://github.com/jshttp/mime-db.git"
"engines": {
"node": ">= 0.6"
},
"scripts": {
"build": "node scripts/build",
@ -98,6 +56,5 @@
"test-cov": "nyc --reporter=html --reporter=text npm test",
"update": "npm run fetch && npm run build",
"version": "node scripts/version-history.js && git add HISTORY.md"
},
"version": "1.52.0"
}
}

node_modules/mime-types/package.json (generated, vendored): 74 lines changed

@ -1,53 +1,21 @@
{
"_from": "mime-types",
"_id": "mime-types@2.1.35",
"_inBundle": false,
"_integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"_location": "/mime-types",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "mime-types",
"name": "mime-types",
"escapedName": "mime-types",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"_shasum": "381a871b62a734450660ae3deee44813f70d959a",
"_spec": "mime-types",
"_where": "/home/ubuntu/formidable/a",
"bugs": {
"url": "https://github.com/jshttp/mime-types/issues"
},
"bundleDependencies": false,
"name": "mime-types",
"description": "The ultimate javascript content-type utility.",
"version": "2.1.35",
"contributors": [
{
"name": "Douglas Christopher Wilson",
"email": "doug@somethingdoug.com"
},
{
"name": "Jeremiah Senkpiel",
"email": "fishrock123@rocketmail.com",
"url": "https://searchbeam.jit.su"
},
{
"name": "Jonathan Ong",
"email": "me@jongleberry.com",
"url": "http://jongleberry.com"
}
"Douglas Christopher Wilson <doug@somethingdoug.com>",
"Jeremiah Senkpiel <fishrock123@rocketmail.com> (https://searchbeam.jit.su)",
"Jonathan Ong <me@jongleberry.com> (http://jongleberry.com)"
],
"license": "MIT",
"keywords": [
"mime",
"types"
],
"repository": "jshttp/mime-types",
"dependencies": {
"mime-db": "1.52.0"
},
"deprecated": false,
"description": "The ultimate javascript content-type utility.",
"devDependencies": {
"eslint": "7.32.0",
"eslint-config-standard": "14.1.1",
@ -59,30 +27,18 @@
"mocha": "9.2.2",
"nyc": "15.1.0"
},
"engines": {
"node": ">= 0.6"
},
"files": [
"HISTORY.md",
"LICENSE",
"index.js"
],
"homepage": "https://github.com/jshttp/mime-types#readme",
"keywords": [
"mime",
"types"
],
"license": "MIT",
"name": "mime-types",
"repository": {
"type": "git",
"url": "git+https://github.com/jshttp/mime-types.git"
"engines": {
"node": ">= 0.6"
},
"scripts": {
"lint": "eslint .",
"test": "mocha --reporter spec test/test.js",
"test-ci": "nyc --reporter=lcov --reporter=text npm test",
"test-cov": "nyc --reporter=html --reporter=text npm test"
},
"version": "2.1.35"
}
}

node_modules/tar/CHANGELOG.md (generated, vendored): 68 lines changed

@ -1,68 +0,0 @@
# Changelog
## 6.0
- Drop support for node 6 and 8
- fix symlinks and hardlinks on windows being packed with `\`-style path
targets
## 5.0
- Address unpack race conditions using path reservations
- Change large-numbers errors from TypeError to Error
- Add `TAR_*` error codes
- Raise `TAR_BAD_ARCHIVE` warning/error when there are no valid entries
found in an archive
- do not treat ignored entries as an invalid archive
- drop support for node v4
- unpack: conditionally use a file mapping to write files on Windows
- Set more portable 'mode' value in portable mode
- Set `portable` gzip option in portable mode
## 4.4
- Add 'mtime' option to tar creation to force mtime
- unpack: only reuse file fs entries if nlink = 1
- unpack: rename before unlinking files on Windows
- Fix encoding/decoding of base-256 numbers
- Use `stat` instead of `lstat` when checking CWD
- Always provide a callback to fs.close()
## 4.3
- Add 'transform' unpack option
## 4.2
- Fail when zlib fails
## 4.1
- Add noMtime flag for tar creation
## 4.0
- unpack: raise error if cwd is missing or not a dir
- pack: don't drop dots from dotfiles when prefixing
## 3.1
- Support `@file.tar` as an entry argument to copy entries from one tar
file to another.
- Add `noPax` option
- `noResume` option for tar.t
- win32: convert `>|<?:` chars to windows-friendly form
- Exclude mtime for dirs in portable mode
## 3.0
- Minipass-based implementation
- Entirely new API surface, `tar.c()`, `tar.x()` etc., much closer to
system tar semantics
- Massive performance improvement
- Require node 4.x and higher
## 0.x, 1.x, 2.x - 2011-2014
- fstream-based implementation
- slow and kinda bad, but better than npm shelling out to the system `tar`

node_modules/tar/lib/pack.js (generated, vendored): 24 lines changed

@ -79,14 +79,26 @@ const Pack = warner(class Pack extends Minipass {
this.portable = !!opt.portable
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object') {
opt.gzip = {}
if (opt.gzip || opt.brotli) {
if (opt.gzip && opt.brotli) {
throw new TypeError('gzip and brotli are mutually exclusive')
}
if (this.portable) {
opt.gzip.portable = true
if (opt.gzip) {
if (typeof opt.gzip !== 'object') {
opt.gzip = {}
}
if (this.portable) {
opt.gzip.portable = true
}
this.zip = new zlib.Gzip(opt.gzip)
}
if (opt.brotli) {
if (typeof opt.brotli !== 'object') {
opt.brotli = {}
}
this.zip = new zlib.BrotliCompress(opt.brotli)
}
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
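
The pack.js hunk above wires `opt.brotli` into the compression stream and makes `gzip` and `brotli` mutually exclusive. A hedged sketch of how a caller might now produce a Brotli-compressed archive through the high-level API; the file names are illustrative, and it assumes `tar.c` forwards the `brotli` option to Pack as the diff suggests.

```js
const tar = require('tar')

// Create files.tar.br, compressing the tar stream with zlib.BrotliCompress.
// Passing both gzip and brotli now throws a TypeError.
tar.c(
  {
    file: 'files.tar.br',
    brotli: {}            // or an object of zlib Brotli options
  },
  ['index.js', 'lib/']
).then(() => console.log('archive written'))
```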

node_modules/tar/lib/parse.js (generated, vendored): 49 lines changed

@ -97,6 +97,16 @@ module.exports = warner(class Parser extends EE {
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// Unlike gzip, brotli doesn't have any magic bytes to identify it
// Users need to explicitly tell us they're extracting a brotli file
// Or we infer from the file extension
const isTBR = (opt.file && (
opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
// if it's a tbr file it MIGHT be brotli, but we don't know until
// we look at it and verify it's not a valid tar file.
this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
: isTBR ? undefined
: false
// have to set this so that streams are ok piping into it
this.writable = true
@ -347,7 +357,9 @@ module.exports = warner(class Parser extends EE {
}
// first write, might be gzipped
if (this[UNZIP] === null && chunk) {
const needSniff = this[UNZIP] === null ||
this.brotli === undefined && this[UNZIP] === false
if (needSniff && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
@ -356,15 +368,45 @@ module.exports = warner(class Parser extends EE {
this[BUFFER] = chunk
return true
}
// look for gzip header
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i]) {
this[UNZIP] = false
}
}
if (this[UNZIP] === null) {
const maybeBrotli = this.brotli === undefined
if (this[UNZIP] === false && maybeBrotli) {
// read the first header to see if it's a valid tar file. If so,
// we can safely assume that it's not actually brotli, despite the
// .tbr or .tar.br file extension.
// if we ended before getting a full chunk, yes, def brotli
if (chunk.length < 512) {
if (this[ENDED]) {
this.brotli = true
} else {
this[BUFFER] = chunk
return true
}
} else {
// if it's tar, it's pretty reliably not brotli, chances of
// that happening are astronomical.
try {
new Header(chunk.slice(0, 512))
this.brotli = false
} catch (_) {
this.brotli = true
}
}
}
if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP] = this[UNZIP] === null
? new zlib.Unzip()
: new zlib.BrotliDecompress()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
@ -502,6 +544,7 @@ module.exports = warner(class Parser extends EE {
this[UNZIP].end(chunk)
} else {
this[ENDED] = true
if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
this.write(chunk)
}
}
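
Because Brotli has no magic bytes, the parse.js changes treat a `.tar.br` or `.tbr` file name as "maybe brotli" and only fall back to `BrotliDecompress` if the first 512 bytes are not a valid tar header. When extracting from a plain stream there is no file name to inspect, so the caller has to opt in explicitly. A hedged usage sketch; the file and directory names are illustrative.

```js
const fs = require('fs')
const tar = require('tar')

// File-based extraction: the .tar.br extension lets the parser infer brotli.
// (cwd must already exist as a directory.)
tar.x({ file: 'files.tar.br', cwd: 'out' })

// Stream-based extraction: no file name to sniff, so state brotli explicitly,
// otherwise the parser assumes an uncompressed or gzipped stream.
fs.createReadStream('files.tar.br')
  .pipe(tar.x({ cwd: 'out', brotli: true }))
```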

node_modules/tar/lib/replace.js (generated, vendored): 2 lines changed

@ -23,7 +23,7 @@ module.exports = (opt_, files, cb) => {
throw new TypeError('file is required')
}
if (opt.gzip) {
if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives')
}

node_modules/tar/lib/update.js (generated, vendored): 2 lines changed

@ -13,7 +13,7 @@ module.exports = (opt_, files, cb) => {
throw new TypeError('file is required')
}
if (opt.gzip) {
if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
throw new TypeError('cannot append to compressed archives')
}

node_modules/tar/package.json (generated, vendored): 63 lines changed

@ -1,35 +1,17 @@
{
"_from": "tar",
"_id": "tar@6.1.15",
"_inBundle": false,
"_integrity": "sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==",
"_location": "/tar",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "tar",
"name": "tar",
"escapedName": "tar",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
"author": "GitHub Inc.",
"name": "tar",
"description": "tar for node",
"version": "6.2.0",
"repository": {
"type": "git",
"url": "https://github.com/isaacs/node-tar.git"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/tar/-/tar-6.1.15.tgz",
"_shasum": "c9738b0b98845a3b344d334b8fa3041aaba53a69",
"_spec": "tar",
"_where": "/home/ubuntu/fix",
"author": {
"name": "GitHub Inc."
"scripts": {
"genparse": "node scripts/generate-parse-fixtures.js",
"snap": "tap",
"test": "tap"
},
"bugs": {
"url": "https://github.com/isaacs/node-tar/issues"
},
"bundleDependencies": false,
"dependencies": {
"chownr": "^2.0.0",
"fs-minipass": "^2.0.0",
@ -38,8 +20,6 @@
"mkdirp": "^1.0.3",
"yallist": "^4.0.0"
},
"deprecated": false,
"description": "tar for node",
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.11.0",
@ -51,6 +31,7 @@
"rimraf": "^3.0.2",
"tap": "^16.0.1"
},
"license": "ISC",
"engines": {
"node": ">=10"
},
@ -59,23 +40,6 @@
"lib/",
"index.js"
],
"homepage": "https://github.com/isaacs/node-tar#readme",
"license": "ISC",
"name": "tar",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/node-tar.git"
},
"scripts": {
"genparse": "node scripts/generate-parse-fixtures.js",
"lint": "eslint \"**/*.js\"",
"lintfix": "npm run lint -- --fix",
"postlint": "template-oss-check",
"posttest": "npm run lint",
"snap": "tap",
"template-oss-apply": "template-oss-apply --force",
"test": "tap"
},
"tap": {
"coverage-map": "map.js",
"timeout": 0,
@ -102,6 +66,5 @@
"16.x",
"18.x"
]
},
"version": "6.1.15"
}
}