=c&&(g=Math.min(g,e-1)),f<=v+1&&(m=Math.max(m,e+1))}else h[e]=void 0}p++}if(r)!function e(){setTimeout(function(){return il?r():void(w()||e())},0)}();else for(;p<=i&&Date.now()<=l;){var y=w();if(y)return y}},addToPath:function(e,n,t,r){var i=e.lastComponent;return i&&i.added===n&&i.removed===t?{oldPos:e.oldPos+r,lastComponent:{count:i.count+1,added:n,removed:t,previousComponent:i.previousComponent}}:{oldPos:e.oldPos+r,lastComponent:{count:1,added:n,removed:t,previousComponent:i}}},extractCommon:function(e,n,t,r){for(var i=n.length,o=t.length,l=e.oldPos,s=l-r,a=0;s+1e.length)&&(n=e.length);for(var t=0,r=new Array(n);t=c.length-2&&a.length<=f.context&&(i=/\n$/.test(u),o=/\n$/.test(d),l=0==a.length&&m.length>r.oldLines,!i&&l&&0e.length)return!1;for(var t=0;t"):i.removed&&t.push(""),t.push((n=i.value,n.replace(/&/g,"&").replace(//g,">").replace(/"/g,"""))),i.added?t.push(""):i.removed&&t.push("")}return t.join("")},e.createPatch=function(e,n,t,r,i,o){return S(e,e,n,t,r,i,o)},e.createTwoFilesPatch=S,e.diffArrays=function(e,n,t){return g.diff(e,n,t)},e.diffChars=function(e,n,t){return r.diff(e,n,t)},e.diffCss=function(e,n,t){return d.diff(e,n,t)},e.diffJson=function(e,n,t){return v.diff(e,n,t)},e.diffLines=L,e.diffSentences=function(e,n,t){return u.diff(e,n,t)},e.diffTrimmedLines=function(e,n,t){var r=i(t,{ignoreWhitespace:!0});return a.diff(e,n,r)},e.diffWords=function(e,n,t){return t=i(t,{ignoreWhitespace:!0}),s.diff(e,n,t)},e.diffWordsWithSpace=function(e,n,t){return s.diff(e,n,t)},e.formatPatch=b,e.merge=function(e,n,t){e=N(e,t),n=N(n,t);var 
r={};(e.index||n.index)&&(r.index=e.index||n.index),(e.newFileName||n.newFileName)&&(P(e)?P(n)?(r.oldFileName=j(r,e.oldFileName,n.oldFileName),r.newFileName=j(r,e.newFileName,n.newFileName),r.oldHeader=j(r,e.oldHeader,n.oldHeader),r.newHeader=j(r,e.newHeader,n.newHeader)):(r.oldFileName=e.oldFileName,r.newFileName=e.newFileName,r.oldHeader=e.oldHeader,r.newHeader=e.newHeader):(r.oldFileName=n.oldFileName||e.oldFileName,r.newFileName=n.newFileName||e.newFileName,r.oldHeader=n.oldHeader||e.oldHeader,r.newHeader=n.newHeader||e.newHeader)),r.hunks=[];for(var i=0,o=0,l=0,s=0;i {
+ fs$1.realpath(path$1, (error, resolvedPath) => {
if (error) return queue.dequeue(suppressErrors ? null : error, state);
- fs.default.stat(resolvedPath, (error$1, stat) => {
+ fs$1.stat(resolvedPath, (error$1, stat) => {
if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return queue.dequeue(null, state);
callback$1(stat, resolvedPath);
@@ -164,11 +164,11 @@ const resolveSymlinksAsync = function(path$1, state, callback$1) {
});
};
const resolveSymlinks = function(path$1, state, callback$1) {
- const { queue, options: { suppressErrors } } = state;
+ const { queue, fs: fs$1, options: { suppressErrors } } = state;
queue.enqueue();
try {
- const resolvedPath = fs.default.realpathSync(path$1);
- const stat = fs.default.statSync(resolvedPath);
+ const resolvedPath = fs$1.realpathSync(path$1);
+ const stat = fs$1.statSync(resolvedPath);
if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return;
callback$1(stat, resolvedPath);
} catch (e) {
@@ -243,21 +243,23 @@ function build$1(options, isSynchronous) {
const readdirOpts = { withFileTypes: true };
const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
state.queue.enqueue();
- if (currentDepth <= 0) return state.queue.dequeue(null, state);
+ if (currentDepth < 0) return state.queue.dequeue(null, state);
+ const { fs: fs$1 } = state;
state.visited.push(crawlPath);
state.counts.directories++;
- fs.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
+ fs$1.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
callback$1(entries, directoryPath, currentDepth);
state.queue.dequeue(state.options.suppressErrors ? null : error, state);
});
};
const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
- if (currentDepth <= 0) return;
+ const { fs: fs$1 } = state;
+ if (currentDepth < 0) return;
state.visited.push(crawlPath);
state.counts.directories++;
let entries = [];
try {
- entries = fs.default.readdirSync(crawlPath || ".", readdirOpts);
+ entries = fs$1.readdirSync(crawlPath || ".", readdirOpts);
} catch (e) {
if (!state.options.suppressErrors) throw e;
}
@@ -320,6 +322,19 @@ var Counter = class {
}
};
+//#endregion
+//#region src/api/aborter.ts
+/**
+* AbortController is not supported on Node 14 so we use this until we can drop
+* support for Node 14.
+*/
+var Aborter = class {
+ aborted = false;
+ abort() {
+ this.aborted = true;
+ }
+};
+
//#endregion
//#region src/api/walker.ts
var Walker = class {
@@ -347,7 +362,8 @@ var Walker = class {
queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
symlinks: /* @__PURE__ */ new Map(),
visited: [""].slice(0, 0),
- controller: new AbortController()
+ controller: new Aborter(),
+ fs: options.fs || fs
};
this.joinPath = build$7(this.root, options);
this.pushDirectory = build$6(this.root, options);
@@ -364,7 +380,7 @@ var Walker = class {
}
walk = (entries, directoryPath, depth) => {
const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
- if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+ if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
const files = this.getArray(this.state.paths);
for (let i = 0; i < entries.length; ++i) {
const entry = entries[i];
@@ -439,12 +455,12 @@ var APIBuilder = class {
//#endregion
//#region src/builder/index.ts
-var pm = null;
+let pm = null;
/* c8 ignore next 6 */
try {
require.resolve("picomatch");
pm = require("picomatch");
-} catch (_e) {}
+} catch {}
var Builder = class {
globCache = {};
options = {
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts b/deps/npm/node_modules/fdir/dist/index.d.cts
similarity index 84%
rename from deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
rename to deps/npm/node_modules/fdir/dist/index.d.cts
index 8eb36bc363449a..f448ef5d9b563f 100644
--- a/deps/npm/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
+++ b/deps/npm/node_modules/fdir/dist/index.d.cts
@@ -1,6 +1,17 @@
///
+import * as nativeFs from "fs";
import picomatch from "picomatch";
+//#region src/api/aborter.d.ts
+/**
+ * AbortController is not supported on Node 14 so we use this until we can drop
+ * support for Node 14.
+ */
+declare class Aborter {
+ aborted: boolean;
+ abort(): void;
+}
+//#endregion
//#region src/api/queue.d.ts
type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
/**
@@ -37,6 +48,14 @@ type GroupOutput = Group[];
type OnlyCountsOutput = Counts;
type PathsOutput = string[];
type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type FSLike = {
+ readdir: typeof nativeFs.readdir;
+ readdirSync: typeof nativeFs.readdirSync;
+ realpath: typeof nativeFs.realpath;
+ realpathSync: typeof nativeFs.realpathSync;
+ stat: typeof nativeFs.stat;
+ statSync: typeof nativeFs.statSync;
+};
type WalkerState = {
root: string;
paths: string[];
@@ -44,7 +63,8 @@ type WalkerState = {
counts: Counts;
options: Options;
queue: Queue;
- controller: AbortController;
+ controller: Aborter;
+ fs: FSLike;
symlinks: Map;
visited: string[];
};
@@ -72,6 +92,7 @@ type Options = {
pathSeparator: PathSeparator;
signal?: AbortSignal;
globFunction?: TGlobFunction;
+ fs?: FSLike;
};
type GlobMatcher = (test: string) => boolean;
type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
@@ -131,4 +152,4 @@ declare class Builder
+import * as nativeFs from "fs";
import picomatch from "picomatch";
+//#region src/api/aborter.d.ts
+/**
+ * AbortController is not supported on Node 14 so we use this until we can drop
+ * support for Node 14.
+ */
+declare class Aborter {
+ aborted: boolean;
+ abort(): void;
+}
+//#endregion
//#region src/api/queue.d.ts
type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
/**
@@ -37,6 +48,14 @@ type GroupOutput = Group[];
type OnlyCountsOutput = Counts;
type PathsOutput = string[];
type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type FSLike = {
+ readdir: typeof nativeFs.readdir;
+ readdirSync: typeof nativeFs.readdirSync;
+ realpath: typeof nativeFs.realpath;
+ realpathSync: typeof nativeFs.realpathSync;
+ stat: typeof nativeFs.stat;
+ statSync: typeof nativeFs.statSync;
+};
type WalkerState = {
root: string;
paths: string[];
@@ -44,7 +63,8 @@ type WalkerState = {
counts: Counts;
options: Options;
queue: Queue;
- controller: AbortController;
+ controller: Aborter;
+ fs: FSLike;
symlinks: Map;
visited: string[];
};
@@ -72,6 +92,7 @@ type Options = {
pathSeparator: PathSeparator;
signal?: AbortSignal;
globFunction?: TGlobFunction;
+ fs?: FSLike;
};
type GlobMatcher = (test: string) => boolean;
type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
@@ -131,4 +152,4 @@ declare class Builder {
if (error) return queue.dequeue(suppressErrors ? null : error, state);
@@ -146,7 +146,7 @@ const resolveSymlinksAsync = function(path, state, callback$1) {
});
};
const resolveSymlinks = function(path, state, callback$1) {
- const { queue, options: { suppressErrors } } = state;
+ const { queue, fs, options: { suppressErrors } } = state;
queue.enqueue();
try {
const resolvedPath = fs.realpathSync(path);
@@ -225,7 +225,8 @@ function build$1(options, isSynchronous) {
const readdirOpts = { withFileTypes: true };
const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
state.queue.enqueue();
- if (currentDepth <= 0) return state.queue.dequeue(null, state);
+ if (currentDepth < 0) return state.queue.dequeue(null, state);
+ const { fs } = state;
state.visited.push(crawlPath);
state.counts.directories++;
fs.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
@@ -234,7 +235,8 @@ const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) =>
});
};
const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
- if (currentDepth <= 0) return;
+ const { fs } = state;
+ if (currentDepth < 0) return;
state.visited.push(crawlPath);
state.counts.directories++;
let entries = [];
@@ -302,6 +304,19 @@ var Counter = class {
}
};
+//#endregion
+//#region src/api/aborter.ts
+/**
+* AbortController is not supported on Node 14 so we use this until we can drop
+* support for Node 14.
+*/
+var Aborter = class {
+ aborted = false;
+ abort() {
+ this.aborted = true;
+ }
+};
+
//#endregion
//#region src/api/walker.ts
var Walker = class {
@@ -329,7 +344,8 @@ var Walker = class {
queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
symlinks: /* @__PURE__ */ new Map(),
visited: [""].slice(0, 0),
- controller: new AbortController()
+ controller: new Aborter(),
+ fs: options.fs || nativeFs
};
this.joinPath = build$7(this.root, options);
this.pushDirectory = build$6(this.root, options);
@@ -346,7 +362,7 @@ var Walker = class {
}
walk = (entries, directoryPath, depth) => {
const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
- if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+ if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
const files = this.getArray(this.state.paths);
for (let i = 0; i < entries.length; ++i) {
const entry = entries[i];
@@ -421,12 +437,12 @@ var APIBuilder = class {
//#endregion
//#region src/builder/index.ts
-var pm = null;
+let pm = null;
/* c8 ignore next 6 */
try {
__require.resolve("picomatch");
pm = __require("picomatch");
-} catch (_e) {}
+} catch {}
var Builder = class {
globCache = {};
options = {
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/fdir/package.json b/deps/npm/node_modules/fdir/package.json
similarity index 84%
rename from deps/npm/node_modules/tinyglobby/node_modules/fdir/package.json
rename to deps/npm/node_modules/fdir/package.json
index f76638120f3df1..e229dff8150800 100644
--- a/deps/npm/node_modules/tinyglobby/node_modules/fdir/package.json
+++ b/deps/npm/node_modules/fdir/package.json
@@ -1,12 +1,13 @@
{
"name": "fdir",
- "version": "6.4.6",
+ "version": "6.5.0",
"description": "The fastest directory crawler & globbing alternative to glob, fast-glob, & tiny-glob. Crawls 1m files in < 1s",
- "main": "dist/index.js",
- "types": "dist/index.d.ts",
+ "main": "./dist/index.cjs",
+ "types": "./dist/index.d.cts",
+ "type": "module",
"scripts": {
"prepublishOnly": "npm run test && npm run build",
- "build": "tsc",
+ "build": "tsdown",
"format": "prettier --write src __tests__ benchmarks",
"test": "vitest run __tests__/",
"test:coverage": "vitest run --coverage __tests__/",
@@ -16,6 +17,9 @@
"bench:fdir": "ts-node benchmarks/fdir-benchmark.ts",
"release": "./scripts/release.sh"
},
+ "engines": {
+ "node": ">=12.0.0"
+ },
"repository": {
"type": "git",
"url": "git+https://github.com/thecodrr/fdir.git"
@@ -47,7 +51,7 @@
"@types/glob": "^8.1.0",
"@types/mock-fs": "^4.13.4",
"@types/node": "^20.9.4",
- "@types/picomatch": "^3.0.0",
+ "@types/picomatch": "^4.0.0",
"@types/tap": "^15.0.11",
"@vitest/coverage-v8": "^0.34.6",
"all-files-in-tree": "^1.1.2",
@@ -75,6 +79,7 @@
"systeminformation": "^5.21.17",
"tiny-glob": "^0.2.9",
"ts-node": "^10.9.1",
+ "tsdown": "^0.12.5",
"typescript": "^5.3.2",
"vitest": "^0.34.6",
"walk-sync": "^3.0.0"
@@ -86,5 +91,13 @@
"picomatch": {
"optional": true
}
+ },
+ "module": "./dist/index.mjs",
+ "exports": {
+ ".": {
+ "import": "./dist/index.mjs",
+ "require": "./dist/index.cjs"
+ },
+ "./package.json": "./package.json"
}
}
diff --git a/deps/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map b/deps/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map
index b24dd4ec47e0bb..e2f7e449672a5f 100644
--- a/deps/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map
+++ b/deps/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"}
\ No newline at end of file
+{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,GACnB,SAAS,MAAM,GAAG,MAAM,EAAE,EAC1B,UAAS,WAAgB,KACxB,OAQF,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/commonjs/index.d.ts b/deps/npm/node_modules/glob/dist/commonjs/index.d.ts
index 9c326ddc895b61..cb09bfb64acb34 100644
--- a/deps/npm/node_modules/glob/dist/commonjs/index.d.ts
+++ b/deps/npm/node_modules/glob/dist/commonjs/index.d.ts
@@ -91,7 +91,7 @@ export declare const glob: typeof glob_ & {
iterateSync: typeof globIterateSync;
Glob: typeof Glob;
hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
- escape: (s: string, { windowsPathsNoEscape, }?: Pick) => string;
- unescape: (s: string, { windowsPathsNoEscape, }?: Pick) => string;
+ escape: (s: string, { windowsPathsNoEscape, magicalBraces, }?: Pick) => string;
+ unescape: (s: string, { windowsPathsNoEscape, magicalBraces, }?: Pick) => string;
};
//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/esm/bin.mjs b/deps/npm/node_modules/glob/dist/esm/bin.mjs
index 5c7bf1e9256105..d4511ae0c3c17c 100755
--- a/deps/npm/node_modules/glob/dist/esm/bin.mjs
+++ b/deps/npm/node_modules/glob/dist/esm/bin.mjs
@@ -3,7 +3,7 @@ import { foregroundChild } from 'foreground-child';
import { existsSync } from 'fs';
import { jack } from 'jackspeak';
import { loadPackageJson } from 'package-json-from-dist';
-import { join } from 'path';
+import { basename, join } from 'path';
import { globStream } from './index.js';
const { version } = loadPackageJson(import.meta.url, '../package.json');
const j = jack({
@@ -30,6 +30,50 @@ const j = jack({
description: `If no positional arguments are provided, glob will use
this pattern`,
},
+})
+ .flag({
+ shell: {
+ default: false,
+ description: `Interpret the command as a shell command by passing it
+ to the shell, with all matched filesystem paths appended,
+ **even if this cannot be done safely**.
+
+ This is **not** unsafe (and usually unnecessary) when using
+ the known Unix shells sh, bash, zsh, and fish, as these can
+ all be executed in such a way as to pass positional
+ arguments safely.
+
+ **Note**: THIS IS UNSAFE IF THE FILE PATHS ARE UNTRUSTED,
+ because a path like \`'some/path/\\$\\(cmd)'\` will be
+ executed by the shell.
+
+ If you do have positional arguments that you wish to pass to
+ the command ahead of the glob pattern matches, use the
+ \`--cmd-arg\`/\`-g\` option instead.
+
+ The next major release of glob will fully remove the ability
+ to use this option unsafely.`,
+ },
+})
+ .optList({
+ 'cmd-arg': {
+ short: 'g',
+ hint: 'arg',
+ default: [],
+ description: `Pass the provided values to the supplied command, ahead of
+ the glob matches.
+
+ For example, the command:
+
+ glob -c echo -g"hello" -g"world" *.txt
+
+ might output:
+
+ hello world a.txt b.txt
+
+ This is a safer (and future-proof) alternative than putting
+ positional arguments in the \`-c\`/\`--cmd\` option.`,
+ },
})
.flag({
all: {
@@ -74,7 +118,7 @@ const j = jack({
description: `Always resolve to posix style paths, using '/' as the
directory separator, even on Windows. Drive letter
absolute matches on Windows will be expanded to their
- full resolved UNC maths, eg instead of 'C:\\foo\\bar',
+ full resolved UNC paths, eg instead of 'C:\\foo\\bar',
it will expand to '//?/C:/foo/bar'.
`,
},
@@ -209,8 +253,10 @@ const j = jack({
description: `Output a huge amount of noisy debug information about
patterns as they are parsed and used to match files.`,
},
-})
- .flag({
+ version: {
+ short: 'V',
+ description: `Output the version (${version})`,
+ },
help: {
short: 'h',
description: 'Show this usage information',
@@ -218,48 +264,78 @@ const j = jack({
});
try {
const { positionals, values } = j.parse();
- if (values.help) {
+ const { cmd, shell, all, default: def, version: showVersion, help, absolute, cwd, dot, 'dot-relative': dotRelative, follow, ignore, 'match-base': matchBase, 'max-depth': maxDepth, mark, nobrace, nocase, nodir, noext, noglobstar, platform, realpath, root, stat, debug, posix, 'cmd-arg': cmdArg, } = values;
+ if (showVersion) {
+ console.log(version);
+ process.exit(0);
+ }
+ if (help) {
console.log(j.usage());
process.exit(0);
}
- if (positionals.length === 0 && !values.default)
+ //const { shell, help } = values
+ if (positionals.length === 0 && !def)
throw 'No patterns provided';
- if (positionals.length === 0 && values.default)
- positionals.push(values.default);
- const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p));
- const matches = values.all ?
- []
- : positionals.filter(p => existsSync(p)).map(p => join(p));
+ if (positionals.length === 0 && def)
+ positionals.push(def);
+ const patterns = all ? positionals : positionals.filter(p => !existsSync(p));
+ const matches = all ? [] : positionals.filter(p => existsSync(p)).map(p => join(p));
const stream = globStream(patterns, {
- absolute: values.absolute,
- cwd: values.cwd,
- dot: values.dot,
- dotRelative: values['dot-relative'],
- follow: values.follow,
- ignore: values.ignore,
- mark: values.mark,
- matchBase: values['match-base'],
- maxDepth: values['max-depth'],
- nobrace: values.nobrace,
- nocase: values.nocase,
- nodir: values.nodir,
- noext: values.noext,
- noglobstar: values.noglobstar,
- platform: values.platform,
- realpath: values.realpath,
- root: values.root,
- stat: values.stat,
- debug: values.debug,
- posix: values.posix,
+ absolute,
+ cwd,
+ dot,
+ dotRelative,
+ follow,
+ ignore,
+ mark,
+ matchBase,
+ maxDepth,
+ nobrace,
+ nocase,
+ nodir,
+ noext,
+ noglobstar,
+ platform: platform,
+ realpath,
+ root,
+ stat,
+ debug,
+ posix,
});
- const cmd = values.cmd;
if (!cmd) {
matches.forEach(m => console.log(m));
stream.on('data', f => console.log(f));
}
else {
- stream.on('data', f => matches.push(f));
- stream.on('end', () => foregroundChild(cmd, matches, { shell: true }));
+ cmdArg.push(...matches);
+ stream.on('data', f => cmdArg.push(f));
+ // Attempt to support commands that contain spaces and otherwise require
+ // shell interpretation, but do NOT shell-interpret the arguments, to avoid
+ // injections via filenames. This affordance can only be done on known Unix
+ // shells, unfortunately.
+ //
+ // 'bash', ['-c', cmd + ' "$@"', 'bash', ...matches]
+ // 'zsh', ['-c', cmd + ' "$@"', 'zsh', ...matches]
+ // 'fish', ['-c', cmd + ' "$argv"', ...matches]
+ const { SHELL = 'unknown' } = process.env;
+ const shellBase = basename(SHELL);
+ const knownShells = ['sh', 'ksh', 'zsh', 'bash', 'fish'];
+ if ((shell || /[ "']/.test(cmd)) &&
+ knownShells.includes(shellBase)) {
+ const cmdWithArgs = `${cmd} "\$${shellBase === 'fish' ? 'argv' : '@'}"`;
+ if (shellBase !== 'fish') {
+ cmdArg.unshift(SHELL);
+ }
+ cmdArg.unshift('-c', cmdWithArgs);
+ stream.on('end', () => foregroundChild(SHELL, cmdArg));
+ }
+ else {
+ if (shell) {
+ process.emitWarning('The --shell option is unsafe, and will be removed. To pass ' +
+ 'positional arguments to the subprocess, use -g/--cmd-arg instead.', 'DeprecationWarning', 'GLOB_SHELL');
+ }
+ stream.on('end', () => foregroundChild(cmd, cmdArg, { shell }));
+ }
}
}
catch (e) {
diff --git a/deps/npm/node_modules/glob/dist/esm/bin.mjs.map b/deps/npm/node_modules/glob/dist/esm/bin.mjs.map
index 67247d5b4634a5..5472e71207a65e 100644
--- a/deps/npm/node_modules/glob/dist/esm/bin.mjs.map
+++ b/deps/npm/node_modules/glob/dist/esm/bin.mjs.map
@@ -1 +1 @@
-{"version":3,"file":"bin.mjs","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AACA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;AAClD,OAAO,EAAE,UAAU,EAAE,MAAM,IAAI,CAAA;AAC/B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAA;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAA;AAC3B,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAEvC,MAAM,EAAE,OAAO,EAAE,GAAG,eAAe,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAA;AAEvE,MAAM,CAAC,GAAG,IAAI,CAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,OAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QA
CjE,YAAY,EAAE;YACZ,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT;KACF;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI,CAAC;IACH,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC;QAChB,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;QAC7C,MAAM,sBAAsB,CAAA;IAC9B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO;QAC5C,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAA;IAClC,MAAM,QAAQ,GACZ,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAA;IACpE,MAAM,OAAO,GACX,MAAM,CAAC,GAAG,CAAC,CAAC;QACV,EAAE;QACJ,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;IAC5D,MAAM,MAAM,GAAG,UAAU,CAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE,CAAC;QACT,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC
,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,eAAe,CAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;IACxE,CAAC;AACH,CAAC;AAAC,OAAO,CAAC,EAAE,CAAC;IACX,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { loadPackageJson } from 'package-json-from-dist'\nimport { join } from 'path'\nimport { globStream } from './index.js'\n\nconst { version } = loadPackageJson(import.meta.url, '../package.json')\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `,\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. 
However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC maths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. 
That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' 
will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validOptions: [\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ],\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n })\n .flag({\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0 && !values.default)\n throw 'No patterns provided'\n if (positionals.length === 0 && values.default)\n positionals.push(values.default)\n const patterns =\n values.all ? 
positionals : positionals.filter(p => !existsSync(p))\n const matches =\n values.all ?\n []\n : positionals.filter(p => existsSync(p)).map(p => join(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? e.message : String(e))\n process.exit(1)\n}\n"]}
\ No newline at end of file
+{"version":3,"file":"bin.mjs","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AACA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;AAClD,OAAO,EAAE,UAAU,EAAE,MAAM,IAAI,CAAA;AAC/B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAA;AACxD,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAEvC,MAAM,EAAE,OAAO,EAAE,GAAG,eAAe,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAA;AAEvE,MAAM,CAAC,GAAG,IAAI,CAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,OAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,OAAO,EAAE,KAAK;QACd,WAAW,EAAE;;;;;;;;;;;;;;;;;;iDAkB8B;KAC5C;CACF,CAAC;KACD,OAAO,CAAC;IACP,SAAS,EAAE;QACT,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,KAAK;QACX,OAAO,EAAE,EAAE;QACX,WAAW,EAAE;;;;;;;;;;;;yEAYsD;KACpE;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;s
CACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,YAAY,EAAE;YACZ,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT;KACF;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;IACD,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uBAAuB,OAAO,GAAG;KAC/C;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI,CAAC;IACH,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,MAAM,EACJ,GAAG,EACH,KAAK,EACL,GAAG,EACH,OAAO,EAAE,GAAG,EACZ,OAAO,EAAE,WAAW,EACpB,IAAI,EACJ,QAAQ,EACR,GAAG,EACH,GAAG,EAEH,cAAc,EAAE,WAAW,EAC3B,MAAM,EACN,MAAM,EACN,YAAY,EAAE,SAAS,EACvB,WAAW,EAAE,QAAQ,EACrB,IAAI,EACJ,OAAO,EACP,MAAM,EACN,KAAK,EACL,KAAK,EACL,UAAU,EACV,QAAQ,EACR,QAAQ,EACR,IAAI,EACJ,IAAI,EACJ,KAAK,EACL,KAAK,EACL,SAAS,EAAE,MAAM,GAClB,GAAG,MAAM,CAAA;IACV,IAAI,WAAW,EAAE,CAAC;QAChB,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC;IACD,IAAI,IAAI,EAAE,CAAC;QACT,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC;IACD,gCAAgC;IAChC,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,GAAG;QAAE,MAAM,sBAAsB,CAAA;IAClE,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,GAAG;QAAE,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;IAC1D,MAAM,QAAQ,GACZ,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAA;IAC7D,MAAM,OAAO,GACX,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;IAErE,MAAM,MAAM,GAAG,UAAU,CAAC,QAAQ,EAAE;QAClC,QAAQ;QACR,GAAG;QACH,GAAG;QACH,WAAW;QACX,MAA
M;QACN,MAAM;QACN,IAAI;QACJ,SAAS;QACT,QAAQ;QACR,OAAO;QACP,MAAM;QACN,KAAK;QACL,KAAK;QACL,UAAU;QACV,QAAQ,EAAE,QAAuC;QACjD,QAAQ;QACR,IAAI;QACJ,IAAI;QACJ,KAAK;QACL,KAAK;KACN,CAAC,CAAA;IAEF,IAAI,CAAC,GAAG,EAAE,CAAC;QACT,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,CAAA;QACvB,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACtC,wEAAwE;QACxE,2EAA2E;QAC3E,2EAA2E;QAC3E,yBAAyB;QACzB,EAAE;QACF,oDAAoD;QACpD,kDAAkD;QAClD,+CAA+C;QAC/C,MAAM,EAAE,KAAK,GAAG,SAAS,EAAE,GAAG,OAAO,CAAC,GAAG,CAAA;QACzC,MAAM,SAAS,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAA;QACjC,MAAM,WAAW,GAAG,CAAC,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;QACxD,IACE,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAC5B,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,EAC/B,CAAC;YACD,MAAM,WAAW,GAAG,GAAG,GAAG,OAAO,SAAS,KAAK,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAA;YACvE,IAAI,SAAS,KAAK,MAAM,EAAE,CAAC;gBACzB,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;YACvB,CAAC;YACD,MAAM,CAAC,OAAO,CAAC,IAAI,EAAE,WAAW,CAAC,CAAA;YACjC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,eAAe,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CAAA;QACxD,CAAC;aAAM,CAAC;YACN,IAAI,KAAK,EAAE,CAAC;gBACV,OAAO,CAAC,WAAW,CACjB,6DAA6D;oBAC3D,mEAAmE,EACrE,oBAAoB,EACpB,YAAY,CACb,CAAA;YACH,CAAC;YACD,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,eAAe,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE,KAAK,EAAE,CAAC,CAAC,CAAA;QACjE,CAAC;IACH,CAAC;AACH,CAAC;AAAC,OAAO,CAAC,EAAE,CAAC;IACX,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { loadPackageJson } from 
'package-json-from-dist'\nimport { basename, join } from 'path'\nimport { globStream } from './index.js'\n\nconst { version } = loadPackageJson(import.meta.url, '../package.json')\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `,\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n shell: {\n default: false,\n description: `Interpret the command as a shell command by passing it\n to the shell, with all matched filesystem paths appended,\n **even if this cannot be done safely**.\n\n This is **not** unsafe (and usually unnecessary) when using\n the known Unix shells sh, bash, zsh, and fish, as these can\n all be executed in such a way as to pass positional\n arguments safely.\n\n **Note**: THIS IS UNSAFE IF THE FILE PATHS ARE UNTRUSTED,\n because a path like \\`'some/path/\\\\$\\\\(cmd)'\\` will be\n executed by the shell.\n\n If you do have positional arguments that you wish to pass to\n the command ahead of the glob pattern matches, use the\n \\`--cmd-arg\\`/\\`-g\\` option instead.\n\n The next major release of glob will fully remove the ability\n to use this option unsafely.`,\n },\n })\n .optList({\n 'cmd-arg': {\n short: 'g',\n hint: 'arg',\n default: [],\n description: `Pass the provided values to the supplied command, ahead of\n the glob matches.\n\n For example, the command:\n\n glob -c echo -g\"hello\" -g\"world\" *.txt\n\n might output:\n\n hello world a.txt b.txt\n\n This is a safer (and future-proof) alternative than putting\n positional arguments in the \\`-c\\`/\\`--cmd\\` option.`,\n },\n })\n .flag({\n all: {\n short: 'A',\n 
description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC paths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. 
This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character. 
If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. 
Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validOptions: [\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ],\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n version: {\n short: 'V',\n description: `Output the version (${version})`,\n },\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n const {\n cmd,\n shell,\n all,\n default: def,\n version: showVersion,\n help,\n absolute,\n cwd,\n dot,\n\n 'dot-relative': dotRelative,\n follow,\n ignore,\n 'match-base': matchBase,\n 'max-depth': maxDepth,\n mark,\n nobrace,\n nocase,\n nodir,\n noext,\n noglobstar,\n platform,\n realpath,\n root,\n stat,\n debug,\n posix,\n 'cmd-arg': cmdArg,\n } = values\n if (showVersion) {\n console.log(version)\n process.exit(0)\n }\n if (help) {\n console.log(j.usage())\n process.exit(0)\n }\n //const { shell, help } = values\n if (positionals.length === 0 && !def) throw 'No patterns provided'\n if (positionals.length === 0 && def) positionals.push(def)\n const patterns =\n all ? positionals : positionals.filter(p => !existsSync(p))\n const matches =\n all ? 
[] : positionals.filter(p => existsSync(p)).map(p => join(p))\n\n const stream = globStream(patterns, {\n absolute,\n cwd,\n dot,\n dotRelative,\n follow,\n ignore,\n mark,\n matchBase,\n maxDepth,\n nobrace,\n nocase,\n nodir,\n noext,\n noglobstar,\n platform: platform as undefined | NodeJS.Platform,\n realpath,\n root,\n stat,\n debug,\n posix,\n })\n\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n cmdArg.push(...matches)\n stream.on('data', f => cmdArg.push(f))\n // Attempt to support commands that contain spaces and otherwise require\n // shell interpretation, but do NOT shell-interpret the arguments, to avoid\n // injections via filenames. This affordance can only be done on known Unix\n // shells, unfortunately.\n //\n // 'bash', ['-c', cmd + ' \"$@\"', 'bash', ...matches]\n // 'zsh', ['-c', cmd + ' \"$@\"', 'zsh', ...matches]\n // 'fish', ['-c', cmd + ' \"$argv\"', ...matches]\n const { SHELL = 'unknown' } = process.env\n const shellBase = basename(SHELL)\n const knownShells = ['sh', 'ksh', 'zsh', 'bash', 'fish']\n if (\n (shell || /[ \"']/.test(cmd)) &&\n knownShells.includes(shellBase)\n ) {\n const cmdWithArgs = `${cmd} \"\\$${shellBase === 'fish' ? 'argv' : '@'}\"`\n if (shellBase !== 'fish') {\n cmdArg.unshift(SHELL)\n }\n cmdArg.unshift('-c', cmdWithArgs)\n stream.on('end', () => foregroundChild(SHELL, cmdArg))\n } else {\n if (shell) {\n process.emitWarning(\n 'The --shell option is unsafe, and will be removed. To pass ' +\n 'positional arguments to the subprocess, use -g/--cmd-arg instead.',\n 'DeprecationWarning',\n 'GLOB_SHELL',\n )\n }\n stream.on('end', () => foregroundChild(cmd, cmdArg, { shell }))\n }\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? e.message : String(e))\n process.exit(1)\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/esm/has-magic.d.ts.map b/deps/npm/node_modules/glob/dist/esm/has-magic.d.ts.map
index b24dd4ec47e0bb..e2f7e449672a5f 100644
--- a/deps/npm/node_modules/glob/dist/esm/has-magic.d.ts.map
+++ b/deps/npm/node_modules/glob/dist/esm/has-magic.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"}
\ No newline at end of file
+{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,GACnB,SAAS,MAAM,GAAG,MAAM,EAAE,EAC1B,UAAS,WAAgB,KACxB,OAQF,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/dist/esm/index.d.ts b/deps/npm/node_modules/glob/dist/esm/index.d.ts
index 9c326ddc895b61..cb09bfb64acb34 100644
--- a/deps/npm/node_modules/glob/dist/esm/index.d.ts
+++ b/deps/npm/node_modules/glob/dist/esm/index.d.ts
@@ -91,7 +91,7 @@ export declare const glob: typeof glob_ & {
iterateSync: typeof globIterateSync;
Glob: typeof Glob;
hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
- escape: (s: string, { windowsPathsNoEscape, }?: Pick) => string;
- unescape: (s: string, { windowsPathsNoEscape, }?: Pick) => string;
+ escape: (s: string, { windowsPathsNoEscape, magicalBraces, }?: Pick) => string;
+ unescape: (s: string, { windowsPathsNoEscape, magicalBraces, }?: Pick) => string;
};
//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/glob/package.json b/deps/npm/node_modules/glob/package.json
index 6d4893b5f327ba..644aece15b356d 100644
--- a/deps/npm/node_modules/glob/package.json
+++ b/deps/npm/node_modules/glob/package.json
@@ -5,7 +5,7 @@
},
"name": "glob",
"description": "the most correct and second fastest glob implementation in JavaScript",
- "version": "10.4.5",
+ "version": "10.5.0",
"type": "module",
"tshy": {
"main": true,
diff --git a/deps/npm/node_modules/ip-address/dist/address-error.js b/deps/npm/node_modules/ip-address/dist/address-error.js
index 4fcade3ba2486c..c178ae48200acd 100644
--- a/deps/npm/node_modules/ip-address/dist/address-error.js
+++ b/deps/npm/node_modules/ip-address/dist/address-error.js
@@ -5,9 +5,7 @@ class AddressError extends Error {
constructor(message, parseMessage) {
super(message);
this.name = 'AddressError';
- if (parseMessage !== null) {
- this.parseMessage = parseMessage;
- }
+ this.parseMessage = parseMessage;
}
}
exports.AddressError = AddressError;
diff --git a/deps/npm/node_modules/ip-address/dist/common.js b/deps/npm/node_modules/ip-address/dist/common.js
index 4d10c9a4e82035..273a01e28e317d 100644
--- a/deps/npm/node_modules/ip-address/dist/common.js
+++ b/deps/npm/node_modules/ip-address/dist/common.js
@@ -1,6 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCorrect = exports.isInSubnet = void 0;
+exports.isInSubnet = isInSubnet;
+exports.isCorrect = isCorrect;
+exports.numberToPaddedHex = numberToPaddedHex;
+exports.stringToPaddedHex = stringToPaddedHex;
+exports.testBit = testBit;
function isInSubnet(address) {
if (this.subnetMask < address.subnetMask) {
return false;
@@ -10,7 +14,6 @@ function isInSubnet(address) {
}
return false;
}
-exports.isInSubnet = isInSubnet;
function isCorrect(defaultBits) {
return function () {
if (this.addressMinusSuffix !== this.correctForm()) {
@@ -22,5 +25,22 @@ function isCorrect(defaultBits) {
return this.parsedSubnet === String(this.subnetMask);
};
}
-exports.isCorrect = isCorrect;
+function numberToPaddedHex(number) {
+ return number.toString(16).padStart(2, '0');
+}
+function stringToPaddedHex(numberString) {
+ return numberToPaddedHex(parseInt(numberString, 10));
+}
+/**
+ * @param binaryValue Binary representation of a value (e.g. `10`)
+ * @param position Byte position, where 0 is the least significant bit
+ */
+function testBit(binaryValue, position) {
+ const { length } = binaryValue;
+ if (position > length) {
+ return false;
+ }
+ const positionInString = length - position;
+ return binaryValue.substring(positionInString, positionInString + 1) === '1';
+}
//# sourceMappingURL=common.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/ip-address/dist/ip-address.js b/deps/npm/node_modules/ip-address/dist/ip-address.js
index 553c005a63cb64..84f348709fe549 100644
--- a/deps/npm/node_modules/ip-address/dist/ip-address.js
+++ b/deps/npm/node_modules/ip-address/dist/ip-address.js
@@ -24,11 +24,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.v6 = exports.AddressError = exports.Address6 = exports.Address4 = void 0;
-const ipv4_1 = require("./ipv4");
+var ipv4_1 = require("./ipv4");
Object.defineProperty(exports, "Address4", { enumerable: true, get: function () { return ipv4_1.Address4; } });
-const ipv6_1 = require("./ipv6");
+var ipv6_1 = require("./ipv6");
Object.defineProperty(exports, "Address6", { enumerable: true, get: function () { return ipv6_1.Address6; } });
-const address_error_1 = require("./address-error");
+var address_error_1 = require("./address-error");
Object.defineProperty(exports, "AddressError", { enumerable: true, get: function () { return address_error_1.AddressError; } });
const helpers = __importStar(require("./v6/helpers"));
exports.v6 = { helpers };
diff --git a/deps/npm/node_modules/ip-address/dist/ipv4.js b/deps/npm/node_modules/ip-address/dist/ipv4.js
index 22a81b5047f05a..311c89c6965cb8 100644
--- a/deps/npm/node_modules/ip-address/dist/ipv4.js
+++ b/deps/npm/node_modules/ip-address/dist/ipv4.js
@@ -28,8 +28,6 @@ exports.Address4 = void 0;
const common = __importStar(require("./common"));
const constants = __importStar(require("./v4/constants"));
const address_error_1 = require("./address-error");
-const jsbn_1 = require("jsbn");
-const sprintf_js_1 = require("sprintf-js");
/**
* Represents an IPv4 address
* @class Address4
@@ -150,7 +148,7 @@ class Address4 {
* @returns {String}
*/
toHex() {
- return this.parsedAddress.map((part) => (0, sprintf_js_1.sprintf)('%02x', parseInt(part, 10))).join(':');
+ return this.parsedAddress.map((part) => common.stringToPaddedHex(part)).join(':');
}
/**
* Converts an IPv4 address object to an array of bytes
@@ -171,28 +169,27 @@ class Address4 {
const output = [];
let i;
for (i = 0; i < constants.GROUPS; i += 2) {
- const hex = (0, sprintf_js_1.sprintf)('%02x%02x', parseInt(this.parsedAddress[i], 10), parseInt(this.parsedAddress[i + 1], 10));
- output.push((0, sprintf_js_1.sprintf)('%x', parseInt(hex, 16)));
+ output.push(`${common.stringToPaddedHex(this.parsedAddress[i])}${common.stringToPaddedHex(this.parsedAddress[i + 1])}`);
}
return output.join(':');
}
/**
- * Returns the address as a BigInteger
+ * Returns the address as a `bigint`
* @memberof Address4
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
- bigInteger() {
- return new jsbn_1.BigInteger(this.parsedAddress.map((n) => (0, sprintf_js_1.sprintf)('%02x', parseInt(n, 10))).join(''), 16);
+ bigInt() {
+ return BigInt(`0x${this.parsedAddress.map((n) => common.stringToPaddedHex(n)).join('')}`);
}
/**
* Helper function getting start address.
* @memberof Address4
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
_startAddress() {
- return new jsbn_1.BigInteger(this.mask() + '0'.repeat(constants.BITS - this.subnetMask), 2);
+ return BigInt(`0b${this.mask() + '0'.repeat(constants.BITS - this.subnetMask)}`);
}
/**
* The first address in the range given by this address' subnet.
@@ -202,7 +199,7 @@ class Address4 {
* @returns {Address4}
*/
startAddress() {
- return Address4.fromBigInteger(this._startAddress());
+ return Address4.fromBigInt(this._startAddress());
}
/**
* The first host address in the range given by this address's subnet ie
@@ -212,17 +209,17 @@ class Address4 {
* @returns {Address4}
*/
startAddressExclusive() {
- const adjust = new jsbn_1.BigInteger('1');
- return Address4.fromBigInteger(this._startAddress().add(adjust));
+ const adjust = BigInt('1');
+ return Address4.fromBigInt(this._startAddress() + adjust);
}
/**
* Helper function getting end address.
* @memberof Address4
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
_endAddress() {
- return new jsbn_1.BigInteger(this.mask() + '1'.repeat(constants.BITS - this.subnetMask), 2);
+ return BigInt(`0b${this.mask() + '1'.repeat(constants.BITS - this.subnetMask)}`);
}
/**
* The last address in the range given by this address' subnet
@@ -232,7 +229,7 @@ class Address4 {
* @returns {Address4}
*/
endAddress() {
- return Address4.fromBigInteger(this._endAddress());
+ return Address4.fromBigInt(this._endAddress());
}
/**
* The last host address in the range given by this address's subnet ie
@@ -242,18 +239,51 @@ class Address4 {
* @returns {Address4}
*/
endAddressExclusive() {
- const adjust = new jsbn_1.BigInteger('1');
- return Address4.fromBigInteger(this._endAddress().subtract(adjust));
+ const adjust = BigInt('1');
+ return Address4.fromBigInt(this._endAddress() - adjust);
}
/**
- * Converts a BigInteger to a v4 address object
+ * Converts a BigInt to a v4 address object
* @memberof Address4
* @static
- * @param {BigInteger} bigInteger - a BigInteger to convert
+ * @param {bigint} bigInt - a BigInt to convert
* @returns {Address4}
*/
- static fromBigInteger(bigInteger) {
- return Address4.fromInteger(parseInt(bigInteger.toString(), 10));
+ static fromBigInt(bigInt) {
+ return Address4.fromHex(bigInt.toString(16));
+ }
+ /**
+ * Convert a byte array to an Address4 object
+ * @memberof Address4
+ * @static
+ * @param {Array} bytes - an array of 4 bytes (0-255)
+ * @returns {Address4}
+ */
+ static fromByteArray(bytes) {
+ if (bytes.length !== 4) {
+ throw new address_error_1.AddressError('IPv4 addresses require exactly 4 bytes');
+ }
+ // Validate that all bytes are within valid range (0-255)
+ for (let i = 0; i < bytes.length; i++) {
+ if (!Number.isInteger(bytes[i]) || bytes[i] < 0 || bytes[i] > 255) {
+ throw new address_error_1.AddressError('All bytes must be integers between 0 and 255');
+ }
+ }
+ return this.fromUnsignedByteArray(bytes);
+ }
+ /**
+ * Convert an unsigned byte array to an Address4 object
+ * @memberof Address4
+ * @static
+ * @param {Array} bytes - an array of 4 unsigned bytes (0-255)
+ * @returns {Address4}
+ */
+ static fromUnsignedByteArray(bytes) {
+ if (bytes.length !== 4) {
+ throw new address_error_1.AddressError('IPv4 addresses require exactly 4 bytes');
+ }
+ const address = bytes.join('.');
+ return new Address4(address);
}
/**
* Returns the first n bits of the address, defaulting to the
@@ -293,7 +323,7 @@ class Address4 {
if (options.omitSuffix) {
return reversed;
}
- return (0, sprintf_js_1.sprintf)('%s.in-addr.arpa.', reversed);
+ return `${reversed}.in-addr.arpa.`;
}
/**
* Returns true if the given address is a multicast address
@@ -311,7 +341,7 @@ class Address4 {
* @returns {string}
*/
binaryZeroPad() {
- return this.bigInteger().toString(2).padStart(constants.BITS, '0');
+ return this.bigInt().toString(2).padStart(constants.BITS, '0');
}
/**
* Groups an IPv4 address for inclusion at the end of an IPv6 address
@@ -319,7 +349,11 @@ class Address4 {
*/
groupForV6() {
const segments = this.parsedAddress;
- return this.address.replace(constants.RE_ADDRESS, (0, sprintf_js_1.sprintf)('%s.%s', segments.slice(0, 2).join('.'), segments.slice(2, 4).join('.')));
+ return this.address.replace(constants.RE_ADDRESS, `${segments
+ .slice(0, 2)
+ .join('.')}.${segments
+ .slice(2, 4)
+ .join('.')}`);
}
}
exports.Address4 = Address4;
diff --git a/deps/npm/node_modules/ip-address/dist/ipv6.js b/deps/npm/node_modules/ip-address/dist/ipv6.js
index c88ab84b9ad77a..5f88ab63a56eb8 100644
--- a/deps/npm/node_modules/ip-address/dist/ipv6.js
+++ b/deps/npm/node_modules/ip-address/dist/ipv6.js
@@ -33,8 +33,7 @@ const helpers = __importStar(require("./v6/helpers"));
const ipv4_1 = require("./ipv4");
const regular_expressions_1 = require("./v6/regular-expressions");
const address_error_1 = require("./address-error");
-const jsbn_1 = require("jsbn");
-const sprintf_js_1 = require("sprintf-js");
+const common_1 = require("./common");
function assert(condition) {
if (!condition) {
throw new Error('Assertion failed.');
@@ -70,7 +69,7 @@ function compact(address, slice) {
return s1.concat(['compact']).concat(s2);
}
function paddedHex(octet) {
- return (0, sprintf_js_1.sprintf)('%04x', parseInt(octet, 16));
+ return parseInt(octet, 16).toString(16).padStart(4, '0');
}
function unsignByte(b) {
// eslint-disable-next-line no-bitwise
@@ -148,18 +147,18 @@ class Address6 {
}
}
/**
- * Convert a BigInteger to a v6 address object
+ * Convert a BigInt to a v6 address object
* @memberof Address6
* @static
- * @param {BigInteger} bigInteger - a BigInteger to convert
+ * @param {bigint} bigInt - a BigInt to convert
* @returns {Address6}
* @example
- * var bigInteger = new BigInteger('1000000000000');
- * var address = Address6.fromBigInteger(bigInteger);
+ * var bigInt = BigInt('1000000000000');
+ * var address = Address6.fromBigInt(bigInt);
* address.correctForm(); // '::e8:d4a5:1000'
*/
- static fromBigInteger(bigInteger) {
- const hex = bigInteger.toString(16).padStart(32, '0');
+ static fromBigInt(bigInt) {
+ const hex = bigInt.toString(16).padStart(32, '0');
const groups = [];
let i;
for (i = 0; i < constants6.GROUPS; i++) {
@@ -279,7 +278,7 @@ class Address6 {
* @returns {String} the Microsoft UNC transcription of the address
*/
microsoftTranscription() {
- return (0, sprintf_js_1.sprintf)('%s.ipv6-literal.net', this.correctForm().replace(/:/g, '-'));
+ return `${this.correctForm().replace(/:/g, '-')}.ipv6-literal.net`;
}
/**
* Return the first n bits of the address, defaulting to the subnet mask
@@ -295,7 +294,7 @@ class Address6 {
* Return the number of possible subnets of a given size in the address
* @memberof Address6
* @instance
- * @param {number} [size=128] - the subnet size
+ * @param {number} [subnetSize=128] - the subnet size
* @returns {String}
*/
// TODO: probably useful to have a numeric version of this too
@@ -306,16 +305,16 @@ class Address6 {
if (subnetPowers < 0) {
return '0';
}
- return addCommas(new jsbn_1.BigInteger('2', 10).pow(subnetPowers).toString(10));
+ return addCommas((BigInt('2') ** BigInt(subnetPowers)).toString(10));
}
/**
* Helper function getting start address.
* @memberof Address6
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
_startAddress() {
- return new jsbn_1.BigInteger(this.mask() + '0'.repeat(constants6.BITS - this.subnetMask), 2);
+ return BigInt(`0b${this.mask() + '0'.repeat(constants6.BITS - this.subnetMask)}`);
}
/**
* The first address in the range given by this address' subnet
@@ -325,7 +324,7 @@ class Address6 {
* @returns {Address6}
*/
startAddress() {
- return Address6.fromBigInteger(this._startAddress());
+ return Address6.fromBigInt(this._startAddress());
}
/**
* The first host address in the range given by this address's subnet ie
@@ -335,17 +334,17 @@ class Address6 {
* @returns {Address6}
*/
startAddressExclusive() {
- const adjust = new jsbn_1.BigInteger('1');
- return Address6.fromBigInteger(this._startAddress().add(adjust));
+ const adjust = BigInt('1');
+ return Address6.fromBigInt(this._startAddress() + adjust);
}
/**
* Helper function getting end address.
* @memberof Address6
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
_endAddress() {
- return new jsbn_1.BigInteger(this.mask() + '1'.repeat(constants6.BITS - this.subnetMask), 2);
+ return BigInt(`0b${this.mask() + '1'.repeat(constants6.BITS - this.subnetMask)}`);
}
/**
* The last address in the range given by this address' subnet
@@ -355,7 +354,7 @@ class Address6 {
* @returns {Address6}
*/
endAddress() {
- return Address6.fromBigInteger(this._endAddress());
+ return Address6.fromBigInt(this._endAddress());
}
/**
* The last host address in the range given by this address's subnet ie
@@ -365,8 +364,8 @@ class Address6 {
* @returns {Address6}
*/
endAddressExclusive() {
- const adjust = new jsbn_1.BigInteger('1');
- return Address6.fromBigInteger(this._endAddress().subtract(adjust));
+ const adjust = BigInt('1');
+ return Address6.fromBigInt(this._endAddress() - adjust);
}
/**
* Return the scope of the address
@@ -375,7 +374,7 @@ class Address6 {
* @returns {String}
*/
getScope() {
- let scope = constants6.SCOPES[this.getBits(12, 16).intValue()];
+ let scope = constants6.SCOPES[parseInt(this.getBits(12, 16).toString(10), 10)];
if (this.getType() === 'Global unicast' && scope !== 'Link local') {
scope = 'Global';
}
@@ -396,13 +395,13 @@ class Address6 {
return 'Global unicast';
}
/**
- * Return the bits in the given range as a BigInteger
+ * Return the bits in the given range as a BigInt
* @memberof Address6
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
getBits(start, end) {
- return new jsbn_1.BigInteger(this.getBitsBase2(start, end), 2);
+ return BigInt(`0b${this.getBitsBase2(start, end)}`);
}
/**
* Return the bits in the given range as a base-2 string
@@ -460,7 +459,7 @@ class Address6 {
if (options.omitSuffix) {
return reversed;
}
- return (0, sprintf_js_1.sprintf)('%s.ip6.arpa.', reversed);
+ return `${reversed}.ip6.arpa.`;
}
if (options.omitSuffix) {
return '';
@@ -509,7 +508,7 @@ class Address6 {
}
let correct = groups.join(':');
correct = correct.replace(/^compact$/, '::');
- correct = correct.replace(/^compact|compact$/, ':');
+ correct = correct.replace(/(^compact)|(compact$)/, ':');
correct = correct.replace(/compact/, '');
return correct;
}
@@ -525,7 +524,7 @@ class Address6 {
* // 0000000000000000000000000000000000000000000000000001000000010001'
*/
binaryZeroPad() {
- return this.bigInteger().toString(2).padStart(constants6.BITS, '0');
+ return this.bigInt().toString(2).padStart(constants6.BITS, '0');
}
// TODO: Improve the semantics of this helper function
parse4in6(address) {
@@ -551,11 +550,11 @@ class Address6 {
address = this.parse4in6(address);
const badCharacters = address.match(constants6.RE_BAD_CHARACTERS);
if (badCharacters) {
- throw new address_error_1.AddressError((0, sprintf_js_1.sprintf)('Bad character%s detected in address: %s', badCharacters.length > 1 ? 's' : '', badCharacters.join('')), address.replace(constants6.RE_BAD_CHARACTERS, '$1'));
+ throw new address_error_1.AddressError(`Bad character${badCharacters.length > 1 ? 's' : ''} detected in address: ${badCharacters.join('')}`, address.replace(constants6.RE_BAD_CHARACTERS, '$1'));
}
const badAddress = address.match(constants6.RE_BAD_ADDRESS);
if (badAddress) {
- throw new address_error_1.AddressError((0, sprintf_js_1.sprintf)('Address failed regex: %s', badAddress.join('')), address.replace(constants6.RE_BAD_ADDRESS, '$1'));
+ throw new address_error_1.AddressError(`Address failed regex: ${badAddress.join('')}`, address.replace(constants6.RE_BAD_ADDRESS, '$1'));
}
let groups = [];
const halves = address.split('::');
@@ -588,7 +587,7 @@ class Address6 {
else {
throw new address_error_1.AddressError('Too many :: groups found');
}
- groups = groups.map((group) => (0, sprintf_js_1.sprintf)('%x', parseInt(group, 16)));
+ groups = groups.map((group) => parseInt(group, 16).toString(16));
if (groups.length !== this.groups) {
throw new address_error_1.AddressError('Incorrect number of groups found');
}
@@ -610,16 +609,16 @@ class Address6 {
* @returns {String}
*/
decimal() {
- return this.parsedAddress.map((n) => (0, sprintf_js_1.sprintf)('%05d', parseInt(n, 16))).join(':');
+ return this.parsedAddress.map((n) => parseInt(n, 16).toString(10).padStart(5, '0')).join(':');
}
/**
- * Return the address as a BigInteger
+ * Return the address as a BigInt
* @memberof Address6
* @instance
- * @returns {BigInteger}
+ * @returns {bigint}
*/
- bigInteger() {
- return new jsbn_1.BigInteger(this.parsedAddress.map(paddedHex).join(''), 16);
+ bigInt() {
+ return BigInt(`0x${this.parsedAddress.map(paddedHex).join('')}`);
}
/**
* Return the last two groups of this address as an IPv4 address string
@@ -632,7 +631,7 @@ class Address6 {
*/
to4() {
const binary = this.binaryZeroPad().split('');
- return ipv4_1.Address4.fromHex(new jsbn_1.BigInteger(binary.slice(96, 128).join(''), 2).toString(16));
+ return ipv4_1.Address4.fromHex(BigInt(`0b${binary.slice(96, 128).join('')}`).toString(16));
}
/**
* Return the v4-in-v6 form of the address
@@ -679,18 +678,21 @@ class Address6 {
public IPv4 address of the NAT with all bits inverted.
*/
const prefix = this.getBitsBase16(0, 32);
- const udpPort = this.getBits(80, 96).xor(new jsbn_1.BigInteger('ffff', 16)).toString();
+ const bitsForUdpPort = this.getBits(80, 96);
+ // eslint-disable-next-line no-bitwise
+ const udpPort = (bitsForUdpPort ^ BigInt('0xffff')).toString();
const server4 = ipv4_1.Address4.fromHex(this.getBitsBase16(32, 64));
- const client4 = ipv4_1.Address4.fromHex(this.getBits(96, 128).xor(new jsbn_1.BigInteger('ffffffff', 16)).toString(16));
- const flags = this.getBits(64, 80);
+ const bitsForClient4 = this.getBits(96, 128);
+ // eslint-disable-next-line no-bitwise
+ const client4 = ipv4_1.Address4.fromHex((bitsForClient4 ^ BigInt('0xffffffff')).toString(16));
const flagsBase2 = this.getBitsBase2(64, 80);
- const coneNat = flags.testBit(15);
- const reserved = flags.testBit(14);
- const groupIndividual = flags.testBit(8);
- const universalLocal = flags.testBit(9);
- const nonce = new jsbn_1.BigInteger(flagsBase2.slice(2, 6) + flagsBase2.slice(8, 16), 2).toString(10);
+ const coneNat = (0, common_1.testBit)(flagsBase2, 15);
+ const reserved = (0, common_1.testBit)(flagsBase2, 14);
+ const groupIndividual = (0, common_1.testBit)(flagsBase2, 8);
+ const universalLocal = (0, common_1.testBit)(flagsBase2, 9);
+ const nonce = BigInt(`0b${flagsBase2.slice(2, 6) + flagsBase2.slice(8, 16)}`).toString(10);
return {
- prefix: (0, sprintf_js_1.sprintf)('%s:%s', prefix.slice(0, 4), prefix.slice(4, 8)),
+ prefix: `${prefix.slice(0, 4)}:${prefix.slice(4, 8)}`,
server4: server4.address,
client4: client4.address,
flags: flagsBase2,
@@ -718,7 +720,7 @@ class Address6 {
const prefix = this.getBitsBase16(0, 16);
const gateway = ipv4_1.Address4.fromHex(this.getBitsBase16(16, 48));
return {
- prefix: (0, sprintf_js_1.sprintf)('%s', prefix.slice(0, 4)),
+ prefix: prefix.slice(0, 4),
gateway: gateway.address,
};
}
@@ -748,12 +750,14 @@ class Address6 {
* @returns {Array}
*/
toByteArray() {
- const byteArray = this.bigInteger().toByteArray();
- // work around issue where `toByteArray` returns a leading 0 element
- if (byteArray.length === 17 && byteArray[0] === 0) {
- return byteArray.slice(1);
+ const valueWithoutPadding = this.bigInt().toString(16);
+ const leadingPad = '0'.repeat(valueWithoutPadding.length % 2);
+ const value = `${leadingPad}${valueWithoutPadding}`;
+ const bytes = [];
+ for (let i = 0, length = value.length; i < length; i += 2) {
+ bytes.push(parseInt(value.substring(i, i + 2), 16));
}
- return byteArray;
+ return bytes;
}
/**
* Return an unsigned byte array
@@ -780,14 +784,14 @@ class Address6 {
* @returns {Address6}
*/
static fromUnsignedByteArray(bytes) {
- const BYTE_MAX = new jsbn_1.BigInteger('256', 10);
- let result = new jsbn_1.BigInteger('0', 10);
- let multiplier = new jsbn_1.BigInteger('1', 10);
+ const BYTE_MAX = BigInt('256');
+ let result = BigInt('0');
+ let multiplier = BigInt('1');
for (let i = bytes.length - 1; i >= 0; i--) {
- result = result.add(multiplier.multiply(new jsbn_1.BigInteger(bytes[i].toString(10), 10)));
- multiplier = multiplier.multiply(BYTE_MAX);
+ result += multiplier * BigInt(bytes[i].toString(10));
+ multiplier *= BYTE_MAX;
}
- return Address6.fromBigInteger(result);
+ return Address6.fromBigInt(result);
}
/**
* Returns true if the address is in the canonical form, false otherwise
@@ -867,9 +871,9 @@ class Address6 {
optionalPort = '';
}
else {
- optionalPort = (0, sprintf_js_1.sprintf)(':%s', optionalPort);
+ optionalPort = `:${optionalPort}`;
}
- return (0, sprintf_js_1.sprintf)('http://[%s]%s/', this.correctForm(), optionalPort);
+ return `http://[${this.correctForm()}]${optionalPort}/`;
}
/**
* @returns {String} a link suitable for conveying the address via a URL hash
@@ -891,10 +895,11 @@ class Address6 {
if (options.v4) {
formFunction = this.to4in6;
}
+ const form = formFunction.call(this);
if (options.className) {
- return (0, sprintf_js_1.sprintf)('%2$s', options.prefix, formFunction.call(this), options.className);
+ return `${form}`;
}
- return (0, sprintf_js_1.sprintf)('%2$s', options.prefix, formFunction.call(this));
+ return `${form}`;
}
/**
* Groups an address
@@ -918,9 +923,9 @@ class Address6 {
}
const classes = ['hover-group'];
for (let i = this.elisionBegin; i < this.elisionBegin + this.elidedGroups; i++) {
- classes.push((0, sprintf_js_1.sprintf)('group-%d', i));
+ classes.push(`group-${i}`);
}
- output.push((0, sprintf_js_1.sprintf)('', classes.join(' ')));
+ output.push(``);
if (right.length) {
output.push(...helpers.simpleGroup(right, this.elisionEnd));
}
diff --git a/deps/npm/node_modules/ip-address/dist/v6/constants.js b/deps/npm/node_modules/ip-address/dist/v6/constants.js
index e316bb0d0c2cd5..0abc423e0a91ab 100644
--- a/deps/npm/node_modules/ip-address/dist/v6/constants.js
+++ b/deps/npm/node_modules/ip-address/dist/v6/constants.js
@@ -71,6 +71,6 @@ exports.RE_SUBNET_STRING = /\/\d{1,3}(?=%|$)/;
* @static
*/
exports.RE_ZONE_STRING = /%.*$/;
-exports.RE_URL = new RegExp(/^\[{0,1}([0-9a-f:]+)\]{0,1}/);
-exports.RE_URL_WITH_PORT = new RegExp(/\[([0-9a-f:]+)\]:([0-9]{1,5})/);
+exports.RE_URL = /^\[{0,1}([0-9a-f:]+)\]{0,1}/;
+exports.RE_URL_WITH_PORT = /\[([0-9a-f:]+)\]:([0-9]{1,5})/;
//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/ip-address/dist/v6/helpers.js b/deps/npm/node_modules/ip-address/dist/v6/helpers.js
index 918aaa58c85d79..fafca0c2712ddc 100644
--- a/deps/npm/node_modules/ip-address/dist/v6/helpers.js
+++ b/deps/npm/node_modules/ip-address/dist/v6/helpers.js
@@ -1,25 +1,24 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-exports.simpleGroup = exports.spanLeadingZeroes = exports.spanAll = exports.spanAllZeroes = void 0;
-const sprintf_js_1 = require("sprintf-js");
+exports.spanAllZeroes = spanAllZeroes;
+exports.spanAll = spanAll;
+exports.spanLeadingZeroes = spanLeadingZeroes;
+exports.simpleGroup = simpleGroup;
/**
* @returns {String} the string with all zeroes contained in a
*/
function spanAllZeroes(s) {
return s.replace(/(0+)/g, '$1');
}
-exports.spanAllZeroes = spanAllZeroes;
/**
* @returns {String} the string with each character contained in a
*/
function spanAll(s, offset = 0) {
const letters = s.split('');
return letters
- .map((n, i) => (0, sprintf_js_1.sprintf)('%s', n, i + offset, spanAllZeroes(n)) // XXX Use #base-2 .value-0 instead?
- )
+ .map((n, i) => `${spanAllZeroes(n)}`)
.join('');
}
-exports.spanAll = spanAll;
function spanLeadingZeroesSimple(group) {
return group.replace(/^(0+)/, '$1');
}
@@ -30,7 +29,6 @@ function spanLeadingZeroes(address) {
const groups = address.split(':');
return groups.map((g) => spanLeadingZeroesSimple(g)).join(':');
}
-exports.spanLeadingZeroes = spanLeadingZeroes;
/**
* Groups an address
* @returns {String} a grouped address
@@ -41,8 +39,7 @@ function simpleGroup(addressString, offset = 0) {
if (/group-v4/.test(g)) {
return g;
}
- return (0, sprintf_js_1.sprintf)('%s', i + offset, spanLeadingZeroesSimple(g));
+ return `${spanLeadingZeroesSimple(g)}`;
});
}
-exports.simpleGroup = simpleGroup;
//# sourceMappingURL=helpers.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/ip-address/dist/v6/regular-expressions.js b/deps/npm/node_modules/ip-address/dist/v6/regular-expressions.js
index 616550a864509f..a2c51459307fdd 100644
--- a/deps/npm/node_modules/ip-address/dist/v6/regular-expressions.js
+++ b/deps/npm/node_modules/ip-address/dist/v6/regular-expressions.js
@@ -23,20 +23,21 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.possibleElisions = exports.simpleRegularExpression = exports.ADDRESS_BOUNDARY = exports.padGroup = exports.groupPossibilities = void 0;
+exports.ADDRESS_BOUNDARY = void 0;
+exports.groupPossibilities = groupPossibilities;
+exports.padGroup = padGroup;
+exports.simpleRegularExpression = simpleRegularExpression;
+exports.possibleElisions = possibleElisions;
const v6 = __importStar(require("./constants"));
-const sprintf_js_1 = require("sprintf-js");
function groupPossibilities(possibilities) {
- return (0, sprintf_js_1.sprintf)('(%s)', possibilities.join('|'));
+ return `(${possibilities.join('|')})`;
}
-exports.groupPossibilities = groupPossibilities;
function padGroup(group) {
if (group.length < 4) {
- return (0, sprintf_js_1.sprintf)('0{0,%d}%s', 4 - group.length, group);
+ return `0{0,${4 - group.length}}${group}`;
}
return group;
}
-exports.padGroup = padGroup;
exports.ADDRESS_BOUNDARY = '[^A-Fa-f0-9:]';
function simpleRegularExpression(groups) {
const zeroIndexes = [];
@@ -61,7 +62,6 @@ function simpleRegularExpression(groups) {
possibilities.push(groups.map(padGroup).join(':'));
return groupPossibilities(possibilities);
}
-exports.simpleRegularExpression = simpleRegularExpression;
function possibleElisions(elidedGroups, moreLeft, moreRight) {
const left = moreLeft ? '' : ':';
const right = moreRight ? '' : ':';
@@ -79,18 +79,17 @@ function possibleElisions(elidedGroups, moreLeft, moreRight) {
possibilities.push(':');
}
// 4. elision from the left side
- possibilities.push((0, sprintf_js_1.sprintf)('%s(:0{1,4}){1,%d}', left, elidedGroups - 1));
+ possibilities.push(`${left}(:0{1,4}){1,${elidedGroups - 1}}`);
// 5. elision from the right side
- possibilities.push((0, sprintf_js_1.sprintf)('(0{1,4}:){1,%d}%s', elidedGroups - 1, right));
+ possibilities.push(`(0{1,4}:){1,${elidedGroups - 1}}${right}`);
// 6. no elision
- possibilities.push((0, sprintf_js_1.sprintf)('(0{1,4}:){%d}0{1,4}', elidedGroups - 1));
+ possibilities.push(`(0{1,4}:){${elidedGroups - 1}}0{1,4}`);
// 7. elision (including sloppy elision) from the middle
for (let groups = 1; groups < elidedGroups - 1; groups++) {
for (let position = 1; position < elidedGroups - groups; position++) {
- possibilities.push((0, sprintf_js_1.sprintf)('(0{1,4}:){%d}:(0{1,4}:){%d}0{1,4}', position, elidedGroups - position - groups - 1));
+ possibilities.push(`(0{1,4}:){${position}}:(0{1,4}:){${elidedGroups - position - groups - 1}}0{1,4}`);
}
}
return groupPossibilities(possibilities);
}
-exports.possibleElisions = possibleElisions;
//# sourceMappingURL=regular-expressions.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/ip-address/package.json b/deps/npm/node_modules/ip-address/package.json
index 0543fc41a13061..5cf811e8c563af 100644
--- a/deps/npm/node_modules/ip-address/package.json
+++ b/deps/npm/node_modules/ip-address/package.json
@@ -7,7 +7,7 @@
"browser",
"validation"
],
- "version": "9.0.5",
+ "version": "10.1.0",
"author": "Beau Gunderson (https://beaugunderson.com/)",
"license": "MIT",
"main": "dist/ip-address.js",
@@ -51,37 +51,28 @@
"type": "git",
"url": "git://github.com/beaugunderson/ip-address.git"
},
- "dependencies": {
- "jsbn": "1.1.0",
- "sprintf-js": "^1.1.3"
- },
"devDependencies": {
- "@types/chai": "^4.2.18",
- "@types/jsbn": "^1.2.31",
- "@types/mocha": "^10.0.1",
- "@types/sprintf-js": "^1.1.2",
- "@typescript-eslint/eslint-plugin": "^6.7.2",
- "@typescript-eslint/parser": "^6.7.2",
- "browserify": "^17.0.0",
- "chai": "^4.3.4",
- "codecov": "^3.8.2",
- "documentation": "^14.0.2",
+ "@types/chai": "^5.0.0",
+ "@types/mocha": "^10.0.8",
+ "@typescript-eslint/eslint-plugin": "^8.8.0",
+ "@typescript-eslint/parser": "^8.8.0",
+ "chai": "^5.1.1",
+ "documentation": "^14.0.3",
"eslint": "^8.50.0",
+ "eslint_d": "^14.0.4",
"eslint-config-airbnb": "^19.0.4",
- "eslint-config-prettier": "^9.0.0",
+ "eslint-config-prettier": "^9.1.0",
"eslint-plugin-filenames": "^1.3.2",
- "eslint-plugin-import": "^2.23.4",
- "eslint-plugin-jsx-a11y": "^6.4.1",
- "eslint-plugin-prettier": "^5.0.0",
- "eslint-plugin-react": "^7.24.0",
- "eslint-plugin-react-hooks": "^4.2.0",
+ "eslint-plugin-import": "^2.30.0",
+ "eslint-plugin-jsx-a11y": "^6.10.0",
+ "eslint-plugin-prettier": "^5.2.1",
"eslint-plugin-sort-imports-es6-autofix": "^0.6.0",
- "mocha": "^10.2.0",
- "nyc": "^15.1.0",
- "prettier": "^3.0.3",
- "release-it": "^16.2.0",
- "source-map-support": "^0.5.19",
- "ts-node": "^10.0.0",
- "typescript": "^5.2.2"
+ "mocha": "^10.7.3",
+ "nyc": "^17.1.0",
+ "prettier": "^3.3.3",
+ "release-it": "^17.6.0",
+ "source-map-support": "^0.5.21",
+ "tsx": "^4.19.1",
+ "typescript": "<5.6.0"
}
}
diff --git a/deps/npm/node_modules/jsbn/LICENSE b/deps/npm/node_modules/jsbn/LICENSE
deleted file mode 100644
index c769b38beabae1..00000000000000
--- a/deps/npm/node_modules/jsbn/LICENSE
+++ /dev/null
@@ -1,40 +0,0 @@
-Licensing
----------
-
-This software is covered under the following copyright:
-
-/*
- * Copyright (c) 2003-2005 Tom Wu
- * All Rights Reserved.
- *
- * Permission is hereby granted, free of charge, to any person obtaining
- * a copy of this software and associated documentation files (the
- * "Software"), to deal in the Software without restriction, including
- * without limitation the rights to use, copy, modify, merge, publish,
- * distribute, sublicense, and/or sell copies of the Software, and to
- * permit persons to whom the Software is furnished to do so, subject to
- * the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
- * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
- *
- * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
- * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
- * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
- * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
- * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
- *
- * In addition, the following condition applies:
- *
- * All redistributions must retain an intact copy of this copyright notice
- * and disclaimer.
- */
-
-Address all questions regarding this license to:
-
- Tom Wu
- tjw@cs.Stanford.EDU
diff --git a/deps/npm/node_modules/jsbn/example.html b/deps/npm/node_modules/jsbn/example.html
deleted file mode 100644
index 1c0489b1376352..00000000000000
--- a/deps/npm/node_modules/jsbn/example.html
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/deps/npm/node_modules/jsbn/example.js b/deps/npm/node_modules/jsbn/example.js
deleted file mode 100644
index 85979909d7b1d8..00000000000000
--- a/deps/npm/node_modules/jsbn/example.js
+++ /dev/null
@@ -1,5 +0,0 @@
-(function () {
- var BigInteger = jsbn.BigInteger;
- var a = new BigInteger('91823918239182398123');
- console.log(a.bitLength());
-}());
diff --git a/deps/npm/node_modules/jsbn/index.js b/deps/npm/node_modules/jsbn/index.js
deleted file mode 100644
index e9eb697b07a891..00000000000000
--- a/deps/npm/node_modules/jsbn/index.js
+++ /dev/null
@@ -1,1361 +0,0 @@
-(function(){
-
- // Copyright (c) 2005 Tom Wu
- // All Rights Reserved.
- // See "LICENSE" for details.
-
- // Basic JavaScript BN library - subset useful for RSA encryption.
-
- // Bits per digit
- var dbits;
-
- // JavaScript engine analysis
- var canary = 0xdeadbeefcafe;
- var j_lm = ((canary&0xffffff)==0xefcafe);
-
- // (public) Constructor
- function BigInteger(a,b,c) {
- if(a != null)
- if("number" == typeof a) this.fromNumber(a,b,c);
- else if(b == null && "string" != typeof a) this.fromString(a,256);
- else this.fromString(a,b);
- }
-
- // return new, unset BigInteger
- function nbi() { return new BigInteger(null); }
-
- // am: Compute w_j += (x*this_i), propagate carries,
- // c is initial carry, returns final carry.
- // c < 3*dvalue, x < 2*dvalue, this_i < dvalue
- // We need to select the fastest one that works in this environment.
-
- // am1: use a single mult and divide to get the high bits,
- // max digit bits should be 26 because
- // max internal value = 2*dvalue^2-2*dvalue (< 2^53)
- function am1(i,x,w,j,c,n) {
- while(--n >= 0) {
- var v = x*this[i++]+w[j]+c;
- c = Math.floor(v/0x4000000);
- w[j++] = v&0x3ffffff;
- }
- return c;
- }
- // am2 avoids a big mult-and-extract completely.
- // Max digit bits should be <= 30 because we do bitwise ops
- // on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
- function am2(i,x,w,j,c,n) {
- var xl = x&0x7fff, xh = x>>15;
- while(--n >= 0) {
- var l = this[i]&0x7fff;
- var h = this[i++]>>15;
- var m = xh*l+h*xl;
- l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff);
- c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
- w[j++] = l&0x3fffffff;
- }
- return c;
- }
- // Alternately, set max digit bits to 28 since some
- // browsers slow down when dealing with 32-bit numbers.
- function am3(i,x,w,j,c,n) {
- var xl = x&0x3fff, xh = x>>14;
- while(--n >= 0) {
- var l = this[i]&0x3fff;
- var h = this[i++]>>14;
- var m = xh*l+h*xl;
- l = xl*l+((m&0x3fff)<<14)+w[j]+c;
- c = (l>>28)+(m>>14)+xh*h;
- w[j++] = l&0xfffffff;
- }
- return c;
- }
- var inBrowser = typeof navigator !== "undefined";
- if(inBrowser && j_lm && (navigator.appName == "Microsoft Internet Explorer")) {
- BigInteger.prototype.am = am2;
- dbits = 30;
- }
- else if(inBrowser && j_lm && (navigator.appName != "Netscape")) {
- BigInteger.prototype.am = am1;
- dbits = 26;
- }
- else { // Mozilla/Netscape seems to prefer am3
- BigInteger.prototype.am = am3;
- dbits = 28;
- }
-
- BigInteger.prototype.DB = dbits;
- BigInteger.prototype.DM = ((1<= 0; --i) r[i] = this[i];
- r.t = this.t;
- r.s = this.s;
- }
-
- // (protected) set from integer value x, -DV <= x < DV
- function bnpFromInt(x) {
- this.t = 1;
- this.s = (x<0)?-1:0;
- if(x > 0) this[0] = x;
- else if(x < -1) this[0] = x+this.DV;
- else this.t = 0;
- }
-
- // return bigint initialized to value
- function nbv(i) { var r = nbi(); r.fromInt(i); return r; }
-
- // (protected) set from string and radix
- function bnpFromString(s,b) {
- var k;
- if(b == 16) k = 4;
- else if(b == 8) k = 3;
- else if(b == 256) k = 8; // byte array
- else if(b == 2) k = 1;
- else if(b == 32) k = 5;
- else if(b == 4) k = 2;
- else { this.fromRadix(s,b); return; }
- this.t = 0;
- this.s = 0;
- var i = s.length, mi = false, sh = 0;
- while(--i >= 0) {
- var x = (k==8)?s[i]&0xff:intAt(s,i);
- if(x < 0) {
- if(s.charAt(i) == "-") mi = true;
- continue;
- }
- mi = false;
- if(sh == 0)
- this[this.t++] = x;
- else if(sh+k > this.DB) {
- this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh));
- }
- else
- this[this.t-1] |= x<= this.DB) sh -= this.DB;
- }
- if(k == 8 && (s[0]&0x80) != 0) {
- this.s = -1;
- if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this[this.t-1] == c) --this.t;
- }
-
- // (public) return string representation in given radix
- function bnToString(b) {
- if(this.s < 0) return "-"+this.negate().toString(b);
- var k;
- if(b == 16) k = 4;
- else if(b == 8) k = 3;
- else if(b == 2) k = 1;
- else if(b == 32) k = 5;
- else if(b == 4) k = 2;
- else return this.toRadix(b);
- var km = (1< 0) {
- if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
- while(i >= 0) {
- if(p < k) {
- d = (this[i]&((1<>(p+=this.DB-k);
- }
- else {
- d = (this[i]>>(p-=k))&km;
- if(p <= 0) { p += this.DB; --i; }
- }
- if(d > 0) m = true;
- if(m) r += int2char(d);
- }
- }
- return m?r:"0";
- }
-
- // (public) -this
- function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
-
- // (public) |this|
- function bnAbs() { return (this.s<0)?this.negate():this; }
-
- // (public) return + if this > a, - if this < a, 0 if equal
- function bnCompareTo(a) {
- var r = this.s-a.s;
- if(r != 0) return r;
- var i = this.t;
- r = i-a.t;
- if(r != 0) return (this.s<0)?-r:r;
- while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
- return 0;
- }
-
- // returns bit length of the integer x
- function nbits(x) {
- var r = 1, t;
- if((t=x>>>16) != 0) { x = t; r += 16; }
- if((t=x>>8) != 0) { x = t; r += 8; }
- if((t=x>>4) != 0) { x = t; r += 4; }
- if((t=x>>2) != 0) { x = t; r += 2; }
- if((t=x>>1) != 0) { x = t; r += 1; }
- return r;
- }
-
- // (public) return the number of bits in "this"
- function bnBitLength() {
- if(this.t <= 0) return 0;
- return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
- }
-
- // (protected) r = this << n*DB
- function bnpDLShiftTo(n,r) {
- var i;
- for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
- for(i = n-1; i >= 0; --i) r[i] = 0;
- r.t = this.t+n;
- r.s = this.s;
- }
-
- // (protected) r = this >> n*DB
- function bnpDRShiftTo(n,r) {
- for(var i = n; i < this.t; ++i) r[i-n] = this[i];
- r.t = Math.max(this.t-n,0);
- r.s = this.s;
- }
-
- // (protected) r = this << n
- function bnpLShiftTo(n,r) {
- var bs = n%this.DB;
- var cbs = this.DB-bs;
- var bm = (1<= 0; --i) {
- r[i+ds+1] = (this[i]>>cbs)|c;
- c = (this[i]&bm)<= 0; --i) r[i] = 0;
- r[ds] = c;
- r.t = this.t+ds+1;
- r.s = this.s;
- r.clamp();
- }
-
- // (protected) r = this >> n
- function bnpRShiftTo(n,r) {
- r.s = this.s;
- var ds = Math.floor(n/this.DB);
- if(ds >= this.t) { r.t = 0; return; }
- var bs = n%this.DB;
- var cbs = this.DB-bs;
- var bm = (1<>bs;
- for(var i = ds+1; i < this.t; ++i) {
- r[i-ds-1] |= (this[i]&bm)<>bs;
- }
- if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<>= this.DB;
- }
- if(a.t < this.t) {
- c -= a.s;
- while(i < this.t) {
- c += this[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c += this.s;
- }
- else {
- c += this.s;
- while(i < a.t) {
- c -= a[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c -= a.s;
- }
- r.s = (c<0)?-1:0;
- if(c < -1) r[i++] = this.DV+c;
- else if(c > 0) r[i++] = c;
- r.t = i;
- r.clamp();
- }
-
- // (protected) r = this * a, r != this,a (HAC 14.12)
- // "this" should be the larger one if appropriate.
- function bnpMultiplyTo(a,r) {
- var x = this.abs(), y = a.abs();
- var i = x.t;
- r.t = i+y.t;
- while(--i >= 0) r[i] = 0;
- for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
- r.s = 0;
- r.clamp();
- if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
- }
-
- // (protected) r = this^2, r != this (HAC 14.16)
- function bnpSquareTo(r) {
- var x = this.abs();
- var i = r.t = 2*x.t;
- while(--i >= 0) r[i] = 0;
- for(i = 0; i < x.t-1; ++i) {
- var c = x.am(i,x[i],r,2*i,0,1);
- if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
- r[i+x.t] -= x.DV;
- r[i+x.t+1] = 1;
- }
- }
- if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
- r.s = 0;
- r.clamp();
- }
-
- // (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
- // r != q, this != m. q or r may be null.
- function bnpDivRemTo(m,q,r) {
- var pm = m.abs();
- if(pm.t <= 0) return;
- var pt = this.abs();
- if(pt.t < pm.t) {
- if(q != null) q.fromInt(0);
- if(r != null) this.copyTo(r);
- return;
- }
- if(r == null) r = nbi();
- var y = nbi(), ts = this.s, ms = m.s;
- var nsh = this.DB-nbits(pm[pm.t-1]); // normalize modulus
- if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
- else { pm.copyTo(y); pt.copyTo(r); }
- var ys = y.t;
- var y0 = y[ys-1];
- if(y0 == 0) return;
- var yt = y0*(1<1)?y[ys-2]>>this.F2:0);
- var d1 = this.FV/yt, d2 = (1<= 0) {
- r[r.t++] = 1;
- r.subTo(t,r);
- }
- BigInteger.ONE.dlShiftTo(ys,t);
- t.subTo(y,y); // "negative" y so we can replace sub with am later
- while(y.t < ys) y[y.t++] = 0;
- while(--j >= 0) {
- // Estimate quotient digit
- var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
- if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out
- y.dlShiftTo(j,t);
- r.subTo(t,r);
- while(r[i] < --qd) r.subTo(t,r);
- }
- }
- if(q != null) {
- r.drShiftTo(ys,q);
- if(ts != ms) BigInteger.ZERO.subTo(q,q);
- }
- r.t = ys;
- r.clamp();
- if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder
- if(ts < 0) BigInteger.ZERO.subTo(r,r);
- }
-
- // (public) this mod a
- function bnMod(a) {
- var r = nbi();
- this.abs().divRemTo(a,null,r);
- if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
- return r;
- }
-
- // Modular reduction using "classic" algorithm
- function Classic(m) { this.m = m; }
- function cConvert(x) {
- if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
- else return x;
- }
- function cRevert(x) { return x; }
- function cReduce(x) { x.divRemTo(this.m,null,x); }
- function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
- function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
- Classic.prototype.convert = cConvert;
- Classic.prototype.revert = cRevert;
- Classic.prototype.reduce = cReduce;
- Classic.prototype.mulTo = cMulTo;
- Classic.prototype.sqrTo = cSqrTo;
-
- // (protected) return "-1/this % 2^DB"; useful for Mont. reduction
- // justification:
- // xy == 1 (mod m)
- // xy = 1+km
- // xy(2-xy) = (1+km)(1-km)
- // x[y(2-xy)] = 1-k^2m^2
- // x[y(2-xy)] == 1 (mod m^2)
- // if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
- // should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
- // JS multiply "overflows" differently from C/C++, so care is needed here.
- function bnpInvDigit() {
- if(this.t < 1) return 0;
- var x = this[0];
- if((x&1) == 0) return 0;
- var y = x&3; // y == 1/x mod 2^2
- y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4
- y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8
- y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16
- // last step - calculate inverse mod DV directly;
- // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
- y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits
- // we really want the negative inverse, and -DV < y < DV
- return (y>0)?this.DV-y:-y;
- }
-
- // Montgomery reduction
- function Montgomery(m) {
- this.m = m;
- this.mp = m.invDigit();
- this.mpl = this.mp&0x7fff;
- this.mph = this.mp>>15;
- this.um = (1<<(m.DB-15))-1;
- this.mt2 = 2*m.t;
- }
-
- // xR mod m
- function montConvert(x) {
- var r = nbi();
- x.abs().dlShiftTo(this.m.t,r);
- r.divRemTo(this.m,null,r);
- if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
- return r;
- }
-
- // x/R mod m
- function montRevert(x) {
- var r = nbi();
- x.copyTo(r);
- this.reduce(r);
- return r;
- }
-
- // x = x/R mod m (HAC 14.32)
- function montReduce(x) {
- while(x.t <= this.mt2) // pad x so am has enough room later
- x[x.t++] = 0;
- for(var i = 0; i < this.m.t; ++i) {
- // faster way of calculating u0 = x[i]*mp mod DV
- var j = x[i]&0x7fff;
- var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
- // use am to combine the multiply-shift-add into one call
- j = i+this.m.t;
- x[j] += this.m.am(0,u0,x,i,0,this.m.t);
- // propagate carry
- while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
- }
- x.clamp();
- x.drShiftTo(this.m.t,x);
- if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
- }
-
- // r = "x^2/R mod m"; x != r
- function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
- // r = "xy/R mod m"; x,y != r
- function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-
- Montgomery.prototype.convert = montConvert;
- Montgomery.prototype.revert = montRevert;
- Montgomery.prototype.reduce = montReduce;
- Montgomery.prototype.mulTo = montMulTo;
- Montgomery.prototype.sqrTo = montSqrTo;
-
- // (protected) true iff this is even
- function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
-
- // (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
- function bnpExp(e,z) {
- if(e > 0xffffffff || e < 1) return BigInteger.ONE;
- var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
- g.copyTo(r);
- while(--i >= 0) {
- z.sqrTo(r,r2);
- if((e&(1< 0) z.mulTo(r2,g,r);
- else { var t = r; r = r2; r2 = t; }
- }
- return z.revert(r);
- }
-
- // (public) this^e % m, 0 <= e < 2^32
- function bnModPowInt(e,m) {
- var z;
- if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
- return this.exp(e,z);
- }
-
- // protected
- BigInteger.prototype.copyTo = bnpCopyTo;
- BigInteger.prototype.fromInt = bnpFromInt;
- BigInteger.prototype.fromString = bnpFromString;
- BigInteger.prototype.clamp = bnpClamp;
- BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
- BigInteger.prototype.drShiftTo = bnpDRShiftTo;
- BigInteger.prototype.lShiftTo = bnpLShiftTo;
- BigInteger.prototype.rShiftTo = bnpRShiftTo;
- BigInteger.prototype.subTo = bnpSubTo;
- BigInteger.prototype.multiplyTo = bnpMultiplyTo;
- BigInteger.prototype.squareTo = bnpSquareTo;
- BigInteger.prototype.divRemTo = bnpDivRemTo;
- BigInteger.prototype.invDigit = bnpInvDigit;
- BigInteger.prototype.isEven = bnpIsEven;
- BigInteger.prototype.exp = bnpExp;
-
- // public
- BigInteger.prototype.toString = bnToString;
- BigInteger.prototype.negate = bnNegate;
- BigInteger.prototype.abs = bnAbs;
- BigInteger.prototype.compareTo = bnCompareTo;
- BigInteger.prototype.bitLength = bnBitLength;
- BigInteger.prototype.mod = bnMod;
- BigInteger.prototype.modPowInt = bnModPowInt;
-
- // "constants"
- BigInteger.ZERO = nbv(0);
- BigInteger.ONE = nbv(1);
-
- // Copyright (c) 2005-2009 Tom Wu
- // All Rights Reserved.
- // See "LICENSE" for details.
-
- // Extended JavaScript BN functions, required for RSA private ops.
-
- // Version 1.1: new BigInteger("0", 10) returns "proper" zero
- // Version 1.2: square() API, isProbablePrime fix
-
- // (public)
- function bnClone() { var r = nbi(); this.copyTo(r); return r; }
-
- // (public) return value as integer
- function bnIntValue() {
- if(this.s < 0) {
- if(this.t == 1) return this[0]-this.DV;
- else if(this.t == 0) return -1;
- }
- else if(this.t == 1) return this[0];
- else if(this.t == 0) return 0;
- // assumes 16 < DB < 32
- return ((this[1]&((1<<(32-this.DB))-1))<>24; }
-
- // (public) return value as short (assumes DB>=16)
- function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; }
-
- // (protected) return x s.t. r^x < DV
- function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }
-
- // (public) 0 if this == 0, 1 if this > 0
- function bnSigNum() {
- if(this.s < 0) return -1;
- else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0;
- else return 1;
- }
-
- // (protected) convert to radix string
- function bnpToRadix(b) {
- if(b == null) b = 10;
- if(this.signum() == 0 || b < 2 || b > 36) return "0";
- var cs = this.chunkSize(b);
- var a = Math.pow(b,cs);
- var d = nbv(a), y = nbi(), z = nbi(), r = "";
- this.divRemTo(d,y,z);
- while(y.signum() > 0) {
- r = (a+z.intValue()).toString(b).substr(1) + r;
- y.divRemTo(d,y,z);
- }
- return z.intValue().toString(b) + r;
- }
-
- // (protected) convert from radix string
- function bnpFromRadix(s,b) {
- this.fromInt(0);
- if(b == null) b = 10;
- var cs = this.chunkSize(b);
- var d = Math.pow(b,cs), mi = false, j = 0, w = 0;
- for(var i = 0; i < s.length; ++i) {
- var x = intAt(s,i);
- if(x < 0) {
- if(s.charAt(i) == "-" && this.signum() == 0) mi = true;
- continue;
- }
- w = b*w+x;
- if(++j >= cs) {
- this.dMultiply(d);
- this.dAddOffset(w,0);
- j = 0;
- w = 0;
- }
- }
- if(j > 0) {
- this.dMultiply(Math.pow(b,j));
- this.dAddOffset(w,0);
- }
- if(mi) BigInteger.ZERO.subTo(this,this);
- }
-
- // (protected) alternate constructor
- function bnpFromNumber(a,b,c) {
- if("number" == typeof b) {
- // new BigInteger(int,int,RNG)
- if(a < 2) this.fromInt(1);
- else {
- this.fromNumber(a,c);
- if(!this.testBit(a-1)) // force MSB set
- this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);
- if(this.isEven()) this.dAddOffset(1,0); // force odd
- while(!this.isProbablePrime(b)) {
- this.dAddOffset(2,0);
- if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);
- }
- }
- }
- else {
- // new BigInteger(int,RNG)
- var x = new Array(), t = a&7;
- x.length = (a>>3)+1;
- b.nextBytes(x);
- if(t > 0) x[0] &= ((1< 0) {
- if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p)
- r[k++] = d|(this.s<<(this.DB-p));
- while(i >= 0) {
- if(p < 8) {
- d = (this[i]&((1<>(p+=this.DB-8);
- }
- else {
- d = (this[i]>>(p-=8))&0xff;
- if(p <= 0) { p += this.DB; --i; }
- }
- if((d&0x80) != 0) d |= -256;
- if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;
- if(k > 0 || d != this.s) r[k++] = d;
- }
- }
- return r;
- }
-
- function bnEquals(a) { return(this.compareTo(a)==0); }
- function bnMin(a) { return(this.compareTo(a)<0)?this:a; }
- function bnMax(a) { return(this.compareTo(a)>0)?this:a; }
-
- // (protected) r = this op a (bitwise)
- function bnpBitwiseTo(a,op,r) {
- var i, f, m = Math.min(a.t,this.t);
- for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]);
- if(a.t < this.t) {
- f = a.s&this.DM;
- for(i = m; i < this.t; ++i) r[i] = op(this[i],f);
- r.t = this.t;
- }
- else {
- f = this.s&this.DM;
- for(i = m; i < a.t; ++i) r[i] = op(f,a[i]);
- r.t = a.t;
- }
- r.s = op(this.s,a.s);
- r.clamp();
- }
-
- // (public) this & a
- function op_and(x,y) { return x&y; }
- function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }
-
- // (public) this | a
- function op_or(x,y) { return x|y; }
- function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }
-
- // (public) this ^ a
- function op_xor(x,y) { return x^y; }
- function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }
-
- // (public) this & ~a
- function op_andnot(x,y) { return x&~y; }
- function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }
-
- // (public) ~this
- function bnNot() {
- var r = nbi();
- for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i];
- r.t = this.t;
- r.s = ~this.s;
- return r;
- }
-
- // (public) this << n
- function bnShiftLeft(n) {
- var r = nbi();
- if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);
- return r;
- }
-
- // (public) this >> n
- function bnShiftRight(n) {
- var r = nbi();
- if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);
- return r;
- }
-
- // return index of lowest 1-bit in x, x < 2^31
- function lbit(x) {
- if(x == 0) return -1;
- var r = 0;
- if((x&0xffff) == 0) { x >>= 16; r += 16; }
- if((x&0xff) == 0) { x >>= 8; r += 8; }
- if((x&0xf) == 0) { x >>= 4; r += 4; }
- if((x&3) == 0) { x >>= 2; r += 2; }
- if((x&1) == 0) ++r;
- return r;
- }
-
- // (public) returns index of lowest 1-bit (or -1 if none)
- function bnGetLowestSetBit() {
- for(var i = 0; i < this.t; ++i)
- if(this[i] != 0) return i*this.DB+lbit(this[i]);
- if(this.s < 0) return this.t*this.DB;
- return -1;
- }
-
- // return number of 1 bits in x
- function cbit(x) {
- var r = 0;
- while(x != 0) { x &= x-1; ++r; }
- return r;
- }
-
- // (public) return number of set bits
- function bnBitCount() {
- var r = 0, x = this.s&this.DM;
- for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x);
- return r;
- }
-
- // (public) true iff nth bit is set
- function bnTestBit(n) {
- var j = Math.floor(n/this.DB);
- if(j >= this.t) return(this.s!=0);
- return((this[j]&(1<<(n%this.DB)))!=0);
- }
-
- // (protected) this op (1<>= this.DB;
- }
- if(a.t < this.t) {
- c += a.s;
- while(i < this.t) {
- c += this[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c += this.s;
- }
- else {
- c += this.s;
- while(i < a.t) {
- c += a[i];
- r[i++] = c&this.DM;
- c >>= this.DB;
- }
- c += a.s;
- }
- r.s = (c<0)?-1:0;
- if(c > 0) r[i++] = c;
- else if(c < -1) r[i++] = this.DV+c;
- r.t = i;
- r.clamp();
- }
-
- // (public) this + a
- function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }
-
- // (public) this - a
- function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }
-
- // (public) this * a
- function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; }
-
- // (public) this^2
- function bnSquare() { var r = nbi(); this.squareTo(r); return r; }
-
- // (public) this / a
- function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }
-
- // (public) this % a
- function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }
-
- // (public) [this/a,this%a]
- function bnDivideAndRemainder(a) {
- var q = nbi(), r = nbi();
- this.divRemTo(a,q,r);
- return new Array(q,r);
- }
-
- // (protected) this *= n, this >= 0, 1 < n < DV
- function bnpDMultiply(n) {
- this[this.t] = this.am(0,n-1,this,0,0,this.t);
- ++this.t;
- this.clamp();
- }
-
- // (protected) this += n << w words, this >= 0
- function bnpDAddOffset(n,w) {
- if(n == 0) return;
- while(this.t <= w) this[this.t++] = 0;
- this[w] += n;
- while(this[w] >= this.DV) {
- this[w] -= this.DV;
- if(++w >= this.t) this[this.t++] = 0;
- ++this[w];
- }
- }
-
- // A "null" reducer
- function NullExp() {}
- function nNop(x) { return x; }
- function nMulTo(x,y,r) { x.multiplyTo(y,r); }
- function nSqrTo(x,r) { x.squareTo(r); }
-
- NullExp.prototype.convert = nNop;
- NullExp.prototype.revert = nNop;
- NullExp.prototype.mulTo = nMulTo;
- NullExp.prototype.sqrTo = nSqrTo;
-
- // (public) this^e
- function bnPow(e) { return this.exp(e,new NullExp()); }
-
- // (protected) r = lower n words of "this * a", a.t <= n
- // "this" should be the larger one if appropriate.
- function bnpMultiplyLowerTo(a,n,r) {
- var i = Math.min(this.t+a.t,n);
- r.s = 0; // assumes a,this >= 0
- r.t = i;
- while(i > 0) r[--i] = 0;
- var j;
- for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t);
- for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i);
- r.clamp();
- }
-
- // (protected) r = "this * a" without lower n words, n > 0
- // "this" should be the larger one if appropriate.
- function bnpMultiplyUpperTo(a,n,r) {
- --n;
- var i = r.t = this.t+a.t-n;
- r.s = 0; // assumes a,this >= 0
- while(--i >= 0) r[i] = 0;
- for(i = Math.max(n-this.t,0); i < a.t; ++i)
- r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n);
- r.clamp();
- r.drShiftTo(1,r);
- }
-
- // Barrett modular reduction
- function Barrett(m) {
- // setup Barrett
- this.r2 = nbi();
- this.q3 = nbi();
- BigInteger.ONE.dlShiftTo(2*m.t,this.r2);
- this.mu = this.r2.divide(m);
- this.m = m;
- }
-
- function barrettConvert(x) {
- if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);
- else if(x.compareTo(this.m) < 0) return x;
- else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }
- }
-
- function barrettRevert(x) { return x; }
-
- // x = x mod m (HAC 14.42)
- function barrettReduce(x) {
- x.drShiftTo(this.m.t-1,this.r2);
- if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }
- this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);
- this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);
- while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);
- x.subTo(this.r2,x);
- while(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
- }
-
- // r = x^2 mod m; x != r
- function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
- // r = x*y mod m; x,y != r
- function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-
- Barrett.prototype.convert = barrettConvert;
- Barrett.prototype.revert = barrettRevert;
- Barrett.prototype.reduce = barrettReduce;
- Barrett.prototype.mulTo = barrettMulTo;
- Barrett.prototype.sqrTo = barrettSqrTo;
-
- // (public) this^e % m (HAC 14.85)
- function bnModPow(e,m) {
- var i = e.bitLength(), k, r = nbv(1), z;
- if(i <= 0) return r;
- else if(i < 18) k = 1;
- else if(i < 48) k = 3;
- else if(i < 144) k = 4;
- else if(i < 768) k = 5;
- else k = 6;
- if(i < 8)
- z = new Classic(m);
- else if(m.isEven())
- z = new Barrett(m);
- else
- z = new Montgomery(m);
-
- // precomputation
- var g = new Array(), n = 3, k1 = k-1, km = (1< 1) {
- var g2 = nbi();
- z.sqrTo(g[1],g2);
- while(n <= km) {
- g[n] = nbi();
- z.mulTo(g2,g[n-2],g[n]);
- n += 2;
- }
- }
-
- var j = e.t-1, w, is1 = true, r2 = nbi(), t;
- i = nbits(e[j])-1;
- while(j >= 0) {
- if(i >= k1) w = (e[j]>>(i-k1))&km;
- else {
- w = (e[j]&((1<<(i+1))-1))<<(k1-i);
- if(j > 0) w |= e[j-1]>>(this.DB+i-k1);
- }
-
- n = k;
- while((w&1) == 0) { w >>= 1; --n; }
- if((i -= n) < 0) { i += this.DB; --j; }
- if(is1) { // ret == 1, don't bother squaring or multiplying it
- g[w].copyTo(r);
- is1 = false;
- }
- else {
- while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }
- if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }
- z.mulTo(r2,g[w],r);
- }
-
- while(j >= 0 && (e[j]&(1< 0) {
- x.rShiftTo(g,x);
- y.rShiftTo(g,y);
- }
- while(x.signum() > 0) {
- if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);
- if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);
- if(x.compareTo(y) >= 0) {
- x.subTo(y,x);
- x.rShiftTo(1,x);
- }
- else {
- y.subTo(x,y);
- y.rShiftTo(1,y);
- }
- }
- if(g > 0) y.lShiftTo(g,y);
- return y;
- }
-
- // (protected) this % n, n < 2^26
- function bnpModInt(n) {
- if(n <= 0) return 0;
- var d = this.DV%n, r = (this.s<0)?n-1:0;
- if(this.t > 0)
- if(d == 0) r = this[0]%n;
- else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n;
- return r;
- }
-
- // (public) 1/this % m (HAC 14.61)
- function bnModInverse(m) {
- var ac = m.isEven();
- if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;
- var u = m.clone(), v = this.clone();
- var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);
- while(u.signum() != 0) {
- while(u.isEven()) {
- u.rShiftTo(1,u);
- if(ac) {
- if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }
- a.rShiftTo(1,a);
- }
- else if(!b.isEven()) b.subTo(m,b);
- b.rShiftTo(1,b);
- }
- while(v.isEven()) {
- v.rShiftTo(1,v);
- if(ac) {
- if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }
- c.rShiftTo(1,c);
- }
- else if(!d.isEven()) d.subTo(m,d);
- d.rShiftTo(1,d);
- }
- if(u.compareTo(v) >= 0) {
- u.subTo(v,u);
- if(ac) a.subTo(c,a);
- b.subTo(d,b);
- }
- else {
- v.subTo(u,v);
- if(ac) c.subTo(a,c);
- d.subTo(b,d);
- }
- }
- if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;
- if(d.compareTo(m) >= 0) return d.subtract(m);
- if(d.signum() < 0) d.addTo(m,d); else return d;
- if(d.signum() < 0) return d.add(m); else return d;
- }
-
- var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997];
- var lplim = (1<<26)/lowprimes[lowprimes.length-1];
-
- // (public) test primality with certainty >= 1-.5^t
- function bnIsProbablePrime(t) {
- var i, x = this.abs();
- if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) {
- for(i = 0; i < lowprimes.length; ++i)
- if(x[0] == lowprimes[i]) return true;
- return false;
- }
- if(x.isEven()) return false;
- i = 1;
- while(i < lowprimes.length) {
- var m = lowprimes[i], j = i+1;
- while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];
- m = x.modInt(m);
- while(i < j) if(m%lowprimes[i++] == 0) return false;
- }
- return x.millerRabin(t);
- }
-
- // (protected) true if probably prime (HAC 4.24, Miller-Rabin)
- function bnpMillerRabin(t) {
- var n1 = this.subtract(BigInteger.ONE);
- var k = n1.getLowestSetBit();
- if(k <= 0) return false;
- var r = n1.shiftRight(k);
- t = (t+1)>>1;
- if(t > lowprimes.length) t = lowprimes.length;
- var a = nbi();
- for(var i = 0; i < t; ++i) {
- //Pick bases at random, instead of starting at 2
- a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]);
- var y = a.modPow(r,this);
- if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {
- var j = 1;
- while(j++ < k && y.compareTo(n1) != 0) {
- y = y.modPowInt(2,this);
- if(y.compareTo(BigInteger.ONE) == 0) return false;
- }
- if(y.compareTo(n1) != 0) return false;
- }
- }
- return true;
- }
-
- // protected
- BigInteger.prototype.chunkSize = bnpChunkSize;
- BigInteger.prototype.toRadix = bnpToRadix;
- BigInteger.prototype.fromRadix = bnpFromRadix;
- BigInteger.prototype.fromNumber = bnpFromNumber;
- BigInteger.prototype.bitwiseTo = bnpBitwiseTo;
- BigInteger.prototype.changeBit = bnpChangeBit;
- BigInteger.prototype.addTo = bnpAddTo;
- BigInteger.prototype.dMultiply = bnpDMultiply;
- BigInteger.prototype.dAddOffset = bnpDAddOffset;
- BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;
- BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;
- BigInteger.prototype.modInt = bnpModInt;
- BigInteger.prototype.millerRabin = bnpMillerRabin;
-
- // public
- BigInteger.prototype.clone = bnClone;
- BigInteger.prototype.intValue = bnIntValue;
- BigInteger.prototype.byteValue = bnByteValue;
- BigInteger.prototype.shortValue = bnShortValue;
- BigInteger.prototype.signum = bnSigNum;
- BigInteger.prototype.toByteArray = bnToByteArray;
- BigInteger.prototype.equals = bnEquals;
- BigInteger.prototype.min = bnMin;
- BigInteger.prototype.max = bnMax;
- BigInteger.prototype.and = bnAnd;
- BigInteger.prototype.or = bnOr;
- BigInteger.prototype.xor = bnXor;
- BigInteger.prototype.andNot = bnAndNot;
- BigInteger.prototype.not = bnNot;
- BigInteger.prototype.shiftLeft = bnShiftLeft;
- BigInteger.prototype.shiftRight = bnShiftRight;
- BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;
- BigInteger.prototype.bitCount = bnBitCount;
- BigInteger.prototype.testBit = bnTestBit;
- BigInteger.prototype.setBit = bnSetBit;
- BigInteger.prototype.clearBit = bnClearBit;
- BigInteger.prototype.flipBit = bnFlipBit;
- BigInteger.prototype.add = bnAdd;
- BigInteger.prototype.subtract = bnSubtract;
- BigInteger.prototype.multiply = bnMultiply;
- BigInteger.prototype.divide = bnDivide;
- BigInteger.prototype.remainder = bnRemainder;
- BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;
- BigInteger.prototype.modPow = bnModPow;
- BigInteger.prototype.modInverse = bnModInverse;
- BigInteger.prototype.pow = bnPow;
- BigInteger.prototype.gcd = bnGCD;
- BigInteger.prototype.isProbablePrime = bnIsProbablePrime;
-
- // JSBN-specific extension
- BigInteger.prototype.square = bnSquare;
-
- // Expose the Barrett function
- BigInteger.prototype.Barrett = Barrett
-
- // BigInteger interfaces not implemented in jsbn:
-
- // BigInteger(int signum, byte[] magnitude)
- // double doubleValue()
- // float floatValue()
- // int hashCode()
- // long longValue()
- // static BigInteger valueOf(long val)
-
- // Random number generator - requires a PRNG backend, e.g. prng4.js
-
- // For best results, put code like
- //
- // in your main HTML document.
-
- var rng_state;
- var rng_pool;
- var rng_pptr;
-
- // Mix in a 32-bit integer into the pool
- function rng_seed_int(x) {
- rng_pool[rng_pptr++] ^= x & 255;
- rng_pool[rng_pptr++] ^= (x >> 8) & 255;
- rng_pool[rng_pptr++] ^= (x >> 16) & 255;
- rng_pool[rng_pptr++] ^= (x >> 24) & 255;
- if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
- }
-
- // Mix in the current time (w/milliseconds) into the pool
- function rng_seed_time() {
- rng_seed_int(new Date().getTime());
- }
-
- // Initialize the pool with junk if needed.
- if(rng_pool == null) {
- rng_pool = new Array();
- rng_pptr = 0;
- var t;
- if(typeof window !== "undefined" && window.crypto) {
- if (window.crypto.getRandomValues) {
- // Use webcrypto if available
- var ua = new Uint8Array(32);
- window.crypto.getRandomValues(ua);
- for(t = 0; t < 32; ++t)
- rng_pool[rng_pptr++] = ua[t];
- }
- else if(navigator.appName == "Netscape" && navigator.appVersion < "5") {
- // Extract entropy (256 bits) from NS4 RNG if available
- var z = window.crypto.random(32);
- for(t = 0; t < z.length; ++t)
- rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
- }
- }
- while(rng_pptr < rng_psize) { // extract some randomness from Math.random()
- t = Math.floor(65536 * Math.random());
- rng_pool[rng_pptr++] = t >>> 8;
- rng_pool[rng_pptr++] = t & 255;
- }
- rng_pptr = 0;
- rng_seed_time();
- //rng_seed_int(window.screenX);
- //rng_seed_int(window.screenY);
- }
-
- function rng_get_byte() {
- if(rng_state == null) {
- rng_seed_time();
- rng_state = prng_newstate();
- rng_state.init(rng_pool);
- for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
- rng_pool[rng_pptr] = 0;
- rng_pptr = 0;
- //rng_pool = null;
- }
- // TODO: allow reseeding after first request
- return rng_state.next();
- }
-
- function rng_get_bytes(ba) {
- var i;
- for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
- }
-
- function SecureRandom() {}
-
- SecureRandom.prototype.nextBytes = rng_get_bytes;
-
- // prng4.js - uses Arcfour as a PRNG
-
- function Arcfour() {
- this.i = 0;
- this.j = 0;
- this.S = new Array();
- }
-
- // Initialize arcfour context from key, an array of ints, each from [0..255]
- function ARC4init(key) {
- var i, j, t;
- for(i = 0; i < 256; ++i)
- this.S[i] = i;
- j = 0;
- for(i = 0; i < 256; ++i) {
- j = (j + this.S[i] + key[i % key.length]) & 255;
- t = this.S[i];
- this.S[i] = this.S[j];
- this.S[j] = t;
- }
- this.i = 0;
- this.j = 0;
- }
-
- function ARC4next() {
- var t;
- this.i = (this.i + 1) & 255;
- this.j = (this.j + this.S[this.i]) & 255;
- t = this.S[this.i];
- this.S[this.i] = this.S[this.j];
- this.S[this.j] = t;
- return this.S[(t + this.S[this.i]) & 255];
- }
-
- Arcfour.prototype.init = ARC4init;
- Arcfour.prototype.next = ARC4next;
-
- // Plug in your RNG constructor here
- function prng_newstate() {
- return new Arcfour();
- }
-
- // Pool size must be a multiple of 4 and greater than 32.
- // An array of bytes the size of the pool will be passed to init()
- var rng_psize = 256;
-
- if (typeof exports !== 'undefined') {
- exports = module.exports = {
- default: BigInteger,
- BigInteger: BigInteger,
- SecureRandom: SecureRandom,
- };
- } else {
- this.jsbn = {
- BigInteger: BigInteger,
- SecureRandom: SecureRandom
- };
- }
-
-}).call(this);
diff --git a/deps/npm/node_modules/jsbn/package.json b/deps/npm/node_modules/jsbn/package.json
deleted file mode 100644
index 97b137c2e2db9b..00000000000000
--- a/deps/npm/node_modules/jsbn/package.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- "name": "jsbn",
- "version": "1.1.0",
- "description": "The jsbn library is a fast, portable implementation of large-number math in pure JavaScript, enabling public-key crypto and other applications on desktop and mobile browsers.",
- "main": "index.js",
- "scripts": {
- "test": "mocha test.js"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/andyperlitch/jsbn.git"
- },
- "keywords": [
- "biginteger",
- "bignumber",
- "big",
- "integer"
- ],
- "author": "Tom Wu",
- "license": "MIT"
-}
diff --git a/deps/npm/node_modules/jsbn/test/es6-import.js b/deps/npm/node_modules/jsbn/test/es6-import.js
deleted file mode 100644
index 668cbdfdc5bef3..00000000000000
--- a/deps/npm/node_modules/jsbn/test/es6-import.js
+++ /dev/null
@@ -1,3 +0,0 @@
-import {BigInteger} from '../';
-
-console.log(typeof BigInteger)
diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json
index c13cea28bce064..12167a6cef4a40 100644
--- a/deps/npm/node_modules/libnpmaccess/package.json
+++ b/deps/npm/node_modules/libnpmaccess/package.json
@@ -18,7 +18,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -42,7 +42,7 @@
],
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmdiff/lib/untar.js b/deps/npm/node_modules/libnpmdiff/lib/untar.js
index 341ae27d1e8263..d832f29f026dc6 100644
--- a/deps/npm/node_modules/libnpmdiff/lib/untar.js
+++ b/deps/npm/node_modules/libnpmdiff/lib/untar.js
@@ -37,7 +37,6 @@ const untar = ({ files, refs }, { filterFiles, item, prefix }) => {
// should skip reading file when using --name-only option
let content
try {
- entry.setEncoding('utf8')
content = entry.concat()
} catch (e) {
/* istanbul ignore next */
@@ -84,7 +83,7 @@ const readTarballs = async (tarballs, opts = {}) => {
const contents = await Promise.all(allRefs.map(async ref => ref.content))
contents.forEach((content, index) => {
- allRefs[index].content = content
+ allRefs[index].content = content.toString('utf8')
})
return {
diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json
index 9e2fff29442a1e..90c58f1c4b4d6f 100644
--- a/deps/npm/node_modules/libnpmdiff/package.json
+++ b/deps/npm/node_modules/libnpmdiff/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmdiff",
- "version": "7.0.1",
+ "version": "7.0.3",
"description": "The registry diff",
"repository": {
"type": "git",
@@ -43,22 +43,22 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^8.0.1",
+ "@npmcli/arborist": "^8.0.3",
"@npmcli/installed-package-contents": "^3.0.0",
"binary-extensions": "^2.3.0",
"diff": "^5.1.0",
"minimatch": "^9.0.4",
"npm-package-arg": "^12.0.0",
"pacote": "^19.0.0",
- "tar": "^6.2.1"
+ "tar": "^7.5.11"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json
index f987e492b212f2..bbaec99a07e46c 100644
--- a/deps/npm/node_modules/libnpmexec/package.json
+++ b/deps/npm/node_modules/libnpmexec/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmexec",
- "version": "9.0.1",
+ "version": "9.0.3",
"files": [
"bin/",
"lib/"
@@ -52,7 +52,7 @@
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"bin-links": "^5.0.0",
"chalk": "^5.2.0",
"just-extend": "^6.2.0",
@@ -60,7 +60,7 @@
"tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^8.0.1",
+ "@npmcli/arborist": "^8.0.3",
"@npmcli/run-script": "^9.0.1",
"ci-info": "^4.0.0",
"npm-package-arg": "^12.0.0",
@@ -73,7 +73,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json
index 44599e839ef4a2..2732e1c40b74cb 100644
--- a/deps/npm/node_modules/libnpmfund/package.json
+++ b/deps/npm/node_modules/libnpmfund/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmfund",
- "version": "6.0.1",
+ "version": "6.0.3",
"main": "lib/index.js",
"files": [
"bin/",
@@ -42,18 +42,18 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"tap": "^16.3.8"
},
"dependencies": {
- "@npmcli/arborist": "^8.0.1"
+ "@npmcli/arborist": "^8.0.3"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json
index 1b707ed9c37b8f..06e8d175fc2c97 100644
--- a/deps/npm/node_modules/libnpmhook/package.json
+++ b/deps/npm/node_modules/libnpmhook/package.json
@@ -36,7 +36,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -45,7 +45,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json
index 55d2802ffbb518..4e4c3c8204dcd2 100644
--- a/deps/npm/node_modules/libnpmorg/package.json
+++ b/deps/npm/node_modules/libnpmorg/package.json
@@ -29,7 +29,7 @@
],
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"minipass": "^7.1.1",
"nock": "^13.3.3",
"tap": "^16.3.8"
@@ -50,7 +50,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json
index 3d29025d85066d..5ba811c30e2f45 100644
--- a/deps/npm/node_modules/libnpmpack/package.json
+++ b/deps/npm/node_modules/libnpmpack/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpack",
- "version": "8.0.1",
+ "version": "8.0.3",
"description": "Programmatic API for the bits behind npm pack",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -24,7 +24,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"nock": "^13.3.3",
"spawk": "^1.7.1",
"tap": "^16.3.8"
@@ -37,7 +37,7 @@
"bugs": "https://github.com/npm/libnpmpack/issues",
"homepage": "https://npmjs.com/package/libnpmpack",
"dependencies": {
- "@npmcli/arborist": "^8.0.1",
+ "@npmcli/arborist": "^8.0.3",
"@npmcli/run-script": "^9.0.1",
"npm-package-arg": "^12.0.0",
"pacote": "^19.0.0"
@@ -47,7 +47,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json
index 1118679c90828d..19877c45006dc5 100644
--- a/deps/npm/node_modules/libnpmpublish/package.json
+++ b/deps/npm/node_modules/libnpmpublish/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpublish",
- "version": "10.0.1",
+ "version": "10.0.2",
"description": "Programmatic API for the bits behind npm publish and unpublish",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -27,7 +27,7 @@
"@npmcli/eslint-config": "^5.0.1",
"@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -53,7 +53,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json
index 66c9cd289261fd..219f38132d5542 100644
--- a/deps/npm/node_modules/libnpmsearch/package.json
+++ b/deps/npm/node_modules/libnpmsearch/package.json
@@ -27,7 +27,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -46,7 +46,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json
index f5ca76c6b1a164..ecb34ab48eff67 100644
--- a/deps/npm/node_modules/libnpmteam/package.json
+++ b/deps/npm/node_modules/libnpmteam/package.json
@@ -17,7 +17,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"nock": "^13.3.3",
"tap": "^16.3.8"
},
@@ -40,7 +40,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
},
"tap": {
diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json
index 7c99fa9c5c0708..f6fa3b533b3620 100644
--- a/deps/npm/node_modules/libnpmversion/package.json
+++ b/deps/npm/node_modules/libnpmversion/package.json
@@ -33,7 +33,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.1",
- "@npmcli/template-oss": "4.24.4",
+ "@npmcli/template-oss": "4.29.0",
"require-inject": "^1.4.4",
"tap": "^16.3.8"
},
@@ -49,7 +49,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.4",
+ "version": "4.29.0",
"content": "../../scripts/template-oss/index.js"
}
}
diff --git a/deps/npm/node_modules/minimatch/dist/commonjs/ast.js b/deps/npm/node_modules/minimatch/dist/commonjs/ast.js
index 9e1f9e765c597e..fa116ee7cc3ed2 100644
--- a/deps/npm/node_modules/minimatch/dist/commonjs/ast.js
+++ b/deps/npm/node_modules/minimatch/dist/commonjs/ast.js
@@ -1,11 +1,38 @@
"use strict";
// parse a single path portion
+var _a;
Object.defineProperty(exports, "__esModule", { value: true });
exports.AST = void 0;
const brace_expressions_js_1 = require("./brace-expressions.js");
const unescape_js_1 = require("./unescape.js");
const types = new Set(['!', '?', '+', '*', '@']);
const isExtglobType = (c) => types.has(c);
+const isExtglobAST = (c) => isExtglobType(c.type);
+const adoptionMap = new Map([
+ ['!', ['@']],
+ ['?', ['?', '@']],
+ ['@', ['@']],
+ ['*', ['*', '+', '?', '@']],
+ ['+', ['+', '@']],
+]);
+const adoptionWithSpaceMap = new Map([
+ ['!', ['?']],
+ ['@', ['?']],
+ ['+', ['?', '*']],
+]);
+const adoptionAnyMap = new Map([
+ ['!', ['?', '@']],
+ ['?', ['?', '@']],
+ ['@', ['?', '@']],
+ ['*', ['*', '+', '?', '@']],
+ ['+', ['+', '@', '?', '*']],
+]);
+const usurpMap = new Map([
+ ['!', new Map([['!', '@']])],
+ ['?', new Map([['*', '*'], ['+', '*']])],
+ ['@', new Map([['!', '!'], ['?', '?'], ['@', '@'], ['*', '*'], ['+', '+']])],
+ ['+', new Map([['?', '*'], ['*', '*']])],
+]);
// Patterns that get prepended to bind to the start of either the
// entire string, or just a single path portion, to prevent dots
// and/or traversal patterns, when needed.
@@ -122,7 +149,7 @@ class AST {
if (p === '')
continue;
/* c8 ignore start */
- if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+ if (typeof p !== 'string' && !(p instanceof _a && p.#parent === this)) {
throw new Error('invalid part: ' + p);
}
/* c8 ignore stop */
@@ -154,7 +181,7 @@ class AST {
const p = this.#parent;
for (let i = 0; i < this.#parentIndex; i++) {
const pp = p.#parts[i];
- if (!(pp instanceof AST && pp.type === '!')) {
+ if (!(pp instanceof _a && pp.type === '!')) {
return false;
}
}
@@ -182,13 +209,14 @@ class AST {
this.push(part.clone(this));
}
clone(parent) {
- const c = new AST(this.type, parent);
+ const c = new _a(this.type, parent);
for (const p of this.#parts) {
c.copyIn(p);
}
return c;
}
- static #parseAST(str, ast, pos, opt) {
+ static #parseAST(str, ast, pos, opt, extDepth) {
+ const maxDepth = opt.maxExtglobRecursion ?? 2;
let escaping = false;
let inBrace = false;
let braceStart = -1;
@@ -225,11 +253,15 @@ class AST {
acc += c;
continue;
}
- if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+ const doRecurse = !opt.noext &&
+ isExtglobType(c) &&
+ str.charAt(i) === '(' &&
+ extDepth <= maxDepth;
+ if (doRecurse) {
ast.push(acc);
acc = '';
- const ext = new AST(c, ast);
- i = AST.#parseAST(str, ext, i, opt);
+ const ext = new _a(c, ast);
+ i = _a.#parseAST(str, ext, i, opt, extDepth + 1);
ast.push(ext);
continue;
}
@@ -241,7 +273,7 @@ class AST {
// some kind of extglob, pos is at the (
// find the next | or )
let i = pos + 1;
- let part = new AST(null, ast);
+ let part = new _a(null, ast);
const parts = [];
let acc = '';
while (i < str.length) {
@@ -272,19 +304,25 @@ class AST {
acc += c;
continue;
}
- if (isExtglobType(c) && str.charAt(i) === '(') {
+ const doRecurse = isExtglobType(c) &&
+ str.charAt(i) === '(' &&
+ /* c8 ignore start - the maxDepth is sufficient here */
+ (extDepth <= maxDepth || (ast && ast.#canAdoptType(c)));
+ /* c8 ignore stop */
+ if (doRecurse) {
+ const depthAdd = ast && ast.#canAdoptType(c) ? 0 : 1;
part.push(acc);
acc = '';
- const ext = new AST(c, part);
+ const ext = new _a(c, part);
part.push(ext);
- i = AST.#parseAST(str, ext, i, opt);
+ i = _a.#parseAST(str, ext, i, opt, extDepth + depthAdd);
continue;
}
if (c === '|') {
part.push(acc);
acc = '';
parts.push(part);
- part = new AST(null, ast);
+ part = new _a(null, ast);
continue;
}
if (c === ')') {
@@ -306,9 +344,115 @@ class AST {
ast.#parts = [str.substring(pos - 1)];
return i;
}
+ #canAdoptWithSpace(child) {
+ return this.#canAdopt(child, adoptionWithSpaceMap);
+ }
+ #canAdopt(child, map = adoptionMap) {
+ if (!child ||
+ typeof child !== 'object' ||
+ child.type !== null ||
+ child.#parts.length !== 1 ||
+ this.type === null) {
+ return false;
+ }
+ const gc = child.#parts[0];
+ if (!gc || typeof gc !== 'object' || gc.type === null) {
+ return false;
+ }
+ return this.#canAdoptType(gc.type, map);
+ }
+ #canAdoptType(c, map = adoptionAnyMap) {
+ return !!map.get(this.type)?.includes(c);
+ }
+ #adoptWithSpace(child, index) {
+ const gc = child.#parts[0];
+ const blank = new _a(null, gc, this.options);
+ blank.#parts.push('');
+ gc.push(blank);
+ this.#adopt(child, index);
+ }
+ #adopt(child, index) {
+ const gc = child.#parts[0];
+ this.#parts.splice(index, 1, ...gc.#parts);
+ for (const p of gc.#parts) {
+ if (typeof p === 'object')
+ p.#parent = this;
+ }
+ this.#toString = undefined;
+ }
+ #canUsurpType(c) {
+ const m = usurpMap.get(this.type);
+ return !!(m?.has(c));
+ }
+ #canUsurp(child) {
+ if (!child ||
+ typeof child !== 'object' ||
+ child.type !== null ||
+ child.#parts.length !== 1 ||
+ this.type === null ||
+ this.#parts.length !== 1) {
+ return false;
+ }
+ const gc = child.#parts[0];
+ if (!gc || typeof gc !== 'object' || gc.type === null) {
+ return false;
+ }
+ return this.#canUsurpType(gc.type);
+ }
+ #usurp(child) {
+ const m = usurpMap.get(this.type);
+ const gc = child.#parts[0];
+ const nt = m?.get(gc.type);
+ /* c8 ignore start - impossible */
+ if (!nt)
+ return false;
+ /* c8 ignore stop */
+ this.#parts = gc.#parts;
+ for (const p of this.#parts) {
+ if (typeof p === 'object')
+ p.#parent = this;
+ }
+ this.type = nt;
+ this.#toString = undefined;
+ this.#emptyExt = false;
+ }
+ #flatten() {
+ if (!isExtglobAST(this)) {
+ for (const p of this.#parts) {
+ if (typeof p === 'object')
+ p.#flatten();
+ }
+ }
+ else {
+ let iterations = 0;
+ let done = false;
+ do {
+ done = true;
+ for (let i = 0; i < this.#parts.length; i++) {
+ const c = this.#parts[i];
+ if (typeof c === 'object') {
+ c.#flatten();
+ if (this.#canAdopt(c)) {
+ done = false;
+ this.#adopt(c, i);
+ }
+ else if (this.#canAdoptWithSpace(c)) {
+ done = false;
+ this.#adoptWithSpace(c, i);
+ }
+ else if (this.#canUsurp(c)) {
+ done = false;
+ this.#usurp(c);
+ }
+ }
+ }
+ } while (!done && ++iterations < 10);
+ }
+ this.#toString = undefined;
+ }
static fromGlob(pattern, options = {}) {
- const ast = new AST(null, undefined, options);
- AST.#parseAST(pattern, ast, 0, options);
+ const ast = new _a(null, undefined, options);
+ _a.#parseAST(pattern, ast, 0, options, 0);
return ast;
}
// returns the regular expression if there's magic, or the unescaped
@@ -412,14 +556,16 @@ class AST {
// or start or whatever) and prepend ^ or / at the Regexp construction.
toRegExpSource(allowDot) {
const dot = allowDot ?? !!this.#options.dot;
- if (this.#root === this)
+ if (this.#root === this) {
+ this.#flatten();
this.#fillNegs();
- if (!this.type) {
+ }
+ if (!isExtglobAST(this)) {
const noEmpty = this.isStart() && this.isEnd();
const src = this.#parts
.map(p => {
const [re, _, hasMagic, uflag] = typeof p === 'string'
- ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+ ? _a.#parseGlob(p, this.#hasMagic, noEmpty)
: p.toRegExpSource(allowDot);
this.#hasMagic = this.#hasMagic || hasMagic;
this.#uflag = this.#uflag || uflag;
@@ -478,9 +624,10 @@ class AST {
// invalid extglob, has to at least be *something* present, if it's
// the entire path portion.
const s = this.toString();
- this.#parts = [s];
- this.type = null;
- this.#hasMagic = undefined;
+ const me = this;
+ me.#parts = [s];
+ me.type = null;
+ me.#hasMagic = undefined;
return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
}
// XXX abstract out this map method
@@ -544,11 +691,14 @@ class AST {
let escaping = false;
let re = '';
let uflag = false;
+ // multiple stars that aren't globstars coalesce into one *
+ let inStar = false;
for (let i = 0; i < glob.length; i++) {
const c = glob.charAt(i);
if (escaping) {
escaping = false;
re += (reSpecials.has(c) ? '\\' : '') + c;
+ inStar = false;
continue;
}
if (c === '\\') {
@@ -567,17 +717,21 @@ class AST {
uflag = uflag || needUflag;
i += consumed - 1;
hasMagic = hasMagic || magic;
+ inStar = false;
continue;
}
}
if (c === '*') {
- if (noEmpty && glob === '*')
- re += starNoEmpty;
- else
- re += star;
+ if (inStar)
+ continue;
+ inStar = true;
+ re += noEmpty && /^[*]+$/.test(glob) ? starNoEmpty : star;
hasMagic = true;
continue;
}
+ else {
+ inStar = false;
+ }
if (c === '?') {
re += qmark;
hasMagic = true;
@@ -589,4 +743,5 @@ class AST {
}
}
exports.AST = AST;
+_a = AST;
//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minimatch/dist/commonjs/index.js b/deps/npm/node_modules/minimatch/dist/commonjs/index.js
index 64a0f1f833222e..c12dc5e6476874 100644
--- a/deps/npm/node_modules/minimatch/dist/commonjs/index.js
+++ b/deps/npm/node_modules/minimatch/dist/commonjs/index.js
@@ -205,11 +205,13 @@ class Minimatch {
isWindows;
platform;
windowsNoMagicRoot;
+ maxGlobstarRecursion;
regexp;
constructor(pattern, options = {}) {
(0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
options = options || {};
this.options = options;
+ this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200;
this.pattern = pattern;
this.platform = options.platform || defaultPlatform;
this.isWindows = this.platform === 'win32';
@@ -609,7 +611,8 @@ class Minimatch {
// out of pattern, then that's fine, as long as all
// the parts match.
matchOne(file, pattern, partial = false) {
- const options = this.options;
+ let fileStartIndex = 0;
+ let patternStartIndex = 0;
// UNC paths like //?/X:/... can match X:/... and vice versa
// Drive letters in absolute drive or unc paths are always compared
// case-insensitively.
@@ -630,15 +633,14 @@ class Minimatch {
const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
if (typeof fdi === 'number' && typeof pdi === 'number') {
- const [fd, pd] = [file[fdi], pattern[pdi]];
+ const [fd, pd] = [
+ file[fdi],
+ pattern[pdi],
+ ];
if (fd.toLowerCase() === pd.toLowerCase()) {
pattern[pdi] = fd;
- if (pdi > fdi) {
- pattern = pattern.slice(pdi);
- }
- else if (fdi > pdi) {
- file = file.slice(fdi);
- }
+ patternStartIndex = pdi;
+ fileStartIndex = fdi;
}
}
}
@@ -648,102 +650,127 @@ class Minimatch {
if (optimizationLevel >= 2) {
file = this.levelTwoFileOptimize(file);
}
- this.debug('matchOne', this, { file, pattern });
- this.debug('matchOne', file.length, pattern.length);
- for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
- this.debug('matchOne loop');
- var p = pattern[pi];
- var f = file[fi];
- this.debug(pattern, p, f);
- // should be impossible.
- // some invalid regexp stuff in the set.
- /* c8 ignore start */
- if (p === false) {
+ if (pattern.includes(exports.GLOBSTAR)) {
+ return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex);
+ }
+ return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex);
+ }
+ #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) {
+ const firstgs = pattern.indexOf(exports.GLOBSTAR, patternIndex);
+ const lastgs = pattern.lastIndexOf(exports.GLOBSTAR);
+ const [head, body, tail] = partial ? [
+ pattern.slice(patternIndex, firstgs),
+ pattern.slice(firstgs + 1),
+ [],
+ ] : [
+ pattern.slice(patternIndex, firstgs),
+ pattern.slice(firstgs + 1, lastgs),
+ pattern.slice(lastgs + 1),
+ ];
+ if (head.length) {
+ const fileHead = file.slice(fileIndex, fileIndex + head.length);
+ if (!this.#matchOne(fileHead, head, partial, 0, 0))
return false;
+ fileIndex += head.length;
+ }
+ let fileTailMatch = 0;
+ if (tail.length) {
+ if (tail.length + fileIndex > file.length)
+ return false;
+ let tailStart = file.length - tail.length;
+ if (this.#matchOne(file, tail, partial, tailStart, 0)) {
+ fileTailMatch = tail.length;
}
- /* c8 ignore stop */
- if (p === exports.GLOBSTAR) {
- this.debug('GLOBSTAR', [pattern, p, f]);
- // "**"
- // a/**/b/**/c would match the following:
- // a/b/x/y/z/c
- // a/x/y/z/b/c
- // a/b/x/b/x/c
- // a/b/c
- // To do this, take the rest of the pattern after
- // the **, and see if it would match the file remainder.
- // If so, return success.
- // If not, the ** "swallows" a segment, and try again.
- // This is recursively awful.
- //
- // a/**/b/**/c matching a/b/x/y/z/c
- // - a matches a
- // - doublestar
- // - matchOne(b/x/y/z/c, b/**/c)
- // - b matches b
- // - doublestar
- // - matchOne(x/y/z/c, c) -> no
- // - matchOne(y/z/c, c) -> no
- // - matchOne(z/c, c) -> no
- // - matchOne(c, c) yes, hit
- var fr = fi;
- var pr = pi + 1;
- if (pr === pl) {
- this.debug('** at the end');
- // a ** at the end will just swallow the rest.
- // We have found a match.
- // however, it will not swallow /.x, unless
- // options.dot is set.
- // . and .. are *never* matched by **, for explosively
- // exponential reasons.
- for (; fi < fl; fi++) {
- if (file[fi] === '.' ||
- file[fi] === '..' ||
- (!options.dot && file[fi].charAt(0) === '.'))
- return false;
- }
- return true;
+ else {
+ if (file[file.length - 1] !== '' ||
+ fileIndex + tail.length === file.length) {
+ return false;
}
- // ok, let's see if we can swallow whatever we can.
- while (fr < fl) {
- var swallowee = file[fr];
- this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
- // XXX remove this slice. Just pass the start index.
- if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
- this.debug('globstar found match!', fr, fl, swallowee);
- // found a match.
- return true;
- }
- else {
- // can't swallow "." or ".." ever.
- // can only swallow ".foo" when explicitly asked.
- if (swallowee === '.' ||
- swallowee === '..' ||
- (!options.dot && swallowee.charAt(0) === '.')) {
- this.debug('dot detected!', file, fr, pattern, pr);
- break;
- }
- // ** swallows a segment, and continue.
- this.debug('globstar swallow a segment, and continue');
- fr++;
- }
+ tailStart--;
+ if (!this.#matchOne(file, tail, partial, tailStart, 0))
+ return false;
+ fileTailMatch = tail.length + 1;
+ }
+ }
+ if (!body.length) {
+ let sawSome = !!fileTailMatch;
+ for (let i = fileIndex; i < file.length - fileTailMatch; i++) {
+ const f = String(file[i]);
+ sawSome = true;
+ if (f === '.' || f === '..' ||
+ (!this.options.dot && f.startsWith('.'))) {
+ return false;
}
- // no match was found.
- // However, in partial mode, we can't say this is necessarily over.
- /* c8 ignore start */
- if (partial) {
- // ran out of file
- this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
- if (fr === fl) {
- return true;
- }
+ }
+ return partial || sawSome;
+ }
+ const bodySegments = [[[], 0]];
+ let currentBody = bodySegments[0];
+ let nonGsParts = 0;
+ const nonGsPartsSums = [0];
+ for (const b of body) {
+ if (b === exports.GLOBSTAR) {
+ nonGsPartsSums.push(nonGsParts);
+ currentBody = [[], 0];
+ bodySegments.push(currentBody);
+ }
+ else {
+ currentBody[0].push(b);
+ nonGsParts++;
+ }
+ }
+ let i = bodySegments.length - 1;
+ const fileLength = file.length - fileTailMatch;
+ for (const b of bodySegments) {
+ b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length);
+ }
+ return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch);
+ }
+ #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
+ const bs = bodySegments[bodyIndex];
+ if (!bs) {
+ for (let i = fileIndex; i < file.length; i++) {
+ sawTail = true;
+ const f = file[i];
+ if (f === '.' || f === '..' ||
+ (!this.options.dot && f.startsWith('.'))) {
+ return false;
}
- /* c8 ignore stop */
+ }
+ return sawTail;
+ }
+ const [body, after] = bs;
+ while (fileIndex <= after) {
+ const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0);
+ if (m && globStarDepth < this.maxGlobstarRecursion) {
+ const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail);
+ if (sub !== false)
+ return sub;
+ }
+ const f = file[fileIndex];
+ if (f === '.' || f === '..' ||
+ (!this.options.dot && f.startsWith('.'))) {
return false;
}
- // something other than **
- // non-magic patterns just have to match exactly
- // patterns with magic have been turned into regexps.
+ fileIndex++;
+ }
+ return partial || null;
+ }
+ #matchOne(file, pattern, partial, fileIndex, patternIndex) {
+ let fi;
+ let pi;
+ let pl;
+ let fl;
+ for (fi = fileIndex, pi = patternIndex,
+ fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+ this.debug('matchOne loop');
+ let p = pattern[pi];
+ let f = file[fi];
+ this.debug(pattern, p, f);
+ /* c8 ignore start */
+ if (p === false || p === exports.GLOBSTAR)
+ return false;
+ /* c8 ignore stop */
let hit;
if (typeof p === 'string') {
hit = f === p;
@@ -756,38 +783,17 @@ class Minimatch {
if (!hit)
return false;
}
- // Note: ending in / means that we'll get a final ""
- // at the end of the pattern. This can only match a
- // corresponding "" at the end of the file.
- // If the file ends in /, then it can only match a
- // a pattern that ends in /, unless the pattern just
- // doesn't have any more for it. But, a/b/ should *not*
- // match "a/b/*", even though "" matches against the
- // [^/]*? pattern, except in partial mode, where it might
- // simply not be reached yet.
- // However, a/b/ should still satisfy a/*
- // now either we fell off the end of the pattern, or we're done.
if (fi === fl && pi === pl) {
- // ran out of pattern and filename at the same time.
- // an exact hit!
return true;
}
else if (fi === fl) {
- // ran out of file, but still had pattern left.
- // this is ok if we're doing the match as part of
- // a glob fs traversal.
return partial;
}
else if (pi === pl) {
- // ran out of pattern, still have file left.
- // this is only acceptable if we're on the very last
- // empty segment of a file with a trailing slash.
- // a/* should match a/b/
return fi === fl - 1 && file[fi] === '';
/* c8 ignore start */
}
else {
- // should be unreachable.
throw new Error('wtf?');
}
/* c8 ignore stop */
diff --git a/deps/npm/node_modules/minimatch/dist/esm/ast.js b/deps/npm/node_modules/minimatch/dist/esm/ast.js
index 02c6bda68427fc..437ec43641af0e 100644
--- a/deps/npm/node_modules/minimatch/dist/esm/ast.js
+++ b/deps/npm/node_modules/minimatch/dist/esm/ast.js
@@ -1,8 +1,35 @@
// parse a single path portion
+var _a;
import { parseClass } from './brace-expressions.js';
import { unescape } from './unescape.js';
const types = new Set(['!', '?', '+', '*', '@']);
const isExtglobType = (c) => types.has(c);
+const isExtglobAST = (c) => isExtglobType(c.type);
+const adoptionMap = new Map([
+ ['!', ['@']],
+ ['?', ['?', '@']],
+ ['@', ['@']],
+ ['*', ['*', '+', '?', '@']],
+ ['+', ['+', '@']],
+]);
+const adoptionWithSpaceMap = new Map([
+ ['!', ['?']],
+ ['@', ['?']],
+ ['+', ['?', '*']],
+]);
+const adoptionAnyMap = new Map([
+ ['!', ['?', '@']],
+ ['?', ['?', '@']],
+ ['@', ['?', '@']],
+ ['*', ['*', '+', '?', '@']],
+ ['+', ['+', '@', '?', '*']],
+]);
+const usurpMap = new Map([
+ ['!', new Map([['!', '@']])],
+ ['?', new Map([['*', '*'], ['+', '*']])],
+ ['@', new Map([['!', '!'], ['?', '?'], ['@', '@'], ['*', '*'], ['+', '+']])],
+ ['+', new Map([['?', '*'], ['*', '*']])],
+]);
// Patterns that get prepended to bind to the start of either the
// entire string, or just a single path portion, to prevent dots
// and/or traversal patterns, when needed.
@@ -119,7 +146,7 @@ export class AST {
if (p === '')
continue;
/* c8 ignore start */
- if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+ if (typeof p !== 'string' && !(p instanceof _a && p.#parent === this)) {
throw new Error('invalid part: ' + p);
}
/* c8 ignore stop */
@@ -151,7 +178,7 @@ export class AST {
const p = this.#parent;
for (let i = 0; i < this.#parentIndex; i++) {
const pp = p.#parts[i];
- if (!(pp instanceof AST && pp.type === '!')) {
+ if (!(pp instanceof _a && pp.type === '!')) {
return false;
}
}
@@ -179,13 +206,14 @@ export class AST {
this.push(part.clone(this));
}
clone(parent) {
- const c = new AST(this.type, parent);
+ const c = new _a(this.type, parent);
for (const p of this.#parts) {
c.copyIn(p);
}
return c;
}
- static #parseAST(str, ast, pos, opt) {
+ static #parseAST(str, ast, pos, opt, extDepth) {
+ const maxDepth = opt.maxExtglobRecursion ?? 2;
let escaping = false;
let inBrace = false;
let braceStart = -1;
@@ -222,11 +250,15 @@ export class AST {
acc += c;
continue;
}
- if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+ const doRecurse = !opt.noext &&
+ isExtglobType(c) &&
+ str.charAt(i) === '(' &&
+ extDepth <= maxDepth;
+ if (doRecurse) {
ast.push(acc);
acc = '';
- const ext = new AST(c, ast);
- i = AST.#parseAST(str, ext, i, opt);
+ const ext = new _a(c, ast);
+ i = _a.#parseAST(str, ext, i, opt, extDepth + 1);
ast.push(ext);
continue;
}
@@ -238,7 +270,7 @@ export class AST {
// some kind of extglob, pos is at the (
// find the next | or )
let i = pos + 1;
- let part = new AST(null, ast);
+ let part = new _a(null, ast);
const parts = [];
let acc = '';
while (i < str.length) {
@@ -269,19 +301,25 @@ export class AST {
acc += c;
continue;
}
- if (isExtglobType(c) && str.charAt(i) === '(') {
+ const doRecurse = isExtglobType(c) &&
+ str.charAt(i) === '(' &&
+ /* c8 ignore start - the maxDepth is sufficient here */
+ (extDepth <= maxDepth || (ast && ast.#canAdoptType(c)));
+ /* c8 ignore stop */
+ if (doRecurse) {
+ const depthAdd = ast && ast.#canAdoptType(c) ? 0 : 1;
part.push(acc);
acc = '';
- const ext = new AST(c, part);
+ const ext = new _a(c, part);
part.push(ext);
- i = AST.#parseAST(str, ext, i, opt);
+ i = _a.#parseAST(str, ext, i, opt, extDepth + depthAdd);
continue;
}
if (c === '|') {
part.push(acc);
acc = '';
parts.push(part);
- part = new AST(null, ast);
+ part = new _a(null, ast);
continue;
}
if (c === ')') {
@@ -303,9 +341,115 @@ export class AST {
ast.#parts = [str.substring(pos - 1)];
return i;
}
+ #canAdoptWithSpace(child) {
+ return this.#canAdopt(child, adoptionWithSpaceMap);
+ }
+ #canAdopt(child, map = adoptionMap) {
+ if (!child ||
+ typeof child !== 'object' ||
+ child.type !== null ||
+ child.#parts.length !== 1 ||
+ this.type === null) {
+ return false;
+ }
+ const gc = child.#parts[0];
+ if (!gc || typeof gc !== 'object' || gc.type === null) {
+ return false;
+ }
+ return this.#canAdoptType(gc.type, map);
+ }
+ #canAdoptType(c, map = adoptionAnyMap) {
+ return !!map.get(this.type)?.includes(c);
+ }
+ #adoptWithSpace(child, index) {
+ const gc = child.#parts[0];
+ const blank = new _a(null, gc, this.options);
+ blank.#parts.push('');
+ gc.push(blank);
+ this.#adopt(child, index);
+ }
+ #adopt(child, index) {
+ const gc = child.#parts[0];
+ this.#parts.splice(index, 1, ...gc.#parts);
+ for (const p of gc.#parts) {
+ if (typeof p === 'object')
+ p.#parent = this;
+ }
+ this.#toString = undefined;
+ }
+ #canUsurpType(c) {
+ const m = usurpMap.get(this.type);
+ return !!(m?.has(c));
+ }
+ #canUsurp(child) {
+ if (!child ||
+ typeof child !== 'object' ||
+ child.type !== null ||
+ child.#parts.length !== 1 ||
+ this.type === null ||
+ this.#parts.length !== 1) {
+ return false;
+ }
+ const gc = child.#parts[0];
+ if (!gc || typeof gc !== 'object' || gc.type === null) {
+ return false;
+ }
+ return this.#canUsurpType(gc.type);
+ }
+ #usurp(child) {
+ const m = usurpMap.get(this.type);
+ const gc = child.#parts[0];
+ const nt = m?.get(gc.type);
+ /* c8 ignore start - impossible */
+ if (!nt)
+ return false;
+ /* c8 ignore stop */
+ this.#parts = gc.#parts;
+ for (const p of this.#parts) {
+ if (typeof p === 'object')
+ p.#parent = this;
+ }
+ this.type = nt;
+ this.#toString = undefined;
+ this.#emptyExt = false;
+ }
+ #flatten() {
+ if (!isExtglobAST(this)) {
+ for (const p of this.#parts) {
+ if (typeof p === 'object')
+ p.#flatten();
+ }
+ }
+ else {
+ let iterations = 0;
+ let done = false;
+ do {
+ done = true;
+ for (let i = 0; i < this.#parts.length; i++) {
+ const c = this.#parts[i];
+ if (typeof c === 'object') {
+ c.#flatten();
+ if (this.#canAdopt(c)) {
+ done = false;
+ this.#adopt(c, i);
+ }
+ else if (this.#canAdoptWithSpace(c)) {
+ done = false;
+ this.#adoptWithSpace(c, i);
+ }
+ else if (this.#canUsurp(c)) {
+ done = false;
+ this.#usurp(c);
+ }
+ }
+ }
+ } while (!done && ++iterations < 10);
+ }
+ this.#toString = undefined;
+ }
static fromGlob(pattern, options = {}) {
- const ast = new AST(null, undefined, options);
- AST.#parseAST(pattern, ast, 0, options);
+ const ast = new _a(null, undefined, options);
+ _a.#parseAST(pattern, ast, 0, options, 0);
return ast;
}
// returns the regular expression if there's magic, or the unescaped
@@ -409,14 +553,16 @@ export class AST {
// or start or whatever) and prepend ^ or / at the Regexp construction.
toRegExpSource(allowDot) {
const dot = allowDot ?? !!this.#options.dot;
- if (this.#root === this)
+ if (this.#root === this) {
+ this.#flatten();
this.#fillNegs();
- if (!this.type) {
+ }
+ if (!isExtglobAST(this)) {
const noEmpty = this.isStart() && this.isEnd();
const src = this.#parts
.map(p => {
const [re, _, hasMagic, uflag] = typeof p === 'string'
- ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+ ? _a.#parseGlob(p, this.#hasMagic, noEmpty)
: p.toRegExpSource(allowDot);
this.#hasMagic = this.#hasMagic || hasMagic;
this.#uflag = this.#uflag || uflag;
@@ -475,9 +621,10 @@ export class AST {
// invalid extglob, has to at least be *something* present, if it's
// the entire path portion.
const s = this.toString();
- this.#parts = [s];
- this.type = null;
- this.#hasMagic = undefined;
+ const me = this;
+ me.#parts = [s];
+ me.type = null;
+ me.#hasMagic = undefined;
return [s, unescape(this.toString()), false, false];
}
// XXX abstract out this map method
@@ -541,11 +688,14 @@ export class AST {
let escaping = false;
let re = '';
let uflag = false;
+ // multiple stars that aren't globstars coalesce into one *
+ let inStar = false;
for (let i = 0; i < glob.length; i++) {
const c = glob.charAt(i);
if (escaping) {
escaping = false;
re += (reSpecials.has(c) ? '\\' : '') + c;
+ inStar = false;
continue;
}
if (c === '\\') {
@@ -564,17 +714,21 @@ export class AST {
uflag = uflag || needUflag;
i += consumed - 1;
hasMagic = hasMagic || magic;
+ inStar = false;
continue;
}
}
if (c === '*') {
- if (noEmpty && glob === '*')
- re += starNoEmpty;
- else
- re += star;
+ if (inStar)
+ continue;
+ inStar = true;
+ re += noEmpty && /^[*]+$/.test(glob) ? starNoEmpty : star;
hasMagic = true;
continue;
}
+ else {
+ inStar = false;
+ }
if (c === '?') {
re += qmark;
hasMagic = true;
@@ -585,4 +739,5 @@ export class AST {
return [re, unescape(glob), !!hasMagic, uflag];
}
}
+_a = AST;
//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minimatch/dist/esm/index.js b/deps/npm/node_modules/minimatch/dist/esm/index.js
index 84b577b0472cb6..737c8095415235 100644
--- a/deps/npm/node_modules/minimatch/dist/esm/index.js
+++ b/deps/npm/node_modules/minimatch/dist/esm/index.js
@@ -193,11 +193,13 @@ export class Minimatch {
isWindows;
platform;
windowsNoMagicRoot;
+ maxGlobstarRecursion;
regexp;
constructor(pattern, options = {}) {
assertValidPattern(pattern);
options = options || {};
this.options = options;
+ this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200;
this.pattern = pattern;
this.platform = options.platform || defaultPlatform;
this.isWindows = this.platform === 'win32';
@@ -597,7 +599,8 @@ export class Minimatch {
// out of pattern, then that's fine, as long as all
// the parts match.
matchOne(file, pattern, partial = false) {
- const options = this.options;
+ let fileStartIndex = 0;
+ let patternStartIndex = 0;
// UNC paths like //?/X:/... can match X:/... and vice versa
// Drive letters in absolute drive or unc paths are always compared
// case-insensitively.
@@ -618,15 +621,14 @@ export class Minimatch {
const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
if (typeof fdi === 'number' && typeof pdi === 'number') {
- const [fd, pd] = [file[fdi], pattern[pdi]];
+ const [fd, pd] = [
+ file[fdi],
+ pattern[pdi],
+ ];
if (fd.toLowerCase() === pd.toLowerCase()) {
pattern[pdi] = fd;
- if (pdi > fdi) {
- pattern = pattern.slice(pdi);
- }
- else if (fdi > pdi) {
- file = file.slice(fdi);
- }
+ patternStartIndex = pdi;
+ fileStartIndex = fdi;
}
}
}
@@ -636,102 +638,127 @@ export class Minimatch {
if (optimizationLevel >= 2) {
file = this.levelTwoFileOptimize(file);
}
- this.debug('matchOne', this, { file, pattern });
- this.debug('matchOne', file.length, pattern.length);
- for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
- this.debug('matchOne loop');
- var p = pattern[pi];
- var f = file[fi];
- this.debug(pattern, p, f);
- // should be impossible.
- // some invalid regexp stuff in the set.
- /* c8 ignore start */
- if (p === false) {
+ if (pattern.includes(GLOBSTAR)) {
+ return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex);
+ }
+ return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex);
+ }
+ #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) {
+ const firstgs = pattern.indexOf(GLOBSTAR, patternIndex);
+ const lastgs = pattern.lastIndexOf(GLOBSTAR);
+ const [head, body, tail] = partial ? [
+ pattern.slice(patternIndex, firstgs),
+ pattern.slice(firstgs + 1),
+ [],
+ ] : [
+ pattern.slice(patternIndex, firstgs),
+ pattern.slice(firstgs + 1, lastgs),
+ pattern.slice(lastgs + 1),
+ ];
+ if (head.length) {
+ const fileHead = file.slice(fileIndex, fileIndex + head.length);
+ if (!this.#matchOne(fileHead, head, partial, 0, 0))
return false;
+ fileIndex += head.length;
+ }
+ let fileTailMatch = 0;
+ if (tail.length) {
+ if (tail.length + fileIndex > file.length)
+ return false;
+ let tailStart = file.length - tail.length;
+ if (this.#matchOne(file, tail, partial, tailStart, 0)) {
+ fileTailMatch = tail.length;
}
- /* c8 ignore stop */
- if (p === GLOBSTAR) {
- this.debug('GLOBSTAR', [pattern, p, f]);
- // "**"
- // a/**/b/**/c would match the following:
- // a/b/x/y/z/c
- // a/x/y/z/b/c
- // a/b/x/b/x/c
- // a/b/c
- // To do this, take the rest of the pattern after
- // the **, and see if it would match the file remainder.
- // If so, return success.
- // If not, the ** "swallows" a segment, and try again.
- // This is recursively awful.
- //
- // a/**/b/**/c matching a/b/x/y/z/c
- // - a matches a
- // - doublestar
- // - matchOne(b/x/y/z/c, b/**/c)
- // - b matches b
- // - doublestar
- // - matchOne(x/y/z/c, c) -> no
- // - matchOne(y/z/c, c) -> no
- // - matchOne(z/c, c) -> no
- // - matchOne(c, c) yes, hit
- var fr = fi;
- var pr = pi + 1;
- if (pr === pl) {
- this.debug('** at the end');
- // a ** at the end will just swallow the rest.
- // We have found a match.
- // however, it will not swallow /.x, unless
- // options.dot is set.
- // . and .. are *never* matched by **, for explosively
- // exponential reasons.
- for (; fi < fl; fi++) {
- if (file[fi] === '.' ||
- file[fi] === '..' ||
- (!options.dot && file[fi].charAt(0) === '.'))
- return false;
- }
- return true;
+ else {
+ if (file[file.length - 1] !== '' ||
+ fileIndex + tail.length === file.length) {
+ return false;
}
- // ok, let's see if we can swallow whatever we can.
- while (fr < fl) {
- var swallowee = file[fr];
- this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
- // XXX remove this slice. Just pass the start index.
- if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
- this.debug('globstar found match!', fr, fl, swallowee);
- // found a match.
- return true;
- }
- else {
- // can't swallow "." or ".." ever.
- // can only swallow ".foo" when explicitly asked.
- if (swallowee === '.' ||
- swallowee === '..' ||
- (!options.dot && swallowee.charAt(0) === '.')) {
- this.debug('dot detected!', file, fr, pattern, pr);
- break;
- }
- // ** swallows a segment, and continue.
- this.debug('globstar swallow a segment, and continue');
- fr++;
- }
+ tailStart--;
+ if (!this.#matchOne(file, tail, partial, tailStart, 0))
+ return false;
+ fileTailMatch = tail.length + 1;
+ }
+ }
+ if (!body.length) {
+ let sawSome = !!fileTailMatch;
+ for (let i = fileIndex; i < file.length - fileTailMatch; i++) {
+ const f = String(file[i]);
+ sawSome = true;
+ if (f === '.' || f === '..' ||
+ (!this.options.dot && f.startsWith('.'))) {
+ return false;
}
- // no match was found.
- // However, in partial mode, we can't say this is necessarily over.
- /* c8 ignore start */
- if (partial) {
- // ran out of file
- this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
- if (fr === fl) {
- return true;
- }
+ }
+ return partial || sawSome;
+ }
+ const bodySegments = [[[], 0]];
+ let currentBody = bodySegments[0];
+ let nonGsParts = 0;
+ const nonGsPartsSums = [0];
+ for (const b of body) {
+ if (b === GLOBSTAR) {
+ nonGsPartsSums.push(nonGsParts);
+ currentBody = [[], 0];
+ bodySegments.push(currentBody);
+ }
+ else {
+ currentBody[0].push(b);
+ nonGsParts++;
+ }
+ }
+ let i = bodySegments.length - 1;
+ const fileLength = file.length - fileTailMatch;
+ for (const b of bodySegments) {
+ b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length);
+ }
+ return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch);
+ }
+ #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
+ const bs = bodySegments[bodyIndex];
+ if (!bs) {
+ for (let i = fileIndex; i < file.length; i++) {
+ sawTail = true;
+ const f = file[i];
+ if (f === '.' || f === '..' ||
+ (!this.options.dot && f.startsWith('.'))) {
+ return false;
}
- /* c8 ignore stop */
+ }
+ return sawTail;
+ }
+ const [body, after] = bs;
+ while (fileIndex <= after) {
+ const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0);
+ if (m && globStarDepth < this.maxGlobstarRecursion) {
+ const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail);
+ if (sub !== false)
+ return sub;
+ }
+ const f = file[fileIndex];
+ if (f === '.' || f === '..' ||
+ (!this.options.dot && f.startsWith('.'))) {
return false;
}
- // something other than **
- // non-magic patterns just have to match exactly
- // patterns with magic have been turned into regexps.
+ fileIndex++;
+ }
+ return partial || null;
+ }
+ #matchOne(file, pattern, partial, fileIndex, patternIndex) {
+ let fi;
+ let pi;
+ let pl;
+ let fl;
+ for (fi = fileIndex, pi = patternIndex,
+ fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+ this.debug('matchOne loop');
+ let p = pattern[pi];
+ let f = file[fi];
+ this.debug(pattern, p, f);
+ /* c8 ignore start */
+ if (p === false || p === GLOBSTAR)
+ return false;
+ /* c8 ignore stop */
let hit;
if (typeof p === 'string') {
hit = f === p;
@@ -744,38 +771,17 @@ export class Minimatch {
if (!hit)
return false;
}
- // Note: ending in / means that we'll get a final ""
- // at the end of the pattern. This can only match a
- // corresponding "" at the end of the file.
- // If the file ends in /, then it can only match a
- // a pattern that ends in /, unless the pattern just
- // doesn't have any more for it. But, a/b/ should *not*
- // match "a/b/*", even though "" matches against the
- // [^/]*? pattern, except in partial mode, where it might
- // simply not be reached yet.
- // However, a/b/ should still satisfy a/*
- // now either we fell off the end of the pattern, or we're done.
if (fi === fl && pi === pl) {
- // ran out of pattern and filename at the same time.
- // an exact hit!
return true;
}
else if (fi === fl) {
- // ran out of file, but still had pattern left.
- // this is ok if we're doing the match as part of
- // a glob fs traversal.
return partial;
}
else if (pi === pl) {
- // ran out of pattern, still have file left.
- // this is only acceptable if we're on the very last
- // empty segment of a file with a trailing slash.
- // a/* should match a/b/
return fi === fl - 1 && file[fi] === '';
/* c8 ignore start */
}
else {
- // should be unreachable.
throw new Error('wtf?');
}
/* c8 ignore stop */
diff --git a/deps/npm/node_modules/minimatch/package.json b/deps/npm/node_modules/minimatch/package.json
index 01fc48ecfd6a9f..3866f1da4fda19 100644
--- a/deps/npm/node_modules/minimatch/package.json
+++ b/deps/npm/node_modules/minimatch/package.json
@@ -2,7 +2,7 @@
"author": "Isaac Z. Schlueter (http://blog.izs.me)",
"name": "minimatch",
"description": "a glob matcher in javascript",
- "version": "9.0.5",
+ "version": "9.0.9",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/minimatch.git"
@@ -53,20 +53,16 @@
"node": ">=16 || 14 >=14.17"
},
"dependencies": {
- "brace-expansion": "^2.0.1"
+ "brace-expansion": "^2.0.2"
},
"devDependencies": {
- "@types/brace-expansion": "^1.1.0",
- "@types/node": "^18.15.11",
- "@types/tap": "^15.0.8",
- "eslint-config-prettier": "^8.6.0",
- "mkdirp": "1",
- "prettier": "^2.8.2",
- "tap": "^18.7.2",
- "ts-node": "^10.9.1",
- "tshy": "^1.12.0",
- "typedoc": "^0.23.21",
- "typescript": "^4.9.3"
+ "@types/brace-expansion": "^1.1.2",
+ "@types/node": "^25.3.0",
+ "mkdirp": "^3.0.1",
+ "prettier": "^3.8.1",
+ "tap": "^21.6.1",
+ "tshy": "^3.3.2",
+ "typescript": "^5.5.3"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
@@ -78,5 +74,9 @@
".": "./src/index.ts"
}
},
- "type": "module"
+ "type": "module",
+ "publishConfig": {
+ "tag": "legacy-v9"
+ },
+ "module": "./dist/esm/index.js"
}
diff --git a/deps/npm/node_modules/cacache/node_modules/tar/LICENSE b/deps/npm/node_modules/minipass-flush/node_modules/yallist/LICENSE
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/tar/LICENSE
rename to deps/npm/node_modules/minipass-flush/node_modules/yallist/LICENSE
diff --git a/deps/npm/node_modules/yallist/iterator.js b/deps/npm/node_modules/minipass-flush/node_modules/yallist/iterator.js
similarity index 100%
rename from deps/npm/node_modules/yallist/iterator.js
rename to deps/npm/node_modules/minipass-flush/node_modules/yallist/iterator.js
diff --git a/deps/npm/node_modules/minipass-flush/node_modules/yallist/package.json b/deps/npm/node_modules/minipass-flush/node_modules/yallist/package.json
new file mode 100644
index 00000000000000..8a083867d72e00
--- /dev/null
+++ b/deps/npm/node_modules/minipass-flush/node_modules/yallist/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "yallist",
+ "version": "4.0.0",
+ "description": "Yet Another Linked List",
+ "main": "yallist.js",
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "yallist.js",
+ "iterator.js"
+ ],
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "^12.1.0"
+ },
+ "scripts": {
+ "test": "tap test/*.js --100",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "postpublish": "git push origin --all; git push origin --tags"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
+ "license": "ISC"
+}
diff --git a/deps/npm/node_modules/yallist/yallist.js b/deps/npm/node_modules/minipass-flush/node_modules/yallist/yallist.js
similarity index 100%
rename from deps/npm/node_modules/yallist/yallist.js
rename to deps/npm/node_modules/minipass-flush/node_modules/yallist/yallist.js
diff --git a/deps/npm/node_modules/chownr/LICENSE b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/LICENSE
similarity index 100%
rename from deps/npm/node_modules/chownr/LICENSE
rename to deps/npm/node_modules/minipass-pipeline/node_modules/yallist/LICENSE
diff --git a/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/iterator.js b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/iterator.js
new file mode 100644
index 00000000000000..d41c97a19f9849
--- /dev/null
+++ b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+module.exports = function (Yallist) {
+ Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+ }
+}
diff --git a/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/package.json b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/package.json
new file mode 100644
index 00000000000000..8a083867d72e00
--- /dev/null
+++ b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "yallist",
+ "version": "4.0.0",
+ "description": "Yet Another Linked List",
+ "main": "yallist.js",
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "yallist.js",
+ "iterator.js"
+ ],
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "^12.1.0"
+ },
+ "scripts": {
+ "test": "tap test/*.js --100",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "postpublish": "git push origin --all; git push origin --tags"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
+ "license": "ISC"
+}
diff --git a/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/yallist.js b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/yallist.js
new file mode 100644
index 00000000000000..4e83ab1c542a51
--- /dev/null
+++ b/deps/npm/node_modules/minipass-pipeline/node_modules/yallist/yallist.js
@@ -0,0 +1,426 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+ var self = this
+ if (!(self instanceof Yallist)) {
+ self = new Yallist()
+ }
+
+ self.tail = null
+ self.head = null
+ self.length = 0
+
+ if (list && typeof list.forEach === 'function') {
+ list.forEach(function (item) {
+ self.push(item)
+ })
+ } else if (arguments.length > 0) {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ self.push(arguments[i])
+ }
+ }
+
+ return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+ if (node.list !== this) {
+ throw new Error('removing node which does not belong to this list')
+ }
+
+ var next = node.next
+ var prev = node.prev
+
+ if (next) {
+ next.prev = prev
+ }
+
+ if (prev) {
+ prev.next = next
+ }
+
+ if (node === this.head) {
+ this.head = next
+ }
+ if (node === this.tail) {
+ this.tail = prev
+ }
+
+ node.list.length--
+ node.next = null
+ node.prev = null
+ node.list = null
+
+ return next
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+ if (node === this.head) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var head = this.head
+ node.list = this
+ node.next = head
+ if (head) {
+ head.prev = node
+ }
+
+ this.head = node
+ if (!this.tail) {
+ this.tail = node
+ }
+ this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+ if (node === this.tail) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var tail = this.tail
+ node.list = this
+ node.prev = tail
+ if (tail) {
+ tail.next = node
+ }
+
+ this.tail = node
+ if (!this.head) {
+ this.head = node
+ }
+ this.length++
+}
+
+Yallist.prototype.push = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ push(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.unshift = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ unshift(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.pop = function () {
+ if (!this.tail) {
+ return undefined
+ }
+
+ var res = this.tail.value
+ this.tail = this.tail.prev
+ if (this.tail) {
+ this.tail.next = null
+ } else {
+ this.head = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.shift = function () {
+ if (!this.head) {
+ return undefined
+ }
+
+ var res = this.head.value
+ this.head = this.head.next
+ if (this.head) {
+ this.head.prev = null
+ } else {
+ this.tail = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.head, i = 0; walker !== null; i++) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.next
+ }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.prev
+ }
+}
+
+Yallist.prototype.get = function (n) {
+ for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.next
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.getReverse = function (n) {
+ for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.prev
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.head; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.next
+ }
+ return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.tail; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.prev
+ }
+ return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+ var acc
+ var walker = this.head
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.head) {
+ walker = this.head.next
+ acc = this.head.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = 0; walker !== null; i++) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.next
+ }
+
+ return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+ var acc
+ var walker = this.tail
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.tail) {
+ walker = this.tail.prev
+ acc = this.tail.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = this.length - 1; walker !== null; i--) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.prev
+ }
+
+ return acc
+}
+
+Yallist.prototype.toArray = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.head; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.next
+ }
+ return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.tail; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.prev
+ }
+ return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+ walker = walker.next
+ }
+ for (; walker !== null && i < to; i++, walker = walker.next) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+ walker = walker.prev
+ }
+ for (; walker !== null && i > from; i--, walker = walker.prev) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
+ if (start > this.length) {
+ start = this.length - 1
+ }
+ if (start < 0) {
+ start = this.length + start;
+ }
+
+ for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
+ walker = walker.next
+ }
+
+ var ret = []
+ for (var i = 0; walker && i < deleteCount; i++) {
+ ret.push(walker.value)
+ walker = this.removeNode(walker)
+ }
+ if (walker === null) {
+ walker = this.tail
+ }
+
+ if (walker !== this.head && walker !== this.tail) {
+ walker = walker.prev
+ }
+
+ for (var i = 0; i < nodes.length; i++) {
+ walker = insert(this, walker, nodes[i])
+ }
+ return ret;
+}
+
+Yallist.prototype.reverse = function () {
+ var head = this.head
+ var tail = this.tail
+ for (var walker = head; walker !== null; walker = walker.prev) {
+ var p = walker.prev
+ walker.prev = walker.next
+ walker.next = p
+ }
+ this.head = tail
+ this.tail = head
+ return this
+}
+
+function insert (self, node, value) {
+ var inserted = node === self.head ?
+ new Node(value, null, node, self) :
+ new Node(value, node, node.next, self)
+
+ if (inserted.next === null) {
+ self.tail = inserted
+ }
+ if (inserted.prev === null) {
+ self.head = inserted
+ }
+
+ self.length++
+
+ return inserted
+}
+
+function push (self, item) {
+ self.tail = new Node(item, self.tail, null, self)
+ if (!self.head) {
+ self.head = self.tail
+ }
+ self.length++
+}
+
+function unshift (self, item) {
+ self.head = new Node(item, null, self.head, self)
+ if (!self.tail) {
+ self.tail = self.head
+ }
+ self.length++
+}
+
+function Node (value, prev, next, list) {
+ if (!(this instanceof Node)) {
+ return new Node(value, prev, next, list)
+ }
+
+ this.list = list
+ this.value = value
+
+ if (prev) {
+ prev.next = this
+ this.prev = prev
+ } else {
+ this.prev = null
+ }
+
+ if (next) {
+ next.prev = this
+ this.next = next
+ } else {
+ this.next = null
+ }
+}
+
+try {
+ // add if support for Symbol.iterator is present
+ require('./iterator.js')(Yallist)
+} catch (er) {}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE b/deps/npm/node_modules/minipass-sized/node_modules/yallist/LICENSE
similarity index 100%
rename from deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE
rename to deps/npm/node_modules/minipass-sized/node_modules/yallist/LICENSE
diff --git a/deps/npm/node_modules/minipass-sized/node_modules/yallist/iterator.js b/deps/npm/node_modules/minipass-sized/node_modules/yallist/iterator.js
new file mode 100644
index 00000000000000..d41c97a19f9849
--- /dev/null
+++ b/deps/npm/node_modules/minipass-sized/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+module.exports = function (Yallist) {
+ Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+ }
+}
diff --git a/deps/npm/node_modules/minipass-sized/node_modules/yallist/package.json b/deps/npm/node_modules/minipass-sized/node_modules/yallist/package.json
new file mode 100644
index 00000000000000..8a083867d72e00
--- /dev/null
+++ b/deps/npm/node_modules/minipass-sized/node_modules/yallist/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "yallist",
+ "version": "4.0.0",
+ "description": "Yet Another Linked List",
+ "main": "yallist.js",
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "yallist.js",
+ "iterator.js"
+ ],
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "^12.1.0"
+ },
+ "scripts": {
+ "test": "tap test/*.js --100",
+ "preversion": "npm test",
+ "postversion": "npm publish",
+ "postpublish": "git push origin --all; git push origin --tags"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
+ "license": "ISC"
+}
diff --git a/deps/npm/node_modules/minipass-sized/node_modules/yallist/yallist.js b/deps/npm/node_modules/minipass-sized/node_modules/yallist/yallist.js
new file mode 100644
index 00000000000000..4e83ab1c542a51
--- /dev/null
+++ b/deps/npm/node_modules/minipass-sized/node_modules/yallist/yallist.js
@@ -0,0 +1,426 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+ var self = this
+ if (!(self instanceof Yallist)) {
+ self = new Yallist()
+ }
+
+ self.tail = null
+ self.head = null
+ self.length = 0
+
+ if (list && typeof list.forEach === 'function') {
+ list.forEach(function (item) {
+ self.push(item)
+ })
+ } else if (arguments.length > 0) {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ self.push(arguments[i])
+ }
+ }
+
+ return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+ if (node.list !== this) {
+ throw new Error('removing node which does not belong to this list')
+ }
+
+ var next = node.next
+ var prev = node.prev
+
+ if (next) {
+ next.prev = prev
+ }
+
+ if (prev) {
+ prev.next = next
+ }
+
+ if (node === this.head) {
+ this.head = next
+ }
+ if (node === this.tail) {
+ this.tail = prev
+ }
+
+ node.list.length--
+ node.next = null
+ node.prev = null
+ node.list = null
+
+ return next
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+ if (node === this.head) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var head = this.head
+ node.list = this
+ node.next = head
+ if (head) {
+ head.prev = node
+ }
+
+ this.head = node
+ if (!this.tail) {
+ this.tail = node
+ }
+ this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+ if (node === this.tail) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var tail = this.tail
+ node.list = this
+ node.prev = tail
+ if (tail) {
+ tail.next = node
+ }
+
+ this.tail = node
+ if (!this.head) {
+ this.head = node
+ }
+ this.length++
+}
+
+Yallist.prototype.push = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ push(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.unshift = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ unshift(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.pop = function () {
+ if (!this.tail) {
+ return undefined
+ }
+
+ var res = this.tail.value
+ this.tail = this.tail.prev
+ if (this.tail) {
+ this.tail.next = null
+ } else {
+ this.head = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.shift = function () {
+ if (!this.head) {
+ return undefined
+ }
+
+ var res = this.head.value
+ this.head = this.head.next
+ if (this.head) {
+ this.head.prev = null
+ } else {
+ this.tail = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.head, i = 0; walker !== null; i++) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.next
+ }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.prev
+ }
+}
+
+Yallist.prototype.get = function (n) {
+ for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.next
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.getReverse = function (n) {
+ for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.prev
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.head; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.next
+ }
+ return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.tail; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.prev
+ }
+ return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+ var acc
+ var walker = this.head
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.head) {
+ walker = this.head.next
+ acc = this.head.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = 0; walker !== null; i++) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.next
+ }
+
+ return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+ var acc
+ var walker = this.tail
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.tail) {
+ walker = this.tail.prev
+ acc = this.tail.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = this.length - 1; walker !== null; i--) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.prev
+ }
+
+ return acc
+}
+
+Yallist.prototype.toArray = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.head; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.next
+ }
+ return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.tail; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.prev
+ }
+ return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+ walker = walker.next
+ }
+ for (; walker !== null && i < to; i++, walker = walker.next) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+ walker = walker.prev
+ }
+ for (; walker !== null && i > from; i--, walker = walker.prev) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
+ if (start > this.length) {
+ start = this.length - 1
+ }
+ if (start < 0) {
+ start = this.length + start;
+ }
+
+ for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
+ walker = walker.next
+ }
+
+ var ret = []
+ for (var i = 0; walker && i < deleteCount; i++) {
+ ret.push(walker.value)
+ walker = this.removeNode(walker)
+ }
+ if (walker === null) {
+ walker = this.tail
+ }
+
+ if (walker !== this.head && walker !== this.tail) {
+ walker = walker.prev
+ }
+
+ for (var i = 0; i < nodes.length; i++) {
+ walker = insert(this, walker, nodes[i])
+ }
+ return ret;
+}
+
+Yallist.prototype.reverse = function () {
+ var head = this.head
+ var tail = this.tail
+ for (var walker = head; walker !== null; walker = walker.prev) {
+ var p = walker.prev
+ walker.prev = walker.next
+ walker.next = p
+ }
+ this.head = tail
+ this.tail = head
+ return this
+}
+
+function insert (self, node, value) {
+ var inserted = node === self.head ?
+ new Node(value, null, node, self) :
+ new Node(value, node, node.next, self)
+
+ if (inserted.next === null) {
+ self.tail = inserted
+ }
+ if (inserted.prev === null) {
+ self.head = inserted
+ }
+
+ self.length++
+
+ return inserted
+}
+
+function push (self, item) {
+ self.tail = new Node(item, self.tail, null, self)
+ if (!self.head) {
+ self.head = self.tail
+ }
+ self.length++
+}
+
+function unshift (self, item) {
+ self.head = new Node(item, null, self.head, self)
+ if (!self.tail) {
+ self.tail = self.head
+ }
+ self.length++
+}
+
+function Node (value, prev, next, list) {
+ if (!(this instanceof Node)) {
+ return new Node(value, prev, next, list)
+ }
+
+ this.list = list
+ this.value = value
+
+ if (prev) {
+ prev.next = this
+ this.prev = prev
+ } else {
+ this.prev = null
+ }
+
+ if (next) {
+ next.prev = this
+ this.next = next
+ } else {
+ this.next = null
+ }
+}
+
+try {
+ // add if support for Symbol.iterator is present
+ require('./iterator.js')(Yallist)
+} catch (er) {}
diff --git a/deps/npm/node_modules/minipass/LICENSE b/deps/npm/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4c..00000000000000
--- a/deps/npm/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/chownr/LICENSE.md b/deps/npm/node_modules/minipass/LICENSE.md
similarity index 87%
rename from deps/npm/node_modules/node-gyp/node_modules/chownr/LICENSE.md
rename to deps/npm/node_modules/minipass/LICENSE.md
index 881248b6d7f0ca..c5402b9577a8cd 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/chownr/LICENSE.md
+++ b/deps/npm/node_modules/minipass/LICENSE.md
@@ -1,11 +1,3 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
# Blue Oak Model License
Version 1.0.0
diff --git a/deps/npm/node_modules/minipass/dist/commonjs/index.js b/deps/npm/node_modules/minipass/dist/commonjs/index.js
index 068c095b697932..91f3a5cfc7a0b9 100644
--- a/deps/npm/node_modules/minipass/dist/commonjs/index.js
+++ b/deps/npm/node_modules/minipass/dist/commonjs/index.js
@@ -22,7 +22,11 @@ const isStream = (s) => !!s &&
(s instanceof Minipass ||
s instanceof node_stream_1.default ||
(0, exports.isReadable)(s) ||
- (0, exports.isWritable)(s));
+ (0, exports.isWritable)(s))
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+;
exports.isStream = isStream;
/**
* Return true if the argument is a valid {@link Minipass.Readable}
@@ -32,7 +36,11 @@ const isReadable = (s) => !!s &&
s instanceof node_events_1.EventEmitter &&
typeof s.pipe === 'function' &&
// node core Writable streams have a pipe() method, but it throws
- s.pipe !== node_stream_1.default.Writable.prototype.pipe;
+ s.pipe !== node_stream_1.default.Writable.prototype.pipe
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+;
exports.isReadable = isReadable;
/**
* Return true if the argument is a valid {@link Minipass.Writable}
@@ -129,7 +137,7 @@ class PipeProxyErrors extends Pipe {
}
constructor(src, dest, opts) {
super(src, dest, opts);
- this.proxyErrors = er => dest.emit('error', er);
+ this.proxyErrors = (er) => this.dest.emit('error', er);
src.on('error', this.proxyErrors);
}
}
@@ -939,6 +947,7 @@ class Minipass extends node_events_1.EventEmitter {
[Symbol.asyncIterator]() {
return this;
},
+ [Symbol.asyncDispose]: async () => { },
};
}
/**
@@ -976,6 +985,7 @@ class Minipass extends node_events_1.EventEmitter {
[Symbol.iterator]() {
return this;
},
+ [Symbol.dispose]: () => { },
};
}
/**
diff --git a/deps/npm/node_modules/minipass/dist/esm/index.js b/deps/npm/node_modules/minipass/dist/esm/index.js
index b5fa4513c90838..5df55461e3d491 100644
--- a/deps/npm/node_modules/minipass/dist/esm/index.js
+++ b/deps/npm/node_modules/minipass/dist/esm/index.js
@@ -120,7 +120,7 @@ class PipeProxyErrors extends Pipe {
}
constructor(src, dest, opts) {
super(src, dest, opts);
- this.proxyErrors = er => dest.emit('error', er);
+ this.proxyErrors = (er) => this.dest.emit('error', er);
src.on('error', this.proxyErrors);
}
}
@@ -930,6 +930,7 @@ export class Minipass extends EventEmitter {
[Symbol.asyncIterator]() {
return this;
},
+ [Symbol.asyncDispose]: async () => { },
};
}
/**
@@ -967,6 +968,7 @@ export class Minipass extends EventEmitter {
[Symbol.iterator]() {
return this;
},
+ [Symbol.dispose]: () => { },
};
}
/**
diff --git a/deps/npm/node_modules/minipass/package.json b/deps/npm/node_modules/minipass/package.json
index 771969b0285469..800f215cb02c01 100644
--- a/deps/npm/node_modules/minipass/package.json
+++ b/deps/npm/node_modules/minipass/package.json
@@ -1,13 +1,14 @@
{
"name": "minipass",
- "version": "7.1.2",
+ "version": "7.1.3",
"description": "minimal implementation of a PassThrough stream",
"main": "./dist/commonjs/index.js",
"types": "./dist/commonjs/index.d.ts",
+ "module": "./dist/esm/index.js",
"type": "module",
"tshy": {
"selfLink": false,
- "main": true,
+ "compiler": "tsgo",
"exports": {
"./package.json": "./package.json",
".": "./src/index.ts"
@@ -54,14 +55,14 @@
},
"devDependencies": {
"@types/end-of-stream": "^1.4.2",
- "@types/node": "^20.1.2",
+ "@types/node": "^25.2.3",
"end-of-stream": "^1.4.0",
"node-abort-controller": "^3.1.1",
- "prettier": "^2.6.2",
- "tap": "^19.0.0",
+ "prettier": "^3.8.1",
+ "tap": "^21.6.1",
"through2": "^2.0.3",
- "tshy": "^1.14.0",
- "typedoc": "^0.25.1"
+ "tshy": "^3.3.2",
+ "typedoc": "^0.28.17"
},
"repository": "https://github.com/isaacs/minipass",
"keywords": [
@@ -69,14 +70,8 @@
"stream"
],
"author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "ISC",
+ "license": "BlueOak-1.0.0",
"engines": {
"node": ">=16 || 14 >=14.17"
- },
- "tap": {
- "typecheck": true,
- "include": [
- "test/*.ts"
- ]
}
}
diff --git a/deps/npm/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/minizlib/dist/commonjs/index.js
index b4906d27833720..78c6536baf6be9 100644
--- a/deps/npm/node_modules/minizlib/dist/commonjs/index.js
+++ b/deps/npm/node_modules/minizlib/dist/commonjs/index.js
@@ -36,7 +36,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
+exports.ZstdDecompress = exports.ZstdCompress = exports.BrotliDecompress = exports.BrotliCompress = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
const assert_1 = __importDefault(require("assert"));
const buffer_1 = require("buffer");
const minipass_1 = require("minipass");
@@ -56,15 +56,15 @@ const _superWrite = Symbol('_superWrite');
class ZlibError extends Error {
code;
errno;
- constructor(err) {
- super('zlib: ' + err.message);
+ constructor(err, origin) {
+ super('zlib: ' + err.message, { cause: err });
this.code = err.code;
this.errno = err.errno;
/* c8 ignore next */
if (!this.code)
this.code = 'ZLIB_ERROR';
this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
+ Error.captureStackTrace(this, origin ?? this.constructor);
}
get name() {
return 'ZlibError';
@@ -105,6 +105,10 @@ class ZlibBase extends minipass_1.Minipass {
this.#finishFlushFlag = opts.finishFlush ?? 0;
this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
/* c8 ignore stop */
+ //@ts-ignore
+ if (typeof realZlib[mode] !== 'function') {
+ throw new TypeError('Compression method not supported: ' + mode);
+ }
// this will throw if any options are invalid for the class selected
try {
// @types/node doesn't know that it exports the classes, but they're there
@@ -113,7 +117,7 @@ class ZlibBase extends minipass_1.Minipass {
}
catch (er) {
// make sure that all errors get decorated properly
- throw new ZlibError(er);
+ throw new ZlibError(er, this.constructor);
}
this.#onError = err => {
// no sense raising multiple errors, since we abort on the first one.
@@ -213,7 +217,7 @@ class ZlibBase extends minipass_1.Minipass {
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
passthroughBufferConcat(false);
- this.#onError(new ZlibError(err));
+ this.#onError(new ZlibError(err, this.write));
}
finally {
if (this.#handle) {
@@ -232,7 +236,7 @@ class ZlibBase extends minipass_1.Minipass {
}
}
if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+ this.#handle.on('error', er => this.#onError(new ZlibError(er, this.write)));
let writeReturn;
if (result) {
if (Array.isArray(result) && result.length > 0) {
@@ -376,7 +380,6 @@ class Brotli extends ZlibBase {
super(opts, mode);
}
}
-exports.Brotli = Brotli;
class BrotliCompress extends Brotli {
constructor(opts) {
super(opts, 'BrotliCompress');
@@ -389,4 +392,25 @@ class BrotliDecompress extends Brotli {
}
}
exports.BrotliDecompress = BrotliDecompress;
+class Zstd extends ZlibBase {
+ constructor(opts, mode) {
+ opts = opts || {};
+ opts.flush = opts.flush || constants_js_1.constants.ZSTD_e_continue;
+ opts.finishFlush = opts.finishFlush || constants_js_1.constants.ZSTD_e_end;
+ opts.fullFlushFlag = constants_js_1.constants.ZSTD_e_flush;
+ super(opts, mode);
+ }
+}
+class ZstdCompress extends Zstd {
+ constructor(opts) {
+ super(opts, 'ZstdCompress');
+ }
+}
+exports.ZstdCompress = ZstdCompress;
+class ZstdDecompress extends Zstd {
+ constructor(opts) {
+ super(opts, 'ZstdDecompress');
+ }
+}
+exports.ZstdDecompress = ZstdDecompress;
//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/minizlib/dist/esm/index.js
index f33586a8ab0ec1..b70ba1f2cd84f1 100644
--- a/deps/npm/node_modules/minizlib/dist/esm/index.js
+++ b/deps/npm/node_modules/minizlib/dist/esm/index.js
@@ -16,15 +16,15 @@ const _superWrite = Symbol('_superWrite');
export class ZlibError extends Error {
code;
errno;
- constructor(err) {
- super('zlib: ' + err.message);
+ constructor(err, origin) {
+ super('zlib: ' + err.message, { cause: err });
this.code = err.code;
this.errno = err.errno;
/* c8 ignore next */
if (!this.code)
this.code = 'ZLIB_ERROR';
this.message = 'zlib: ' + err.message;
- Error.captureStackTrace(this, this.constructor);
+ Error.captureStackTrace(this, origin ?? this.constructor);
}
get name() {
return 'ZlibError';
@@ -64,6 +64,10 @@ class ZlibBase extends Minipass {
this.#finishFlushFlag = opts.finishFlush ?? 0;
this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
/* c8 ignore stop */
+ //@ts-ignore
+ if (typeof realZlib[mode] !== 'function') {
+ throw new TypeError('Compression method not supported: ' + mode);
+ }
// this will throw if any options are invalid for the class selected
try {
// @types/node doesn't know that it exports the classes, but they're there
@@ -72,7 +76,7 @@ class ZlibBase extends Minipass {
}
catch (er) {
// make sure that all errors get decorated properly
- throw new ZlibError(er);
+ throw new ZlibError(er, this.constructor);
}
this.#onError = err => {
// no sense raising multiple errors, since we abort on the first one.
@@ -172,7 +176,7 @@ class ZlibBase extends Minipass {
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
passthroughBufferConcat(false);
- this.#onError(new ZlibError(err));
+ this.#onError(new ZlibError(err, this.write));
}
finally {
if (this.#handle) {
@@ -191,7 +195,7 @@ class ZlibBase extends Minipass {
}
}
if (this.#handle)
- this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+ this.#handle.on('error', er => this.#onError(new ZlibError(er, this.write)));
let writeReturn;
if (result) {
if (Array.isArray(result) && result.length > 0) {
@@ -317,7 +321,7 @@ export class Unzip extends Zlib {
super(opts, 'Unzip');
}
}
-export class Brotli extends ZlibBase {
+class Brotli extends ZlibBase {
constructor(opts, mode) {
opts = opts || {};
opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
@@ -337,4 +341,23 @@ export class BrotliDecompress extends Brotli {
super(opts, 'BrotliDecompress');
}
}
+class Zstd extends ZlibBase {
+ constructor(opts, mode) {
+ opts = opts || {};
+ opts.flush = opts.flush || constants.ZSTD_e_continue;
+ opts.finishFlush = opts.finishFlush || constants.ZSTD_e_end;
+ opts.fullFlushFlag = constants.ZSTD_e_flush;
+ super(opts, mode);
+ }
+}
+export class ZstdCompress extends Zstd {
+ constructor(opts) {
+ super(opts, 'ZstdCompress');
+ }
+}
+export class ZstdDecompress extends Zstd {
+ constructor(opts) {
+ super(opts, 'ZstdDecompress');
+ }
+}
//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/minizlib/package.json b/deps/npm/node_modules/minizlib/package.json
index 43cb855e15a5d8..dceaed923d3db8 100644
--- a/deps/npm/node_modules/minizlib/package.json
+++ b/deps/npm/node_modules/minizlib/package.json
@@ -1,6 +1,6 @@
{
"name": "minizlib",
- "version": "3.0.2",
+ "version": "3.1.0",
"description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
"main": "./dist/commonjs/index.js",
"dependencies": {
@@ -33,7 +33,7 @@
"author": "Isaac Z. Schlueter (http://blog.izs.me/)",
"license": "MIT",
"devDependencies": {
- "@types/node": "^22.13.14",
+ "@types/node": "^24.5.2",
"tap": "^21.1.0",
"tshy": "^3.0.2",
"typedoc": "^0.28.1"
diff --git a/deps/npm/node_modules/mkdirp/LICENSE b/deps/npm/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 13fcd15f0e0beb..00000000000000
--- a/deps/npm/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/mkdirp/bin/cmd.js b/deps/npm/node_modules/mkdirp/bin/cmd.js
deleted file mode 100755
index 6e0aa8dc4667b6..00000000000000
--- a/deps/npm/node_modules/mkdirp/bin/cmd.js
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env node
-
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
- Create each supplied directory including any necessary parent directories
- that don't yet exist.
-
- If the directory already exists, do nothing.
-
-OPTIONS are:
-
- -m If a directory needs to be created, set the mode as an octal
- --mode= permission string.
-
- -v --version Print the mkdirp version number
-
- -h --help Print this helpful banner
-
- -p --print Print the first directories created for each path provided
-
- --manual Use manual implementation, even if native is available
-`
-
-const dirs = []
-const opts = {}
-let print = false
-let dashdash = false
-let manual = false
-for (const arg of process.argv.slice(2)) {
- if (dashdash)
- dirs.push(arg)
- else if (arg === '--')
- dashdash = true
- else if (arg === '--manual')
- manual = true
- else if (/^-h/.test(arg) || /^--help/.test(arg)) {
- console.log(usage())
- process.exit(0)
- } else if (arg === '-v' || arg === '--version') {
- console.log(require('../package.json').version)
- process.exit(0)
- } else if (arg === '-p' || arg === '--print') {
- print = true
- } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
- const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
- if (isNaN(mode)) {
- console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
- process.exit(1)
- }
- opts.mode = mode
- } else
- dirs.push(arg)
-}
-
-const mkdirp = require('../')
-const impl = manual ? mkdirp.manual : mkdirp
-if (dirs.length === 0)
- console.error(usage())
-
-Promise.all(dirs.map(dir => impl(dir, opts)))
- .then(made => print ? made.forEach(m => m && console.log(m)) : null)
- .catch(er => {
- console.error(er.message)
- if (er.code)
- console.error(' code: ' + er.code)
- process.exit(1)
- })
diff --git a/deps/npm/node_modules/mkdirp/index.js b/deps/npm/node_modules/mkdirp/index.js
deleted file mode 100644
index ad7a16c9f45d97..00000000000000
--- a/deps/npm/node_modules/mkdirp/index.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const optsArg = require('./lib/opts-arg.js')
-const pathArg = require('./lib/path-arg.js')
-
-const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
-const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
-const {useNative, useNativeSync} = require('./lib/use-native.js')
-
-
-const mkdirp = (path, opts) => {
- path = pathArg(path)
- opts = optsArg(opts)
- return useNative(opts)
- ? mkdirpNative(path, opts)
- : mkdirpManual(path, opts)
-}
-
-const mkdirpSync = (path, opts) => {
- path = pathArg(path)
- opts = optsArg(opts)
- return useNativeSync(opts)
- ? mkdirpNativeSync(path, opts)
- : mkdirpManualSync(path, opts)
-}
-
-mkdirp.sync = mkdirpSync
-mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
-mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
-mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
-mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
-
-module.exports = mkdirp
diff --git a/deps/npm/node_modules/mkdirp/lib/find-made.js b/deps/npm/node_modules/mkdirp/lib/find-made.js
deleted file mode 100644
index 022e492c085da0..00000000000000
--- a/deps/npm/node_modules/mkdirp/lib/find-made.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const {dirname} = require('path')
-
-const findMade = (opts, parent, path = undefined) => {
- // we never want the 'made' return value to be a root directory
- if (path === parent)
- return Promise.resolve()
-
- return opts.statAsync(parent).then(
- st => st.isDirectory() ? path : undefined, // will fail later
- er => er.code === 'ENOENT'
- ? findMade(opts, dirname(parent), parent)
- : undefined
- )
-}
-
-const findMadeSync = (opts, parent, path = undefined) => {
- if (path === parent)
- return undefined
-
- try {
- return opts.statSync(parent).isDirectory() ? path : undefined
- } catch (er) {
- return er.code === 'ENOENT'
- ? findMadeSync(opts, dirname(parent), parent)
- : undefined
- }
-}
-
-module.exports = {findMade, findMadeSync}
diff --git a/deps/npm/node_modules/mkdirp/lib/mkdirp-manual.js b/deps/npm/node_modules/mkdirp/lib/mkdirp-manual.js
deleted file mode 100644
index 2eb18cd64eb79c..00000000000000
--- a/deps/npm/node_modules/mkdirp/lib/mkdirp-manual.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const {dirname} = require('path')
-
-const mkdirpManual = (path, opts, made) => {
- opts.recursive = false
- const parent = dirname(path)
- if (parent === path) {
- return opts.mkdirAsync(path, opts).catch(er => {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- if (er.code !== 'EISDIR')
- throw er
- })
- }
-
- return opts.mkdirAsync(path, opts).then(() => made || path, er => {
- if (er.code === 'ENOENT')
- return mkdirpManual(parent, opts)
- .then(made => mkdirpManual(path, opts, made))
- if (er.code !== 'EEXIST' && er.code !== 'EROFS')
- throw er
- return opts.statAsync(path).then(st => {
- if (st.isDirectory())
- return made
- else
- throw er
- }, () => { throw er })
- })
-}
-
-const mkdirpManualSync = (path, opts, made) => {
- const parent = dirname(path)
- opts.recursive = false
-
- if (parent === path) {
- try {
- return opts.mkdirSync(path, opts)
- } catch (er) {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- if (er.code !== 'EISDIR')
- throw er
- else
- return
- }
- }
-
- try {
- opts.mkdirSync(path, opts)
- return made || path
- } catch (er) {
- if (er.code === 'ENOENT')
- return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
- if (er.code !== 'EEXIST' && er.code !== 'EROFS')
- throw er
- try {
- if (!opts.statSync(path).isDirectory())
- throw er
- } catch (_) {
- throw er
- }
- }
-}
-
-module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/deps/npm/node_modules/mkdirp/lib/mkdirp-native.js b/deps/npm/node_modules/mkdirp/lib/mkdirp-native.js
deleted file mode 100644
index c7a6b69800f62b..00000000000000
--- a/deps/npm/node_modules/mkdirp/lib/mkdirp-native.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const {dirname} = require('path')
-const {findMade, findMadeSync} = require('./find-made.js')
-const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
-
-const mkdirpNative = (path, opts) => {
- opts.recursive = true
- const parent = dirname(path)
- if (parent === path)
- return opts.mkdirAsync(path, opts)
-
- return findMade(opts, path).then(made =>
- opts.mkdirAsync(path, opts).then(() => made)
- .catch(er => {
- if (er.code === 'ENOENT')
- return mkdirpManual(path, opts)
- else
- throw er
- }))
-}
-
-const mkdirpNativeSync = (path, opts) => {
- opts.recursive = true
- const parent = dirname(path)
- if (parent === path)
- return opts.mkdirSync(path, opts)
-
- const made = findMadeSync(opts, path)
- try {
- opts.mkdirSync(path, opts)
- return made
- } catch (er) {
- if (er.code === 'ENOENT')
- return mkdirpManualSync(path, opts)
- else
- throw er
- }
-}
-
-module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/deps/npm/node_modules/mkdirp/lib/opts-arg.js b/deps/npm/node_modules/mkdirp/lib/opts-arg.js
deleted file mode 100644
index 2fa4833faacc70..00000000000000
--- a/deps/npm/node_modules/mkdirp/lib/opts-arg.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const { promisify } = require('util')
-const fs = require('fs')
-const optsArg = opts => {
- if (!opts)
- opts = { mode: 0o777, fs }
- else if (typeof opts === 'object')
- opts = { mode: 0o777, fs, ...opts }
- else if (typeof opts === 'number')
- opts = { mode: opts, fs }
- else if (typeof opts === 'string')
- opts = { mode: parseInt(opts, 8), fs }
- else
- throw new TypeError('invalid options argument')
-
- opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
- opts.mkdirAsync = promisify(opts.mkdir)
- opts.stat = opts.stat || opts.fs.stat || fs.stat
- opts.statAsync = promisify(opts.stat)
- opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
- opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
- return opts
-}
-module.exports = optsArg
diff --git a/deps/npm/node_modules/mkdirp/lib/path-arg.js b/deps/npm/node_modules/mkdirp/lib/path-arg.js
deleted file mode 100644
index cc07de5a6f9920..00000000000000
--- a/deps/npm/node_modules/mkdirp/lib/path-arg.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
-const { resolve, parse } = require('path')
-const pathArg = path => {
- if (/\0/.test(path)) {
- // simulate same failure that node raises
- throw Object.assign(
- new TypeError('path must be a string without null bytes'),
- {
- path,
- code: 'ERR_INVALID_ARG_VALUE',
- }
- )
- }
-
- path = resolve(path)
- if (platform === 'win32') {
- const badWinChars = /[*|"<>?:]/
- const {root} = parse(path)
- if (badWinChars.test(path.substr(root.length))) {
- throw Object.assign(new Error('Illegal characters in path.'), {
- path,
- code: 'EINVAL',
- })
- }
- }
-
- return path
-}
-module.exports = pathArg
diff --git a/deps/npm/node_modules/mkdirp/lib/use-native.js b/deps/npm/node_modules/mkdirp/lib/use-native.js
deleted file mode 100644
index 079361de19fd81..00000000000000
--- a/deps/npm/node_modules/mkdirp/lib/use-native.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const fs = require('fs')
-
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
-const versArr = version.replace(/^v/, '').split('.')
-const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
-
-const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
-const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
-
-module.exports = {useNative, useNativeSync}
diff --git a/deps/npm/node_modules/mkdirp/package.json b/deps/npm/node_modules/mkdirp/package.json
deleted file mode 100644
index 2913ed09bddd66..00000000000000
--- a/deps/npm/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
- "name": "mkdirp",
- "description": "Recursively mkdir, like `mkdir -p`",
- "version": "1.0.4",
- "main": "index.js",
- "keywords": [
- "mkdir",
- "directory",
- "make dir",
- "make",
- "dir",
- "recursive",
- "native"
- ],
- "repository": {
- "type": "git",
- "url": "https://github.com/isaacs/node-mkdirp.git"
- },
- "scripts": {
- "test": "tap",
- "snap": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "postpublish": "git push origin --follow-tags"
- },
- "tap": {
- "check-coverage": true,
- "coverage-map": "map.js"
- },
- "devDependencies": {
- "require-inject": "^1.4.4",
- "tap": "^14.10.7"
- },
- "bin": "bin/cmd.js",
- "license": "MIT",
- "engines": {
- "node": ">=10"
- },
- "files": [
- "bin",
- "lib",
- "index.js"
- ]
-}
diff --git a/deps/npm/node_modules/mkdirp/readme.markdown b/deps/npm/node_modules/mkdirp/readme.markdown
deleted file mode 100644
index 827de5905230a9..00000000000000
--- a/deps/npm/node_modules/mkdirp/readme.markdown
+++ /dev/null
@@ -1,266 +0,0 @@
-# mkdirp
-
-Like `mkdir -p`, but in Node.js!
-
-Now with a modern API and no\* bugs!
-
-\* may contain some bugs
-
-# example
-
-## pow.js
-
-```js
-const mkdirp = require('mkdirp')
-
-// return value is a Promise resolving to the first directory created
-mkdirp('/tmp/foo/bar/baz').then(made =>
- console.log(`made directories, starting with ${made}`))
-```
-
-Output (where `/tmp/foo` already exists)
-
-```
-made directories, starting with /tmp/foo/bar
-```
-
-Or, if you don't have time to wait around for promises:
-
-```js
-const mkdirp = require('mkdirp')
-
-// return value is the first directory created
-const made = mkdirp.sync('/tmp/foo/bar/baz')
-console.log(`made directories, starting with ${made}`)
-```
-
-And now /tmp/foo/bar/baz exists, huzzah!
-
-# methods
-
-```js
-const mkdirp = require('mkdirp')
-```
-
-## mkdirp(dir, [opts]) -> Promise
-
-Create a new directory and any necessary subdirectories at `dir` with octal
-permission string `opts.mode`. If `opts` is a string or number, it will be
-treated as the `opts.mode`.
-
-If `opts.mode` isn't specified, it defaults to `0o777 &
-(~process.umask())`.
-
-Promise resolves to first directory `made` that had to be created, or
-`undefined` if everything already exists. Promise rejects if any errors
-are encountered. Note that, in the case of promise rejection, some
-directories _may_ have been created, as recursive directory creation is not
-an atomic operation.
-
-You can optionally pass in an alternate `fs` implementation by passing in
-`opts.fs`. Your implementation should have `opts.fs.mkdir(path, opts, cb)`
-and `opts.fs.stat(path, cb)`.
-
-You can also override just one or the other of `mkdir` and `stat` by
-passing in `opts.stat` or `opts.mkdir`, or providing an `fs` option that
-only overrides one of these.
-
-## mkdirp.sync(dir, opts) -> String|null
-
-Synchronously create a new directory and any necessary subdirectories at
-`dir` with octal permission string `opts.mode`. If `opts` is a string or
-number, it will be treated as the `opts.mode`.
-
-If `opts.mode` isn't specified, it defaults to `0o777 &
-(~process.umask())`.
-
-Returns the first directory that had to be created, or undefined if
-everything already exists.
-
-You can optionally pass in an alternate `fs` implementation by passing in
-`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)`
-and `opts.fs.statSync(path)`.
-
-You can also override just one or the other of `mkdirSync` and `statSync`
-by passing in `opts.statSync` or `opts.mkdirSync`, or providing an `fs`
-option that only overrides one of these.
-
-## mkdirp.manual, mkdirp.manualSync
-
-Use the manual implementation (not the native one). This is the default
-when the native implementation is not available or the stat/mkdir
-implementation is overridden.
-
-## mkdirp.native, mkdirp.nativeSync
-
-Use the native implementation (not the manual one). This is the default
-when the native implementation is available and stat/mkdir are not
-overridden.
-
-# implementation
-
-On Node.js v10.12.0 and above, use the native `fs.mkdir(p,
-{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has been
-overridden by an option.
-
-## native implementation
-
-- If the path is a root directory, then pass it to the underlying
- implementation and return the result/error. (In this case, it'll either
- succeed or fail, but we aren't actually creating any dirs.)
-- Walk up the path statting each directory, to find the first path that
- will be created, `made`.
-- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`)
-- If error, raise it to the caller.
-- Return `made`.
-
-## manual implementation
-
-- Call underlying `fs.mkdir` implementation, with `recursive: false`
-- If error:
- - If path is a root directory, raise to the caller and do not handle it
- - If ENOENT, mkdirp parent dir, store result as `made`
- - stat(path)
- - If error, raise original `mkdir` error
- - If directory, return `made`
- - Else, raise original `mkdir` error
-- else
- - return `undefined` if a root dir, or `made` if set, or `path`
-
-## windows vs unix caveat
-
-On Windows file systems, attempts to create a root directory (ie, a drive
-letter or root UNC path) will fail. If the root directory exists, then it
-will fail with `EPERM`. If the root directory does not exist, then it will
-fail with `ENOENT`.
-
-On posix file systems, attempts to create a root directory (in recursive
-mode) will succeed silently, as it is treated like just another directory
-that already exists. (In non-recursive mode, of course, it fails with
-`EEXIST`.)
-
-In order to preserve this system-specific behavior (and because it's not as
-if we can create the parent of a root directory anyway), attempts to create
-a root directory are passed directly to the `fs` implementation, and any
-errors encountered are not handled.
-
-## native error caveat
-
-The native implementation (as of at least Node.js v13.4.0) does not provide
-appropriate errors in some cases (see
-[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) and
-[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)).
-
-In order to work around this issue, the native implementation will fall
-back to the manual implementation if an `ENOENT` error is encountered.
-
-# choosing a recursive mkdir implementation
-
-There are a few to choose from! Use the one that suits your needs best :D
-
-## use `fs.mkdir(path, {recursive: true}, cb)` if:
-
-- You wish to optimize performance even at the expense of other factors.
-- You don't need to know the first dir created.
-- You are ok with getting `ENOENT` as the error when some other problem is
- the actual cause.
-- You can limit your platforms to Node.js v10.12 and above.
-- You're ok with using callbacks instead of promises.
-- You don't need/want a CLI.
-- You don't need to override the `fs` methods in use.
-
-## use this module (mkdirp 1.x) if:
-
-- You need to know the first directory that was created.
-- You wish to use the native implementation if available, but fall back
- when it's not.
-- You prefer promise-returning APIs to callback-taking APIs.
-- You want more useful error messages than the native recursive mkdir
- provides (at least as of Node.js v13.4), and are ok with re-trying on
- `ENOENT` to achieve this.
-- You need (or at least, are ok with) a CLI.
-- You need to override the `fs` methods in use.
-
-## use [`make-dir`](http://npm.im/make-dir) if:
-
-- You do not need to know the first dir created (and wish to save a few
- `stat` calls when using the native implementation for this reason).
-- You wish to use the native implementation if available, but fall back
- when it's not.
-- You prefer promise-returning APIs to callback-taking APIs.
-- You are ok with occasionally getting `ENOENT` errors for failures that
- are actually related to something other than a missing file system entry.
-- You don't need/want a CLI.
-- You need to override the `fs` methods in use.
-
-## use mkdirp 0.x if:
-
-- You need to know the first directory that was created.
-- You need (or at least, are ok with) a CLI.
-- You need to override the `fs` methods in use.
-- You're ok with using callbacks instead of promises.
-- You are not running on Windows, where the root-level ENOENT errors can
- lead to infinite regress.
-- You think vinyl just sounds warmer and richer for some weird reason.
-- You are supporting truly ancient Node.js versions, before even the advent
- of a `Promise` language primitive. (Please don't. You deserve better.)
-
-# cli
-
-This package also ships with a `mkdirp` command.
-
-```
-$ mkdirp -h
-
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
- Create each supplied directory including any necessary parent directories
- that don't yet exist.
-
- If the directory already exists, do nothing.
-
-OPTIONS are:
-
- -m If a directory needs to be created, set the mode as an octal
- --mode= permission string.
-
- -v --version Print the mkdirp version number
-
- -h --help Print this helpful banner
-
- -p --print Print the first directories created for each path provided
-
- --manual Use manual implementation, even if native is available
-```
-
-# install
-
-With [npm](http://npmjs.org) do:
-
-```
-npm install mkdirp
-```
-
-to get the library locally, or
-
-```
-npm install -g mkdirp
-```
-
-to get the command everywhere, or
-
-```
-npx mkdirp ...
-```
-
-to run the command without installing it globally.
-
-# platform support
-
-This module works on node v8, but only v10 and above are officially
-supported, as Node v8 reached its LTS end of life 2020-01-01, which is in
-the past, as of this writing.
-
-# license
-
-MIT
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md b/deps/npm/node_modules/negotiator/HISTORY.md
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
rename to deps/npm/node_modules/negotiator/HISTORY.md
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE b/deps/npm/node_modules/negotiator/LICENSE
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
rename to deps/npm/node_modules/negotiator/LICENSE
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/index.js b/deps/npm/node_modules/negotiator/index.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/index.js
rename to deps/npm/node_modules/negotiator/index.js
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js b/deps/npm/node_modules/negotiator/lib/charset.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
rename to deps/npm/node_modules/negotiator/lib/charset.js
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js b/deps/npm/node_modules/negotiator/lib/encoding.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
rename to deps/npm/node_modules/negotiator/lib/encoding.js
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js b/deps/npm/node_modules/negotiator/lib/language.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
rename to deps/npm/node_modules/negotiator/lib/language.js
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js b/deps/npm/node_modules/negotiator/lib/mediaType.js
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
rename to deps/npm/node_modules/negotiator/lib/mediaType.js
diff --git a/deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/package.json b/deps/npm/node_modules/negotiator/package.json
similarity index 100%
rename from deps/npm/node_modules/make-fetch-happen/node_modules/negotiator/package.json
rename to deps/npm/node_modules/negotiator/package.json
diff --git a/deps/npm/node_modules/node-gyp/.release-please-manifest.json b/deps/npm/node_modules/node-gyp/.release-please-manifest.json
index f098464b1facdb..02eef11e2b93bc 100644
--- a/deps/npm/node_modules/node-gyp/.release-please-manifest.json
+++ b/deps/npm/node_modules/node-gyp/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "11.2.0"
+ ".": "11.5.0"
}
diff --git a/deps/npm/node_modules/node-gyp/CHANGELOG.md b/deps/npm/node_modules/node-gyp/CHANGELOG.md
index e206e5d9f3e517..7e2740db652e12 100644
--- a/deps/npm/node_modules/node-gyp/CHANGELOG.md
+++ b/deps/npm/node_modules/node-gyp/CHANGELOG.md
@@ -1,5 +1,83 @@
# Changelog
+## [11.5.0](https://github.com/nodejs/node-gyp/compare/v11.4.2...v11.5.0) (2025-10-15)
+
+
+### Features
+
+* update gyp-next to v0.20.5 ([#3222](https://github.com/nodejs/node-gyp/issues/3222)) ([848e950](https://github.com/nodejs/node-gyp/commit/848e950833b90f0b25f346710ee42e9be4797604))
+
+
+### Bug Fixes
+
+* **ci:** Run Visual Studio test on Windows 11 on ARM ([#3217](https://github.com/nodejs/node-gyp/issues/3217)) ([8bd3f63](https://github.com/nodejs/node-gyp/commit/8bd3f6354b8bd43262a4d99d58a568beab0459e8))
+* **ci:** Test on Python 3.14 release candidate 3 on Linux and macOS ([#3216](https://github.com/nodejs/node-gyp/issues/3216)) ([085b445](https://github.com/nodejs/node-gyp/commit/085b445d1c00f8f1fc6a6ff80d8a93c6643f11ee))
+
+
+### Core
+
+* **deps:** bump actions/github-script from 7 to 8 ([#3213](https://github.com/nodejs/node-gyp/issues/3213)) ([c6b968c](https://github.com/nodejs/node-gyp/commit/c6b968caf7f4e22687fc10716162675b1411f713))
+* **deps:** bump actions/setup-node from 4 to 5 ([#3211](https://github.com/nodejs/node-gyp/issues/3211)) ([921c04d](https://github.com/nodejs/node-gyp/commit/921c04d142549f172d3aeae4097c9e0af05599dd))
+* **deps:** bump actions/setup-python from 5 to 6 ([#3210](https://github.com/nodejs/node-gyp/issues/3210)) ([6b70b05](https://github.com/nodejs/node-gyp/commit/6b70b05ed21cb977214348c97c2b97515c0d08f3))
+
+## [11.4.2](https://github.com/nodejs/node-gyp/compare/v11.4.1...v11.4.2) (2025-08-26)
+
+
+### Bug Fixes
+
+* add adaptation for OpenHarmony platform ([#3207](https://github.com/nodejs/node-gyp/issues/3207)) ([b406532](https://github.com/nodejs/node-gyp/commit/b406532c77659c441c845708ec3ecdf09f013a3b))
+
+### Miscellaneous
+
+* update gyp-next to v0.20.4 ([#3208](https://github.com/nodejs/node-gyp/issues/3208)) ([adc61b1](https://github.com/nodejs/node-gyp/commit/adc61b1458315d9648591e74bf16bbe39511401e))
+* **ci:** Update Node.js version matrix in `tests.yml` ([#3209](https://github.com/nodejs/node-gyp/issues/3209)) ([a4e1da6](https://github.com/nodejs/node-gyp/commit/a4e1da6683a37fde565e1ea50f1fa86fa99a83c7))
+* ruff format Python code ([#3203](https://github.com/nodejs/node-gyp/issues/3203)) ([cb30a53](https://github.com/nodejs/node-gyp/commit/cb30a538eadf49ca0310980ffb0bfdb8fcebf0a4))
+
+## [11.4.1](https://github.com/nodejs/node-gyp/compare/v11.4.0...v11.4.1) (2025-08-20)
+
+
+### Miscellaneous
+
+* **release:** use npm@11 for OIDC publishing ([#3202](https://github.com/nodejs/node-gyp/issues/3202)) ([6b9638a](https://github.com/nodejs/node-gyp/commit/6b9638a0f80352e5bf7c1702e6ef622a6474d44a)), closes [#3201](https://github.com/nodejs/node-gyp/issues/3201)
+
+## [11.4.0](https://github.com/nodejs/node-gyp/compare/v11.3.0...v11.4.0) (2025-08-19)
+
+
+### Features
+
+* read from config case-insensitively ([#3198](https://github.com/nodejs/node-gyp/issues/3198)) ([5538e6c](https://github.com/nodejs/node-gyp/commit/5538e6c5d78dffd41e2a588adfa7ea9022150b9d))
+* support reading config from package.json ([#3196](https://github.com/nodejs/node-gyp/issues/3196)) ([1822dff](https://github.com/nodejs/node-gyp/commit/1822dff4f616a30ac3ca72e5946d81389cb8557e)), closes [#3156](https://github.com/nodejs/node-gyp/issues/3156)
+
+
+### Core
+
+* **deps:** bump actions/checkout from 4 to 5 ([#3193](https://github.com/nodejs/node-gyp/issues/3193)) ([27f5505](https://github.com/nodejs/node-gyp/commit/27f5505ec236551081366bf8a9c13ef5d8e468bf))
+
+
+### Miscellaneous
+
+* use npm oicd connection for publishing ([#3197](https://github.com/nodejs/node-gyp/issues/3197)) ([0773615](https://github.com/nodejs/node-gyp/commit/077361502933fcb994ca365c3c07c03177503df2))
+
+## [11.3.0](https://github.com/nodejs/node-gyp/compare/v11.2.0...v11.3.0) (2025-07-29)
+
+
+### Features
+
+* update gyp-next to v0.20.2 ([#3169](https://github.com/nodejs/node-gyp/issues/3169)) ([0e65632](https://github.com/nodejs/node-gyp/commit/0e656322c1e94041331ab3b01bf66c2ef9bd6ead))
+
+
+### Bug Fixes
+
+* Correct Visual Studio 2019 test version ([#3153](https://github.com/nodejs/node-gyp/issues/3153)) ([7d883b5](https://github.com/nodejs/node-gyp/commit/7d883b5cf4c26e76065201f85b0be36d5ebdcc0e))
+* Normalize win32 library names ([#3189](https://github.com/nodejs/node-gyp/issues/3189)) ([b81a665](https://github.com/nodejs/node-gyp/commit/b81a665acfb9d88102e8044a8ec8ca74a3e9eccc))
+* use temp dir for tar extraction on all platforms ([#3170](https://github.com/nodejs/node-gyp/issues/3170)) ([b41864f](https://github.com/nodejs/node-gyp/commit/b41864f7c1c60e4a160c1b4dd91558dcaa3f74e4)), closes [#3165](https://github.com/nodejs/node-gyp/issues/3165)
+
+
+### Miscellaneous
+
+* retry wasi-sdk download in CI ([#3151](https://github.com/nodejs/node-gyp/issues/3151)) ([8f3cd8b](https://github.com/nodejs/node-gyp/commit/8f3cd8b3a157bccd8d7110e7d46a27c2926625cd))
+* Windows 2019 has been removed from GitHub Actions ([#3190](https://github.com/nodejs/node-gyp/issues/3190)) ([3df8789](https://github.com/nodejs/node-gyp/commit/3df8789a9aa73c60707eec8f02f4e926491d6102))
+
## [11.2.0](https://github.com/nodejs/node-gyp/compare/v11.1.0...v11.2.0) (2025-04-01)
diff --git a/deps/npm/node_modules/node-gyp/README.md b/deps/npm/node_modules/node-gyp/README.md
index 474c59b458941f..72833b13638c1f 100644
--- a/deps/npm/node_modules/node-gyp/README.md
+++ b/deps/npm/node_modules/node-gyp/README.md
@@ -235,9 +235,24 @@ Some additional resources for Node.js native addons and writing `gyp` configurat
## Configuration
+### package.json
+
+Use the `config` object in your package.json with each key in the form `node_gyp_OPTION_NAME`. Any of the command
+options listed above can be set (dashes in option names should be replaced by underscores).
+
+For example, to set `devdir` equal to `/tmp/.gyp`, your package.json would contain this:
+
+```json
+{
+ "config": {
+ "node_gyp_devdir": "/tmp/.gyp"
+ }
+}
+```
+
### Environment variables
-Use the form `npm_config_OPTION_NAME` for any of the command options listed
+Use the form `npm_package_config_node_gyp_OPTION_NAME` for any of the command options listed
above (dashes in option names should be replaced by underscores).
For example, to set `devdir` equal to `/tmp/.gyp`, you would:
@@ -245,15 +260,19 @@ For example, to set `devdir` equal to `/tmp/.gyp`, you would:
Run this on Unix:
```bash
-export npm_config_devdir=/tmp/.gyp
+export npm_package_config_node_gyp_devdir=/tmp/.gyp
```
Or this on Windows:
```console
-set npm_config_devdir=c:\temp\.gyp
+set npm_package_config_node_gyp_devdir=c:\temp\.gyp
```
+Note that in versions of npm before v11 it was possible to use the prefix `npm_config_` for
+environment variables. This was deprecated in npm@11 and will be removed in npm@12 so it
+is recommended to convert your environment variables to the above format.
+
### `npm` configuration for npm versions before v9
Use the form `OPTION_NAME` for any of the command options listed above.
diff --git a/deps/npm/node_modules/node-gyp/addon.gypi b/deps/npm/node_modules/node-gyp/addon.gypi
index b4ac369acb4f13..4f112df81c7716 100644
--- a/deps/npm/node_modules/node-gyp/addon.gypi
+++ b/deps/npm/node_modules/node-gyp/addon.gypi
@@ -179,7 +179,7 @@
'-loleaut32.lib',
'-luuid.lib',
'-lodbc32.lib',
- '-lDelayImp.lib',
+ '-ldelayimp.lib',
'-l"<(node_lib_file)"'
],
'msvs_disabled_warnings': [
@@ -195,7 +195,7 @@
'_FILE_OFFSET_BITS=64'
],
}],
- [ 'OS in "freebsd openbsd netbsd solaris android" or \
+ [ 'OS in "freebsd openbsd netbsd solaris android openharmony" or \
(OS=="linux" and target_arch!="ia32")', {
'cflags': [ '-fPIC' ],
}],
diff --git a/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json b/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json
index 589cd4553e1bde..dfc532112efe72 100644
--- a/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json
+++ b/deps/npm/node_modules/node-gyp/gyp/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.20.0"
+ ".": "0.20.5"
}
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
index bc0e93d07f8900..f8e4993d94cdfb 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
@@ -32,18 +32,18 @@ def cmp(x, y):
def MakeGuid(name, seed="msvs_new"):
"""Returns a GUID for the specified target name.
- Args:
- name: Target name.
- seed: Seed for MD5 hash.
- Returns:
- A GUID-line string calculated from the name and seed.
-
- This generates something which looks like a GUID, but depends only on the
- name and seed. This means the same name/seed will always generate the same
- GUID, so that projects and solutions which refer to each other can explicitly
- determine the GUID to refer to explicitly. It also means that the GUID will
- not change when the project for a target is rebuilt.
- """
+ Args:
+ name: Target name.
+ seed: Seed for MD5 hash.
+ Returns:
+ A GUID-line string calculated from the name and seed.
+
+ This generates something which looks like a GUID, but depends only on the
+ name and seed. This means the same name/seed will always generate the same
+ GUID, so that projects and solutions which refer to each other can explicitly
+ determine the GUID to refer to explicitly. It also means that the GUID will
+ not change when the project for a target is rebuilt.
+ """
# Calculate a MD5 signature for the seed and name.
d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
# Convert most of the signature to GUID form (discard the rest)
@@ -78,15 +78,15 @@ class MSVSFolder(MSVSSolutionEntry):
def __init__(self, path, name=None, entries=None, guid=None, items=None):
"""Initializes the folder.
- Args:
- path: Full path to the folder.
- name: Name of the folder.
- entries: List of folder entries to nest inside this folder. May contain
- Folder or Project objects. May be None, if the folder is empty.
- guid: GUID to use for folder, if not None.
- items: List of solution items to include in the folder project. May be
- None, if the folder does not directly contain items.
- """
+ Args:
+ path: Full path to the folder.
+ name: Name of the folder.
+ entries: List of folder entries to nest inside this folder. May contain
+ Folder or Project objects. May be None, if the folder is empty.
+ guid: GUID to use for folder, if not None.
+ items: List of solution items to include in the folder project. May be
+ None, if the folder does not directly contain items.
+ """
if name:
self.name = name
else:
@@ -128,19 +128,19 @@ def __init__(
):
"""Initializes the project.
- Args:
- path: Absolute path to the project file.
- name: Name of project. If None, the name will be the same as the base
- name of the project file.
- dependencies: List of other Project objects this project is dependent
- upon, if not None.
- guid: GUID to use for project, if not None.
- spec: Dictionary specifying how to build this project.
- build_file: Filename of the .gyp file that the vcproj file comes from.
- config_platform_overrides: optional dict of configuration platforms to
- used in place of the default for this target.
- fixpath_prefix: the path used to adjust the behavior of _fixpath
- """
+ Args:
+ path: Absolute path to the project file.
+ name: Name of project. If None, the name will be the same as the base
+ name of the project file.
+ dependencies: List of other Project objects this project is dependent
+ upon, if not None.
+ guid: GUID to use for project, if not None.
+ spec: Dictionary specifying how to build this project.
+ build_file: Filename of the .gyp file that the vcproj file comes from.
+ config_platform_overrides: optional dict of configuration platforms to
+ used in place of the default for this target.
+ fixpath_prefix: the path used to adjust the behavior of _fixpath
+ """
self.path = path
self.guid = guid
self.spec = spec
@@ -195,16 +195,16 @@ def __init__(
):
"""Initializes the solution.
- Args:
- path: Path to solution file.
- version: Format version to emit.
- entries: List of entries in solution. May contain Folder or Project
- objects. May be None, if the folder is empty.
- variants: List of build variant strings. If none, a default list will
- be used.
- websiteProperties: Flag to decide if the website properties section
- is generated.
- """
+ Args:
+ path: Path to solution file.
+ version: Format version to emit.
+ entries: List of entries in solution. May contain Folder or Project
+ objects. May be None, if the folder is empty.
+ variants: List of build variant strings. If none, a default list will
+ be used.
+ websiteProperties: Flag to decide if the website properties section
+ is generated.
+ """
self.path = path
self.websiteProperties = websiteProperties
self.version = version
@@ -230,9 +230,9 @@ def __init__(
def Write(self, writer=gyp.common.WriteOnDiff):
"""Writes the solution file to disk.
- Raises:
- IndexError: An entry appears multiple times.
- """
+ Raises:
+ IndexError: An entry appears multiple times.
+ """
# Walk the entry tree and collect all the folders and projects.
all_entries = set()
entries_to_check = self.entries[:]
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
index 339d27d4029fcf..17bb2bbdb8a555 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
@@ -15,19 +15,19 @@ class Tool:
def __init__(self, name, attrs=None):
"""Initializes the tool.
- Args:
- name: Tool name.
- attrs: Dict of tool attributes; may be None.
- """
+ Args:
+ name: Tool name.
+ attrs: Dict of tool attributes; may be None.
+ """
self._attrs = attrs or {}
self._attrs["Name"] = name
def _GetSpecification(self):
"""Creates an element for the tool.
- Returns:
- A new xml.dom.Element for the tool.
- """
+ Returns:
+ A new xml.dom.Element for the tool.
+ """
return ["Tool", self._attrs]
@@ -37,10 +37,10 @@ class Filter:
def __init__(self, name, contents=None):
"""Initializes the folder.
- Args:
- name: Filter (folder) name.
- contents: List of filenames and/or Filter objects contained.
- """
+ Args:
+ name: Filter (folder) name.
+ contents: List of filenames and/or Filter objects contained.
+ """
self.name = name
self.contents = list(contents or [])
@@ -54,13 +54,13 @@ class Writer:
def __init__(self, project_path, version, name, guid=None, platforms=None):
"""Initializes the project.
- Args:
- project_path: Path to the project file.
- version: Format version to emit.
- name: Name of the project.
- guid: GUID to use for project, if not None.
- platforms: Array of string, the supported platforms. If null, ['Win32']
- """
+ Args:
+ project_path: Path to the project file.
+ version: Format version to emit.
+ name: Name of the project.
+ guid: GUID to use for project, if not None.
+ platforms: Array of string, the supported platforms. If null, ['Win32']
+ """
self.project_path = project_path
self.version = version
self.name = name
@@ -84,21 +84,21 @@ def __init__(self, project_path, version, name, guid=None, platforms=None):
def AddToolFile(self, path):
"""Adds a tool file to the project.
- Args:
- path: Relative path from project to tool file.
- """
+ Args:
+ path: Relative path from project to tool file.
+ """
self.tool_files_section.append(["ToolFile", {"RelativePath": path}])
def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
"""Returns the specification for a configuration.
- Args:
- config_type: Type of configuration node.
- config_name: Configuration name.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
- Returns:
- """
+ Args:
+ config_type: Type of configuration node.
+ config_name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ Returns:
+ """
# Handle defaults
if not attrs:
attrs = {}
@@ -122,23 +122,23 @@ def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
def AddConfig(self, name, attrs=None, tools=None):
"""Adds a configuration to the project.
- Args:
- name: Configuration name.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
- """
+ Args:
+ name: Configuration name.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
+ """
spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
self.configurations_section.append(spec)
def _AddFilesToNode(self, parent, files):
"""Adds files and/or filters to the parent node.
- Args:
- parent: Destination node
- files: A list of Filter objects and/or relative paths to files.
+ Args:
+ parent: Destination node
+ files: A list of Filter objects and/or relative paths to files.
- Will call itself recursively, if the files list contains Filter objects.
- """
+ Will call itself recursively, if the files list contains Filter objects.
+ """
for f in files:
if isinstance(f, Filter):
node = ["Filter", {"Name": f.name}]
@@ -151,13 +151,13 @@ def _AddFilesToNode(self, parent, files):
def AddFiles(self, files):
"""Adds files to the project.
- Args:
- files: A list of Filter objects and/or relative paths to files.
+ Args:
+ files: A list of Filter objects and/or relative paths to files.
- This makes a copy of the file/filter tree at the time of this call. If you
- later add files to a Filter object which was passed into a previous call
- to AddFiles(), it will not be reflected in this project.
- """
+ This makes a copy of the file/filter tree at the time of this call. If you
+ later add files to a Filter object which was passed into a previous call
+ to AddFiles(), it will not be reflected in this project.
+ """
self._AddFilesToNode(self.files_section, files)
# TODO(rspangler) This also doesn't handle adding files to an existing
# filter. That is, it doesn't merge the trees.
@@ -165,15 +165,15 @@ def AddFiles(self, files):
def AddFileConfig(self, path, config, attrs=None, tools=None):
"""Adds a configuration to a file.
- Args:
- path: Relative path to the file.
- config: Name of configuration to add.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
+ Args:
+ path: Relative path to the file.
+ config: Name of configuration to add.
+ attrs: Dict of configuration attributes; may be None.
+ tools: List of tools (strings or Tool objects); may be None.
- Raises:
- ValueError: Relative path does not match any file added via AddFiles().
- """
+ Raises:
+ ValueError: Relative path does not match any file added via AddFiles().
+ """
# Find the file node with the right relative path
parent = self.files_dict.get(path)
if not parent:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
index fea6e672865bfe..155fc3a1cbc693 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -35,10 +35,10 @@
class _Tool:
"""Represents a tool used by MSVS or MSBuild.
- Attributes:
- msvs_name: The name of the tool in MSVS.
- msbuild_name: The name of the tool in MSBuild.
- """
+ Attributes:
+ msvs_name: The name of the tool in MSVS.
+ msbuild_name: The name of the tool in MSBuild.
+ """
def __init__(self, msvs_name, msbuild_name):
self.msvs_name = msvs_name
@@ -48,11 +48,11 @@ def __init__(self, msvs_name, msbuild_name):
def _AddTool(tool):
"""Adds a tool to the four dictionaries used to process settings.
- This only defines the tool. Each setting also needs to be added.
+ This only defines the tool. Each setting also needs to be added.
- Args:
- tool: The _Tool object to be added.
- """
+ Args:
+ tool: The _Tool object to be added.
+ """
_msvs_validators[tool.msvs_name] = {}
_msbuild_validators[tool.msbuild_name] = {}
_msvs_to_msbuild_converters[tool.msvs_name] = {}
@@ -70,35 +70,35 @@ class _Type:
def ValidateMSVS(self, value):
"""Verifies that the value is legal for MSVS.
- Args:
- value: the value to check for this type.
+ Args:
+ value: the value to check for this type.
- Raises:
- ValueError if value is not valid for MSVS.
- """
+ Raises:
+ ValueError if value is not valid for MSVS.
+ """
def ValidateMSBuild(self, value):
"""Verifies that the value is legal for MSBuild.
- Args:
- value: the value to check for this type.
+ Args:
+ value: the value to check for this type.
- Raises:
- ValueError if value is not valid for MSBuild.
- """
+ Raises:
+ ValueError if value is not valid for MSBuild.
+ """
def ConvertToMSBuild(self, value):
"""Returns the MSBuild equivalent of the MSVS value given.
- Args:
- value: the MSVS value to convert.
+ Args:
+ value: the MSVS value to convert.
- Returns:
- the MSBuild equivalent.
+ Returns:
+ the MSBuild equivalent.
- Raises:
- ValueError if value is not valid.
- """
+ Raises:
+ ValueError if value is not valid.
+ """
return value
@@ -178,15 +178,15 @@ def ConvertToMSBuild(self, value):
class _Enumeration(_Type):
"""Type of settings that is an enumeration.
- In MSVS, the values are indexes like '0', '1', and '2'.
- MSBuild uses text labels that are more representative, like 'Win32'.
+ In MSVS, the values are indexes like '0', '1', and '2'.
+ MSBuild uses text labels that are more representative, like 'Win32'.
- Constructor args:
- label_list: an array of MSBuild labels that correspond to the MSVS index.
- In the rare cases where MSVS has skipped an index value, None is
- used in the array to indicate the unused spot.
- new: an array of labels that are new to MSBuild.
- """
+ Constructor args:
+ label_list: an array of MSBuild labels that correspond to the MSVS index.
+ In the rare cases where MSVS has skipped an index value, None is
+ used in the array to indicate the unused spot.
+ new: an array of labels that are new to MSBuild.
+ """
def __init__(self, label_list, new=None):
_Type.__init__(self)
@@ -234,23 +234,23 @@ def ConvertToMSBuild(self, value):
def _Same(tool, name, setting_type):
"""Defines a setting that has the same name in MSVS and MSBuild.
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
_Renamed(tool, name, name, setting_type)
def _Renamed(tool, msvs_name, msbuild_name, setting_type):
"""Defines a setting for which the name has changed.
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- msvs_name: the name of the MSVS setting.
- msbuild_name: the name of the MSBuild setting.
- setting_type: the type of this setting.
- """
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting.
+ msbuild_name: the name of the MSBuild setting.
+ setting_type: the type of this setting.
+ """
def _Translate(value, msbuild_settings):
msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
@@ -272,13 +272,13 @@ def _MovedAndRenamed(
):
"""Defines a setting that may have moved to a new section.
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- msvs_settings_name: the MSVS name of the setting.
- msbuild_tool_name: the name of the MSBuild tool to place the setting under.
- msbuild_settings_name: the MSBuild name of the setting.
- setting_type: the type of this setting.
- """
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_settings_name: the MSVS name of the setting.
+ msbuild_tool_name: the name of the MSBuild tool to place the setting under.
+ msbuild_settings_name: the MSBuild name of the setting.
+ setting_type: the type of this setting.
+ """
def _Translate(value, msbuild_settings):
tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
@@ -293,11 +293,11 @@ def _Translate(value, msbuild_settings):
def _MSVSOnly(tool, name, setting_type):
"""Defines a setting that is only found in MSVS.
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
def _Translate(unused_value, unused_msbuild_settings):
# Since this is for MSVS only settings, no translation will happen.
@@ -310,11 +310,11 @@ def _Translate(unused_value, unused_msbuild_settings):
def _MSBuildOnly(tool, name, setting_type):
"""Defines a setting that is only found in MSBuild.
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ name: the name of the setting.
+ setting_type: the type of this setting.
+ """
def _Translate(value, msbuild_settings):
# Let msbuild-only properties get translated as-is from msvs_settings.
@@ -328,11 +328,11 @@ def _Translate(value, msbuild_settings):
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
"""Defines a setting that's handled via a command line option in MSBuild.
- Args:
- tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
- msvs_name: the name of the MSVS setting that if 'true' becomes a flag
- flag: the flag to insert at the end of the AdditionalOptions
- """
+ Args:
+ tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+ msvs_name: the name of the MSVS setting that if 'true' becomes a flag
+ flag: the flag to insert at the end of the AdditionalOptions
+ """
def _Translate(value, msbuild_settings):
if value == "true":
@@ -384,20 +384,19 @@ def _Translate(value, msbuild_settings):
def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
"""Verify that 'setting' is valid if it is generated from an exclusion list.
- If the setting appears to be generated from an exclusion list, the root name
- is checked.
+ If the setting appears to be generated from an exclusion list, the root name
+ is checked.
- Args:
- setting: A string that is the setting name to validate
- settings: A dictionary where the keys are valid settings
- error_msg: The message to emit in the event of error
- stderr: The stream receiving the error messages.
- """
+ Args:
+ setting: A string that is the setting name to validate
+ settings: A dictionary where the keys are valid settings
+ error_msg: The message to emit in the event of error
+ stderr: The stream receiving the error messages.
+ """
# This may be unrecognized because it's an exclusion list. If the
# setting name has the _excluded suffix, then check the root name.
unrecognized = True
- m = re.match(_EXCLUDED_SUFFIX_RE, setting)
- if m:
+ if m := re.match(_EXCLUDED_SUFFIX_RE, setting):
root_setting = m.group(1)
unrecognized = root_setting not in settings
@@ -409,11 +408,11 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
def FixVCMacroSlashes(s):
"""Replace macros which have excessive following slashes.
- These macros are known to have a built-in trailing slash. Furthermore, many
- scripts hiccup on processing paths with extra slashes in the middle.
+ These macros are known to have a built-in trailing slash. Furthermore, many
+ scripts hiccup on processing paths with extra slashes in the middle.
- This list is probably not exhaustive. Add as needed.
- """
+ This list is probably not exhaustive. Add as needed.
+ """
if "$" in s:
s = fix_vc_macro_slashes_regex.sub(r"\1", s)
return s
@@ -422,8 +421,8 @@ def FixVCMacroSlashes(s):
def ConvertVCMacrosToMSBuild(s):
"""Convert the MSVS macros found in the string to the MSBuild equivalent.
- This list is probably not exhaustive. Add as needed.
- """
+ This list is probably not exhaustive. Add as needed.
+ """
if "$" in s:
replace_map = {
"$(ConfigurationName)": "$(Configuration)",
@@ -445,16 +444,16 @@ def ConvertVCMacrosToMSBuild(s):
def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
"""Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
- Args:
- msvs_settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
+ Args:
+ msvs_settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
- Returns:
- A dictionary of MSBuild settings. The key is either the MSBuild tool name
- or the empty string (for the global settings). The values are themselves
- dictionaries of settings and their values.
- """
+ Returns:
+ A dictionary of MSBuild settings. The key is either the MSBuild tool name
+ or the empty string (for the global settings). The values are themselves
+ dictionaries of settings and their values.
+ """
msbuild_settings = {}
for msvs_tool_name, msvs_tool_settings in msvs_settings.items():
if msvs_tool_name in _msvs_to_msbuild_converters:
@@ -493,36 +492,36 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
def ValidateMSVSSettings(settings, stderr=sys.stderr):
"""Validates that the names of the settings are valid for MSVS.
- Args:
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
_ValidateSettings(_msvs_validators, settings, stderr)
def ValidateMSBuildSettings(settings, stderr=sys.stderr):
"""Validates that the names of the settings are valid for MSBuild.
- Args:
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
+ Args:
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
_ValidateSettings(_msbuild_validators, settings, stderr)
def _ValidateSettings(validators, settings, stderr):
"""Validates that the settings are valid for MSBuild or MSVS.
- We currently only validate the names of the settings, not their values.
+ We currently only validate the names of the settings, not their values.
- Args:
- validators: A dictionary of tools and their validators.
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
+ Args:
+ validators: A dictionary of tools and their validators.
+ settings: A dictionary. The key is the tool name. The values are
+ themselves dictionaries of settings and their values.
+ stderr: The stream receiving the error messages.
+ """
for tool_name in settings:
if tool_name in validators:
tool_validators = validators[tool_name]
@@ -638,7 +637,9 @@ def _ValidateSettings(validators, settings, stderr):
),
) # /RTC1
_Same(
- _compile, "BrowseInformation", _Enumeration(["false", "true", "true"]) # /FR
+ _compile,
+ "BrowseInformation",
+ _Enumeration(["false", "true", "true"]), # /FR
) # /Fr
_Same(
_compile,
@@ -696,7 +697,9 @@ def _ValidateSettings(validators, settings, stderr):
_Enumeration(["false", "Sync", "Async"], new=["SyncCThrow"]), # /EHsc # /EHa
) # /EHs
_Same(
- _compile, "FavorSizeOrSpeed", _Enumeration(["Neither", "Speed", "Size"]) # /Ot
+ _compile,
+ "FavorSizeOrSpeed",
+ _Enumeration(["Neither", "Speed", "Size"]), # /Ot
) # /Os
_Same(
_compile,
@@ -909,7 +912,9 @@ def _ValidateSettings(validators, settings, stderr):
) # /MACHINE:X64
_Same(
- _link, "AssemblyDebug", _Enumeration(["", "true", "false"]) # /ASSEMBLYDEBUG
+ _link,
+ "AssemblyDebug",
+ _Enumeration(["", "true", "false"]), # /ASSEMBLYDEBUG
) # /ASSEMBLYDEBUG:DISABLE
_Same(
_link,
@@ -1159,17 +1164,23 @@ def _ValidateSettings(validators, settings, stderr):
_MSBuildOnly(_midl, "ApplicationConfigurationMode", _boolean) # /app_config
_MSBuildOnly(_midl, "ClientStubFile", _file_name) # /cstub
_MSBuildOnly(
- _midl, "GenerateClientFiles", _Enumeration([], new=["Stub", "None"]) # /client stub
+ _midl,
+ "GenerateClientFiles",
+ _Enumeration([], new=["Stub", "None"]), # /client stub
) # /client none
_MSBuildOnly(
- _midl, "GenerateServerFiles", _Enumeration([], new=["Stub", "None"]) # /client stub
+ _midl,
+ "GenerateServerFiles",
+ _Enumeration([], new=["Stub", "None"]), # /client stub
) # /client none
_MSBuildOnly(_midl, "LocaleID", _integer) # /lcid DECIMAL
_MSBuildOnly(_midl, "ServerStubFile", _file_name) # /sstub
_MSBuildOnly(_midl, "SuppressCompilerWarnings", _boolean) # /no_warn
_MSBuildOnly(_midl, "TrackerLogDirectory", _folder_name)
_MSBuildOnly(
- _midl, "TypeLibFormat", _Enumeration([], new=["NewFormat", "OldFormat"]) # /newtlb
+ _midl,
+ "TypeLibFormat",
+ _Enumeration([], new=["NewFormat", "OldFormat"]), # /newtlb
) # /oldtlb
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
index 0504728d994ca8..0e661995fbcd99 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
@@ -1143,47 +1143,47 @@ def testConvertToMSBuildSettings_full_synthetic(self):
def testConvertToMSBuildSettings_actual(self):
"""Tests the conversion of an actual project.
- A VS2008 project with most of the options defined was created through the
- VS2008 IDE. It was then converted to VS2010. The tool settings found in
- the .vcproj and .vcxproj files were converted to the two dictionaries
- msvs_settings and expected_msbuild_settings.
+ A VS2008 project with most of the options defined was created through the
+ VS2008 IDE. It was then converted to VS2010. The tool settings found in
+ the .vcproj and .vcxproj files were converted to the two dictionaries
+ msvs_settings and expected_msbuild_settings.
- Note that for many settings, the VS2010 converter adds macros like
- %(AdditionalIncludeDirectories) to make sure than inherited values are
- included. Since the Gyp projects we generate do not use inheritance,
- we removed these macros. They were:
- ClCompile:
- AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
- AdditionalOptions: ' %(AdditionalOptions)'
- AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
- DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
- ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
- ForcedUsingFiles: ';%(ForcedUsingFiles)',
- PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
- UndefinePreprocessorDefinitions:
- ';%(UndefinePreprocessorDefinitions)',
- Link:
- AdditionalDependencies: ';%(AdditionalDependencies)',
- AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
- AdditionalManifestDependencies:
- ';%(AdditionalManifestDependencies)',
- AdditionalOptions: ' %(AdditionalOptions)',
- AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
- AssemblyLinkResource: ';%(AssemblyLinkResource)',
- DelayLoadDLLs: ';%(DelayLoadDLLs)',
- EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
- ForceSymbolReferences: ';%(ForceSymbolReferences)',
- IgnoreSpecificDefaultLibraries:
- ';%(IgnoreSpecificDefaultLibraries)',
- ResourceCompile:
- AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
- AdditionalOptions: ' %(AdditionalOptions)',
- PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
- Manifest:
- AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
- AdditionalOptions: ' %(AdditionalOptions)',
- InputResourceManifests: ';%(InputResourceManifests)',
- """
+ Note that for many settings, the VS2010 converter adds macros like
+ %(AdditionalIncludeDirectories) to make sure than inherited values are
+ included. Since the Gyp projects we generate do not use inheritance,
+ we removed these macros. They were:
+ ClCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
+ AdditionalOptions: ' %(AdditionalOptions)'
+ AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
+ DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
+ ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
+ ForcedUsingFiles: ';%(ForcedUsingFiles)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ UndefinePreprocessorDefinitions:
+ ';%(UndefinePreprocessorDefinitions)',
+ Link:
+ AdditionalDependencies: ';%(AdditionalDependencies)',
+ AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
+ AdditionalManifestDependencies:
+ ';%(AdditionalManifestDependencies)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
+ AssemblyLinkResource: ';%(AssemblyLinkResource)',
+ DelayLoadDLLs: ';%(DelayLoadDLLs)',
+ EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
+ ForceSymbolReferences: ';%(ForceSymbolReferences)',
+ IgnoreSpecificDefaultLibraries:
+ ';%(IgnoreSpecificDefaultLibraries)',
+ ResourceCompile:
+ AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
+ Manifest:
+ AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
+ AdditionalOptions: ' %(AdditionalOptions)',
+ InputResourceManifests: ';%(InputResourceManifests)',
+ """
msvs_settings = {
"VCCLCompilerTool": {
"AdditionalIncludeDirectories": "dir1",
@@ -1346,8 +1346,7 @@ def testConvertToMSBuildSettings_actual(self):
"EmbedManifest": "false",
"GenerateCatalogFiles": "true",
"InputResourceManifests": "asfsfdafs",
- "ManifestResourceFile":
- "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf",
+ "ManifestResourceFile": "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf", # noqa: E501
"OutputManifestFile": "$(TargetPath).manifestdfs",
"RegistrarScriptFile": "sdfsfd",
"ReplacementsFile": "sdffsd",
@@ -1531,8 +1530,7 @@ def testConvertToMSBuildSettings_actual(self):
"LinkIncremental": "",
},
"ManifestResourceCompile": {
- "ResourceOutputFileName":
- "$(IntDir)$(TargetFileName).embed.manifest.resfdsf"
+ "ResourceOutputFileName": "$(IntDir)$(TargetFileName).embed.manifest.resfdsf" # noqa: E501
},
}
self.maxDiff = 9999 # on failure display a long diff
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
index 901ba84588589b..61ca37c12d09d5 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
@@ -13,10 +13,10 @@ class Writer:
def __init__(self, tool_file_path, name):
"""Initializes the tool file.
- Args:
- tool_file_path: Path to the tool file.
- name: Name of the tool file.
- """
+ Args:
+ tool_file_path: Path to the tool file.
+ name: Name of the tool file.
+ """
self.tool_file_path = tool_file_path
self.name = name
self.rules_section = ["Rules"]
@@ -26,14 +26,14 @@ def AddCustomBuildRule(
):
"""Adds a rule to the tool file.
- Args:
- name: Name of the rule.
- description: Description of the rule.
- cmd: Command line of the rule.
- additional_dependencies: other files which may trigger the rule.
- outputs: outputs of the rule.
- extensions: extensions handled by the rule.
- """
+ Args:
+ name: Name of the rule.
+ description: Description of the rule.
+ cmd: Command line of the rule.
+ additional_dependencies: other files which may trigger the rule.
+ outputs: outputs of the rule.
+ extensions: extensions handled by the rule.
+ """
rule = [
"CustomBuildRule",
{
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
index 23d3e16953c43a..b93613bd1d2e4e 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -15,11 +15,11 @@
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
- searches the PATH env to find the given command, and converts it
- to an absolute path. We have to do this because MSVS is looking
- for an actual file to launch a debugger on, not just a command
- line. Note that this happens at GYP time, so anything needing to
- be built needs to have a full path."""
+ searches the PATH env to find the given command, and converts it
+ to an absolute path. We have to do this because MSVS is looking
+ for an actual file to launch a debugger on, not just a command
+ line. Note that this happens at GYP time, so anything needing to
+ be built needs to have a full path."""
if "/" in command or "\\" in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
@@ -58,11 +58,11 @@ class Writer:
def __init__(self, user_file_path, version, name):
"""Initializes the user file.
- Args:
- user_file_path: Path to the user file.
- version: Version info.
- name: Name of the user file.
- """
+ Args:
+ user_file_path: Path to the user file.
+ version: Version info.
+ name: Name of the user file.
+ """
self.user_file_path = user_file_path
self.version = version
self.name = name
@@ -71,9 +71,9 @@ def __init__(self, user_file_path, version, name):
def AddConfig(self, name):
"""Adds a configuration to the project.
- Args:
- name: Configuration name.
- """
+ Args:
+ name: Configuration name.
+ """
self.configurations[name] = ["Configuration", {"Name": name}]
def AddDebugSettings(
@@ -81,12 +81,12 @@ def AddDebugSettings(
):
"""Adds a DebugSettings node to the user file for a particular config.
- Args:
- command: command line to run. First element in the list is the
- executable. All elements of the command will be quoted if
- necessary.
- working_directory: other files which may trigger the rule. (optional)
- """
+ Args:
+ command: command line to run. First element in the list is the
+ executable. All elements of the command will be quoted if
+ necessary.
+ working_directory: other files which may trigger the rule. (optional)
+ """
command = _QuoteWin32CommandLineArgs(command)
abs_command = _FindCommandInPath(command[0])
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
index 27647f11d07467..5a1b4ae3198d6c 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -29,13 +29,13 @@ def _GetLargePdbShimCcPath():
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
- Arguments:
- in_dict: The dictionary to copy.
- keys: The keys to be copied. If a key is in this list and doesn't exist in
- |in_dict| this is not an error.
- Returns:
- The partially deep-copied dictionary.
- """
+ Arguments:
+ in_dict: The dictionary to copy.
+ keys: The keys to be copied. If a key is in this list and doesn't exist in
+ |in_dict| this is not an error.
+ Returns:
+ The partially deep-copied dictionary.
+ """
d = {}
for key in keys:
if key not in in_dict:
@@ -47,12 +47,12 @@ def _DeepCopySomeKeys(in_dict, keys):
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
- Arguments:
- name: name of the target (foo#target)
- suffix: the suffix to be added
- Returns:
- Target name with suffix added (foo_suffix#target)
- """
+ Arguments:
+ name: name of the target (foo#target)
+ suffix: the suffix to be added
+ Returns:
+ Target name with suffix added (foo_suffix#target)
+ """
parts = name.rsplit("#", 1)
parts[0] = f"{parts[0]}_{suffix}"
return "#".join(parts)
@@ -61,24 +61,24 @@ def _SuffixName(name, suffix):
def _ShardName(name, number):
"""Add a shard number to the end of a target.
- Arguments:
- name: name of the target (foo#target)
- number: shard number
- Returns:
- Target name with shard added (foo_1#target)
- """
+ Arguments:
+ name: name of the target (foo#target)
+ number: shard number
+ Returns:
+ Target name with shard added (foo_1#target)
+ """
return _SuffixName(name, str(number))
def ShardTargets(target_list, target_dicts):
"""Shard some targets apart to work around the linkers limits.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- Returns:
- Tuple of the new sharded versions of the inputs.
- """
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ Returns:
+ Tuple of the new sharded versions of the inputs.
+ """
# Gather the targets to shard, and how many pieces.
targets_to_shard = {}
for t in target_dicts:
@@ -128,22 +128,22 @@ def ShardTargets(target_list, target_dicts):
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
- configuration.
-
- The lookup proceeds as follows:
- - Look for an explicit path in the VCLinkerTool configuration block.
- - Look for an 'msvs_large_pdb_path' variable.
- - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
- specified.
- - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
-
- Arguments:
- target_dict: The target dictionary to be searched.
- config_name: The name of the configuration of interest.
- vars: A dictionary of common GYP variables with generator-specific values.
- Returns:
- The path of the corresponding PDB file.
- """
+ configuration.
+
+ The lookup proceeds as follows:
+ - Look for an explicit path in the VCLinkerTool configuration block.
+ - Look for an 'msvs_large_pdb_path' variable.
+ - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
+ specified.
+ - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
+
+ Arguments:
+ target_dict: The target dictionary to be searched.
+ config_name: The name of the configuration of interest.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ The path of the corresponding PDB file.
+ """
config = target_dict["configurations"][config_name]
msvs = config.setdefault("msvs_settings", {})
@@ -168,16 +168,16 @@ def _GetPdbPath(target_dict, config_name, vars):
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
- This is a workaround for targets with PDBs greater than 1GB in size, the
- limit for the 1KB pagesize PDBs created by the linker by default.
+ This is a workaround for targets with PDBs greater than 1GB in size, the
+ limit for the 1KB pagesize PDBs created by the linker by default.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- vars: A dictionary of common GYP variables with generator-specific values.
- Returns:
- Tuple of the shimmed version of the inputs.
- """
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ Tuple of the shimmed version of the inputs.
+ """
# Determine which targets need shimming.
targets_to_shim = []
for t in target_dicts:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
index 93f48bc05c8dc5..09baf44b2b0f8a 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -76,17 +76,17 @@ def Path(self):
return self.path
def ToolPath(self, tool):
- """Returns the path to a given compiler tool. """
+ """Returns the path to a given compiler tool."""
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
def DefaultToolset(self):
"""Returns the msbuild toolset version that will be used in the absence
- of a user override."""
+ of a user override."""
return self.default_toolset
def _SetupScriptInternal(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
- environment."""
+ environment."""
assert target_arch in ("x86", "x64"), "target_arch not supported"
# If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
# depot_tools build tools and should run SetEnv.Cmd to set up the
@@ -154,16 +154,16 @@ def SetupScript(self, target_arch):
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
- While ideally we might use the win32 module, we would like gyp to be
- python neutral, so for instance cygwin python lacks this module.
+ While ideally we might use the win32 module, we would like gyp to be
+ python neutral, so for instance cygwin python lacks this module.
- Arguments:
- sysdir: The system subdirectory to attempt to launch reg.exe from.
- key: The registry key to read from.
- value: The particular value to read.
- Return:
- stdout from reg.exe, or None for failure.
- """
+ Arguments:
+ sysdir: The system subdirectory to attempt to launch reg.exe from.
+ key: The registry key to read from.
+ value: The particular value to read.
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ("win32", "cygwin"):
return None
@@ -184,20 +184,20 @@ def _RegistryQueryBase(sysdir, key, value):
def _RegistryQuery(key, value=None):
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
- First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
- that fails, it falls back to System32. Sysnative is available on Vista and
- up and available on Windows Server 2003 and XP through KB patch 942589. Note
- that Sysnative will always fail if using 64-bit python due to it being a
- virtual directory and System32 will work correctly in the first place.
+ First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
+ that fails, it falls back to System32. Sysnative is available on Vista and
+ up and available on Windows Server 2003 and XP through KB patch 942589. Note
+ that Sysnative will always fail if using 64-bit python due to it being a
+ virtual directory and System32 will work correctly in the first place.
- KB 942589 - http://support.microsoft.com/kb/942589/en-us.
+ KB 942589 - http://support.microsoft.com/kb/942589/en-us.
- Arguments:
- key: The registry key.
- value: The particular registry value to read (optional).
- Return:
- stdout from reg.exe, or None for failure.
- """
+ Arguments:
+ key: The registry key.
+ value: The particular registry value to read (optional).
+ Return:
+ stdout from reg.exe, or None for failure.
+ """
text = None
try:
text = _RegistryQueryBase("Sysnative", key, value)
@@ -212,14 +212,15 @@ def _RegistryQuery(key, value=None):
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
- Args:
- key: The registry key.
- value: The particular registry value to read.
- Return:
- contents of the registry key's value, or None on failure. Throws
- ImportError if winreg is unavailable.
- """
- from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure. Throws
+ ImportError if winreg is unavailable.
+ """
+ from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx # noqa: PLC0415
+
try:
root, subkey = key.split("\\", 1)
assert root == "HKLM" # Only need HKLM for now.
@@ -232,17 +233,17 @@ def _RegistryGetValueUsingWinReg(key, value):
def _RegistryGetValue(key, value):
"""Use _winreg or reg.exe to obtain the value of a registry key.
- Using _winreg is preferable because it solves an issue on some corporate
- environments where access to reg.exe is locked down. However, we still need
- to fallback to reg.exe for the case where the _winreg module is not available
- (for example in cygwin python).
-
- Args:
- key: The registry key.
- value: The particular registry value to read.
- Return:
- contents of the registry key's value, or None on failure.
- """
+ Using _winreg is preferable because it solves an issue on some corporate
+ environments where access to reg.exe is locked down. However, we still need
+ to fallback to reg.exe for the case where the _winreg module is not available
+ (for example in cygwin python).
+
+ Args:
+ key: The registry key.
+ value: The particular registry value to read.
+ Return:
+ contents of the registry key's value, or None on failure.
+ """
try:
return _RegistryGetValueUsingWinReg(key, value)
except ImportError:
@@ -262,10 +263,10 @@ def _RegistryGetValue(key, value):
def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation.
- Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
- autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
- passed in that doesn't match a value in versions python will throw a error.
- """
+ Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
+ autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
+ passed in that doesn't match a value in versions python will throw a error.
+ """
if path:
path = os.path.normpath(path)
versions = {
@@ -435,22 +436,22 @@ def _ConvertToCygpath(path):
def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions.
- Returns:
- A list of visual studio versions installed in descending order of
- usage preference.
- Base this on the registry and a quick check if devenv.exe exists.
- Possibilities are:
- 2005(e) - Visual Studio 2005 (8)
- 2008(e) - Visual Studio 2008 (9)
- 2010(e) - Visual Studio 2010 (10)
- 2012(e) - Visual Studio 2012 (11)
- 2013(e) - Visual Studio 2013 (12)
- 2015 - Visual Studio 2015 (14)
- 2017 - Visual Studio 2017 (15)
- 2019 - Visual Studio 2019 (16)
- 2022 - Visual Studio 2022 (17)
- Where (e) is e for express editions of MSVS and blank otherwise.
- """
+ Returns:
+ A list of visual studio versions installed in descending order of
+ usage preference.
+ Base this on the registry and a quick check if devenv.exe exists.
+ Possibilities are:
+ 2005(e) - Visual Studio 2005 (8)
+ 2008(e) - Visual Studio 2008 (9)
+ 2010(e) - Visual Studio 2010 (10)
+ 2012(e) - Visual Studio 2012 (11)
+ 2013(e) - Visual Studio 2013 (12)
+ 2015 - Visual Studio 2015 (14)
+ 2017 - Visual Studio 2017 (15)
+ 2019 - Visual Studio 2019 (16)
+ 2022 - Visual Studio 2022 (17)
+ Where (e) is e for express editions of MSVS and blank otherwise.
+ """
version_to_year = {
"8.0": "2005",
"9.0": "2008",
@@ -527,11 +528,11 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
def SelectVisualStudioVersion(version="auto", allow_fallback=True):
"""Select which version of Visual Studio projects to generate.
- Arguments:
- version: Hook to allow caller to force a particular version (vs auto).
- Returns:
- An object representing a visual studio project format version.
- """
+ Arguments:
+ version: Hook to allow caller to force a particular version (vs auto).
+ Returns:
+ An object representing a visual studio project format version.
+ """
# In auto mode, check environment variable for override.
if version == "auto":
version = os.environ.get("GYP_MSVS_VERSION", "auto")
@@ -552,8 +553,7 @@ def SelectVisualStudioVersion(version="auto", allow_fallback=True):
"2019": ("16.0",),
"2022": ("17.0",),
}
- override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
- if override_path:
+ if override_path := os.environ.get("GYP_MSVS_OVERRIDE_PATH"):
msvs_version = os.environ.get("GYP_MSVS_VERSION")
if not msvs_version:
raise ValueError(
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
index 77800661a48c0e..3a70cf076c8b47 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
@@ -25,19 +25,21 @@
DEBUG_VARIABLES = "variables"
DEBUG_INCLUDES = "includes"
+
def EscapeForCString(string: bytes | str) -> str:
if isinstance(string, str):
- string = string.encode(encoding='utf8')
+ string = string.encode(encoding="utf8")
- backslash_or_double_quote = {ord('\\'), ord('"')}
- result = ''
+ backslash_or_double_quote = {ord("\\"), ord('"')}
+ result = ""
for char in string:
if char in backslash_or_double_quote or not 32 <= char < 127:
- result += '\\%03o' % char
+ result += "\\%03o" % char
else:
result += chr(char)
return result
+
def DebugOutput(mode, message, *args):
if "all" in gyp.debug or mode in gyp.debug:
ctx = ("unknown", 0, "unknown")
@@ -76,11 +78,11 @@ def Load(
circular_check=True,
):
"""
- Loads one or more specified build files.
- default_variables and includes will be copied before use.
- Returns the generator for the specified format and the
- data returned by loading the specified build files.
- """
+ Loads one or more specified build files.
+ default_variables and includes will be copied before use.
+ Returns the generator for the specified format and the
+ data returned by loading the specified build files.
+ """
if params is None:
params = {}
@@ -114,7 +116,7 @@ def Load(
# These parameters are passed in order (as opposed to by key)
# because ActivePython cannot handle key parameters to __import__.
generator = __import__(generator_name, globals(), locals(), generator_name)
- for (key, val) in generator.generator_default_variables.items():
+ for key, val in generator.generator_default_variables.items():
default_variables.setdefault(key, val)
output_dir = params["options"].generator_output or params["options"].toplevel_dir
@@ -184,10 +186,10 @@ def Load(
def NameValueListToDict(name_value_list):
"""
- Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
- of the pairs. If a string is simply NAME, then the value in the dictionary
- is set to True. If VALUE can be converted to an integer, it is.
- """
+ Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
+ of the pairs. If a string is simply NAME, then the value in the dictionary
+ is set to True. If VALUE can be converted to an integer, it is.
+ """
result = {}
for item in name_value_list:
tokens = item.split("=", 1)
@@ -220,13 +222,13 @@ def FormatOpt(opt, value):
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
"""Regenerate a list of command line flags, for an option of action='append'.
- The |env_name|, if given, is checked in the environment and used to generate
- an initial list of options, then the options that were specified on the
- command line (given in |values|) are appended. This matches the handling of
- environment variables and command line flags where command line flags override
- the environment, while not requiring the environment to be set when the flags
- are used again.
- """
+ The |env_name|, if given, is checked in the environment and used to generate
+ an initial list of options, then the options that were specified on the
+ command line (given in |values|) are appended. This matches the handling of
+ environment variables and command line flags where command line flags override
+ the environment, while not requiring the environment to be set when the flags
+ are used again.
+ """
flags = []
if options.use_environment and env_name:
for flag_value in ShlexEnv(env_name):
@@ -242,14 +244,14 @@ def RegenerateAppendFlag(flag, values, predicate, env_name, options):
def RegenerateFlags(options):
"""Given a parsed options object, and taking the environment variables into
- account, returns a list of flags that should regenerate an equivalent options
- object (even in the absence of the environment variables.)
+ account, returns a list of flags that should regenerate an equivalent options
+ object (even in the absence of the environment variables.)
- Any path options will be normalized relative to depth.
+ Any path options will be normalized relative to depth.
- The format flag is not included, as it is assumed the calling generator will
- set that as appropriate.
- """
+ The format flag is not included, as it is assumed the calling generator will
+ set that as appropriate.
+ """
def FixPath(path):
path = gyp.common.FixIfRelativePath(path, options.depth)
@@ -307,15 +309,15 @@ def __init__(self, usage):
def add_argument(self, *args, **kw):
"""Add an option to the parser.
- This accepts the same arguments as ArgumentParser.add_argument, plus the
- following:
- regenerate: can be set to False to prevent this option from being included
- in regeneration.
- env_name: name of environment variable that additional values for this
- option come from.
- type: adds type='path', to tell the regenerator that the values of
- this option need to be made relative to options.depth
- """
+ This accepts the same arguments as ArgumentParser.add_argument, plus the
+ following:
+ regenerate: can be set to False to prevent this option from being included
+ in regeneration.
+ env_name: name of environment variable that additional values for this
+ option come from.
+ type: adds type='path', to tell the regenerator that the values of
+ this option need to be made relative to options.depth
+ """
env_name = kw.pop("env_name", None)
if "dest" in kw and kw.pop("regenerate", True):
dest = kw["dest"]
@@ -343,7 +345,7 @@ def parse_args(self, *args):
def gyp_main(args):
my_name = os.path.basename(sys.argv[0])
- usage = "usage: %(prog)s [options ...] [build_file ...]"
+ usage = "%(prog)s [options ...] [build_file ...]"
parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s"))
parser.add_argument(
@@ -489,7 +491,8 @@ def gyp_main(args):
options, build_files_arg = parser.parse_args(args)
if options.version:
- import pkg_resources
+ import pkg_resources # noqa: PLC0415
+
print(f"v{pkg_resources.get_distribution('gyp-next').version}")
return 0
build_files = build_files_arg
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
index fbf1024fc38319..223ce47b0032f3 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -31,9 +31,8 @@ def __call__(self, *args):
class GypError(Exception):
"""Error class representing an error, which is to be presented
- to the user. The main entry point will catch and display this.
- """
-
+ to the user. The main entry point will catch and display this.
+ """
def ExceptionAppend(e, msg):
@@ -48,9 +47,9 @@ def ExceptionAppend(e, msg):
def FindQualifiedTargets(target, qualified_list):
"""
- Given a list of qualified targets, return the qualified targets for the
- specified |target|.
- """
+ Given a list of qualified targets, return the qualified targets for the
+ specified |target|.
+ """
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
@@ -115,7 +114,7 @@ def BuildFile(fully_qualified_target):
def GetEnvironFallback(var_list, default):
"""Look up a key in the environment, with fallback to secondary keys
- and finally falling back to a default value."""
+ and finally falling back to a default value."""
for var in var_list:
if var in os.environ:
return os.environ[var]
@@ -178,11 +177,11 @@ def RelativePath(path, relative_to, follow_path_symlink=True):
@memoize
def InvertRelativePath(path, toplevel_dir=None):
"""Given a path like foo/bar that is relative to toplevel_dir, return
- the inverse relative path back to the toplevel_dir.
+ the inverse relative path back to the toplevel_dir.
- E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
- should always produce the empty string, unless the path contains symlinks.
- """
+ E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+ should always produce the empty string, unless the path contains symlinks.
+ """
if not path:
return path
toplevel_dir = "." if toplevel_dir is None else toplevel_dir
@@ -262,12 +261,12 @@ def UnrelativePath(path, relative_to):
def EncodePOSIXShellArgument(argument):
"""Encodes |argument| suitably for consumption by POSIX shells.
- argument may be quoted and escaped as necessary to ensure that POSIX shells
- treat the returned value as a literal representing the argument passed to
- this function. Parameter (variable) expansions beginning with $ are allowed
- to remain intact without escaping the $, to allow the argument to contain
- references to variables to be expanded by the shell.
- """
+ argument may be quoted and escaped as necessary to ensure that POSIX shells
+ treat the returned value as a literal representing the argument passed to
+ this function. Parameter (variable) expansions beginning with $ are allowed
+ to remain intact without escaping the $, to allow the argument to contain
+ references to variables to be expanded by the shell.
+ """
if not isinstance(argument, str):
argument = str(argument)
@@ -282,9 +281,9 @@ def EncodePOSIXShellArgument(argument):
def EncodePOSIXShellList(list):
"""Encodes |list| suitably for consumption by POSIX shells.
- Returns EncodePOSIXShellArgument for each item in list, and joins them
- together using the space character as an argument separator.
- """
+ Returns EncodePOSIXShellArgument for each item in list, and joins them
+ together using the space character as an argument separator.
+ """
encoded_arguments = []
for argument in list:
@@ -312,14 +311,12 @@ def DeepDependencyTargets(target_dicts, roots):
def BuildFileTargets(target_list, build_file):
- """From a target_list, returns the subset from the specified build_file.
- """
+ """From a target_list, returns the subset from the specified build_file."""
return [p for p in target_list if BuildFile(p) == build_file]
def AllTargets(target_list, target_dicts, build_file):
- """Returns all targets (direct and dependencies) for the specified build_file.
- """
+ """Returns all targets (direct and dependencies) for the specified build_file."""
bftargets = BuildFileTargets(target_list, build_file)
deptargets = DeepDependencyTargets(target_dicts, bftargets)
return bftargets + deptargets
@@ -328,12 +325,12 @@ def AllTargets(target_list, target_dicts, build_file):
def WriteOnDiff(filename):
"""Write to a file only if the new contents differ.
- Arguments:
- filename: name of the file to potentially write to.
- Returns:
- A file like object which will write to temporary file and only overwrite
- the target if it differs (on close).
- """
+ Arguments:
+ filename: name of the file to potentially write to.
+ Returns:
+ A file like object which will write to temporary file and only overwrite
+ the target if it differs (on close).
+ """
class Writer:
"""Wrapper around file which only covers the target if it differs."""
@@ -421,8 +418,10 @@ def EnsureDirExists(path):
except OSError:
pass
-def GetCrossCompilerPredefines(): # -> dict
+
+def GetCompilerPredefines(): # -> dict
cmd = []
+ defines = {}
# shlex.split() will eat '\' in posix mode, but
# setting posix=False will preserve extra '"' cause CreateProcess fail on Windows
@@ -439,7 +438,7 @@ def replace_sep(s):
if CXXFLAGS := os.environ.get("CXXFLAGS"):
cmd += shlex.split(replace_sep(CXXFLAGS))
else:
- return {}
+ return defines
if sys.platform == "win32":
fd, input = tempfile.mkstemp(suffix=".c")
@@ -447,20 +446,34 @@ def replace_sep(s):
try:
os.close(fd)
stdout = subprocess.run(
- real_cmd, shell=True,
- capture_output=True, check=True
+ real_cmd, shell=True, capture_output=True, check=True
).stdout
+ except subprocess.CalledProcessError as e:
+ print(
+ "Warning: failed to get compiler predefines\n"
+ "cmd: %s\n"
+ "status: %d" % (e.cmd, e.returncode),
+ file=sys.stderr,
+ )
+ return defines
finally:
os.unlink(input)
else:
input = "/dev/null"
real_cmd = [*cmd, "-dM", "-E", "-x", "c", input]
- stdout = subprocess.run(
- real_cmd, shell=False,
- capture_output=True, check=True
- ).stdout
+ try:
+ stdout = subprocess.run(
+ real_cmd, shell=False, capture_output=True, check=True
+ ).stdout
+ except subprocess.CalledProcessError as e:
+ print(
+ "Warning: failed to get compiler predefines\n"
+ "cmd: %s\n"
+ "status: %d" % (e.cmd, e.returncode),
+ file=sys.stderr,
+ )
+ return defines
- defines = {}
lines = stdout.decode("utf-8").replace("\r\n", "\n").split("\n")
for line in lines:
if (line or "").startswith("#define "):
@@ -468,6 +481,7 @@ def replace_sep(s):
defines[key] = " ".join(value)
return defines
+
def GetFlavorByPlatform():
"""Returns |params.flavor| if it's set, the system's default flavor else."""
flavors = {
@@ -495,11 +509,12 @@ def GetFlavorByPlatform():
return "linux"
+
def GetFlavor(params):
if "flavor" in params:
return params["flavor"]
- defines = GetCrossCompilerPredefines()
+ defines = GetCompilerPredefines()
if "__EMSCRIPTEN__" in defines:
return "emscripten"
if "__wasm__" in defines:
@@ -510,7 +525,7 @@ def GetFlavor(params):
def CopyTool(flavor, out_path, generator_flags={}):
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
- to |out_path|."""
+ to |out_path|."""
# aix and solaris just need flock emulation. mac and win use more complicated
# support scripts.
prefix = {
@@ -566,7 +581,8 @@ def uniquer(seq, idfun=lambda x: x):
# Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(MutableSet):
+class OrderedSet(MutableSet): # noqa: PLW1641
+ # TODO (cclauss): Fix eq-without-hash ruff rule PLW1641
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
@@ -644,24 +660,24 @@ def __str__(self):
def TopologicallySorted(graph, get_edges):
r"""Topologically sort based on a user provided edge definition.
- Args:
- graph: A list of node names.
- get_edges: A function mapping from node name to a hashable collection
- of node names which this node has outgoing edges to.
- Returns:
- A list containing all of the node in graph in topological order.
- It is assumed that calling get_edges once for each node and caching is
- cheaper than repeatedly calling get_edges.
- Raises:
- CycleError in the event of a cycle.
- Example:
- graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
- def GetEdges(node):
- return re.findall(r'\$\(([^))]\)', graph[node])
- print TopologicallySorted(graph.keys(), GetEdges)
- ==>
- ['a', 'c', b']
- """
+ Args:
+ graph: A list of node names.
+ get_edges: A function mapping from node name to a hashable collection
+ of node names which this node has outgoing edges to.
+ Returns:
+ A list containing all of the node in graph in topological order.
+ It is assumed that calling get_edges once for each node and caching is
+ cheaper than repeatedly calling get_edges.
+ Raises:
+ CycleError in the event of a cycle.
+ Example:
+ graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
+ def GetEdges(node):
+ return re.findall(r'\$\(([^))]\)', graph[node])
+ print TopologicallySorted(graph.keys(), GetEdges)
+ ==>
+ ['a', 'c', b']
+ """
get_edges = memoize(get_edges)
visited = set()
visiting = set()
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
index bd7172afaf3697..b5988816c04a2b 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
@@ -7,6 +7,7 @@
"""Unit tests for the common.py file."""
import os
+import subprocess
import sys
import unittest
from unittest.mock import MagicMock, patch
@@ -27,8 +28,12 @@ def test_Valid(self):
def GetEdge(node):
return tuple(graph[node])
- assert gyp.common.TopologicallySorted(
- graph.keys(), GetEdge) == ["a", "c", "d", "b"]
+ assert gyp.common.TopologicallySorted(graph.keys(), GetEdge) == [
+ "a",
+ "c",
+ "d",
+ "b",
+ ]
def test_Cycle(self):
"""Test that an exception is thrown on a cyclic graph."""
@@ -85,89 +90,97 @@ def decode(self, encoding):
@patch("os.close")
@patch("os.unlink")
@patch("tempfile.mkstemp")
- def test_GetCrossCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close):
+ def test_GetCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close):
mock_close.return_value = None
mock_unlink.return_value = None
mock_mkstemp.return_value = (0, "temp.c")
- def mock_run(env, defines_stdout, expected_cmd):
+ def mock_run(env, defines_stdout, expected_cmd, throws=False):
with patch("subprocess.run") as mock_run:
- mock_process = MagicMock()
- mock_process.returncode = 0
- mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout)
- mock_run.return_value = mock_process
expected_input = "temp.c" if sys.platform == "win32" else "/dev/null"
+ if throws:
+ mock_run.side_effect = subprocess.CalledProcessError(
+ returncode=1,
+ cmd=[*expected_cmd, "-dM", "-E", "-x", "c", expected_input],
+ )
+ else:
+ mock_process = MagicMock()
+ mock_process.returncode = 0
+ mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout)
+ mock_run.return_value = mock_process
with patch.dict(os.environ, env):
- defines = gyp.common.GetCrossCompilerPredefines()
+ try:
+ defines = gyp.common.GetCompilerPredefines()
+ except Exception as e:
+ self.fail(f"GetCompilerPredefines raised an exception: {e}")
flavor = gyp.common.GetFlavor({})
- if env.get("CC_target"):
+ if env.get("CC_target") or env.get("CC"):
mock_run.assert_called_with(
- [
- *expected_cmd,
- "-dM", "-E", "-x", "c", expected_input
- ],
+ [*expected_cmd, "-dM", "-E", "-x", "c", expected_input],
shell=sys.platform == "win32",
- capture_output=True, check=True)
+ capture_output=True,
+ check=True,
+ )
return [defines, flavor]
+ [defines0, _] = mock_run({"CC": "cl.exe"}, "", ["cl.exe"], True)
+ assert defines0 == {}
+
[defines1, _] = mock_run({}, "", [])
assert defines1 == {}
[defines2, flavor2] = mock_run(
- { "CC_target": "/opt/wasi-sdk/bin/clang" },
+ {"CC_target": "/opt/wasi-sdk/bin/clang"},
"#define __wasm__ 1\n#define __wasi__ 1\n",
- ["/opt/wasi-sdk/bin/clang"]
+ ["/opt/wasi-sdk/bin/clang"],
)
- assert defines2 == { "__wasm__": "1", "__wasi__": "1" }
+ assert defines2 == {"__wasm__": "1", "__wasi__": "1"}
assert flavor2 == "wasi"
[defines3, flavor3] = mock_run(
- { "CC_target": "/opt/wasi-sdk/bin/clang --target=wasm32" },
+ {"CC_target": "/opt/wasi-sdk/bin/clang --target=wasm32"},
"#define __wasm__ 1\n",
- ["/opt/wasi-sdk/bin/clang", "--target=wasm32"]
+ ["/opt/wasi-sdk/bin/clang", "--target=wasm32"],
)
- assert defines3 == { "__wasm__": "1" }
+ assert defines3 == {"__wasm__": "1"}
assert flavor3 == "wasm"
[defines4, flavor4] = mock_run(
- { "CC_target": "/emsdk/upstream/emscripten/emcc" },
+ {"CC_target": "/emsdk/upstream/emscripten/emcc"},
"#define __EMSCRIPTEN__ 1\n",
- ["/emsdk/upstream/emscripten/emcc"]
+ ["/emsdk/upstream/emscripten/emcc"],
)
- assert defines4 == { "__EMSCRIPTEN__": "1" }
+ assert defines4 == {"__EMSCRIPTEN__": "1"}
assert flavor4 == "emscripten"
# Test path which include white space
[defines5, flavor5] = mock_run(
{
- "CC_target": "\"/Users/Toyo Li/wasi-sdk/bin/clang\" -O3",
- "CFLAGS": "--target=wasm32-wasi-threads -pthread"
+ "CC_target": '"/Users/Toyo Li/wasi-sdk/bin/clang" -O3',
+ "CFLAGS": "--target=wasm32-wasi-threads -pthread",
},
"#define __wasm__ 1\n#define __wasi__ 1\n#define _REENTRANT 1\n",
[
"/Users/Toyo Li/wasi-sdk/bin/clang",
"-O3",
"--target=wasm32-wasi-threads",
- "-pthread"
- ]
+ "-pthread",
+ ],
)
- assert defines5 == {
- "__wasm__": "1",
- "__wasi__": "1",
- "_REENTRANT": "1"
- }
+ assert defines5 == {"__wasm__": "1", "__wasi__": "1", "_REENTRANT": "1"}
assert flavor5 == "wasi"
original_sep = os.sep
os.sep = "\\"
[defines6, flavor6] = mock_run(
- { "CC_target": "\"C:\\Program Files\\wasi-sdk\\clang.exe\"" },
+ {"CC_target": '"C:\\Program Files\\wasi-sdk\\clang.exe"'},
"#define __wasm__ 1\n#define __wasi__ 1\n",
- ["C:/Program Files/wasi-sdk/clang.exe"]
+ ["C:/Program Files/wasi-sdk/clang.exe"],
)
os.sep = original_sep
- assert defines6 == { "__wasm__": "1", "__wasi__": "1" }
+ assert defines6 == {"__wasm__": "1", "__wasi__": "1"}
assert flavor6 == "wasi"
+
if __name__ == "__main__":
unittest.main()
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
index e4d2f82b687418..a5d95153eca725 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -10,43 +10,43 @@
def XmlToString(content, encoding="utf-8", pretty=False):
- """ Writes the XML content to disk, touching the file only if it has changed.
-
- Visual Studio files have a lot of pre-defined structures. This function makes
- it easy to represent these structures as Python data structures, instead of
- having to create a lot of function calls.
-
- Each XML element of the content is represented as a list composed of:
- 1. The name of the element, a string,
- 2. The attributes of the element, a dictionary (optional), and
- 3+. The content of the element, if any. Strings are simple text nodes and
- lists are child elements.
-
- Example 1:
-
- becomes
- ['test']
-
- Example 2:
-
- This is
- it!
-
-
- becomes
- ['myelement', {'a':'value1', 'b':'value2'},
- ['childtype', 'This is'],
- ['childtype', 'it!'],
- ]
-
- Args:
- content: The structured content to be converted.
- encoding: The encoding to report on the first XML line.
- pretty: True if we want pretty printing with indents and new lines.
-
- Returns:
- The XML content as a string.
- """
+ """Writes the XML content to disk, touching the file only if it has changed.
+
+ Visual Studio files have a lot of pre-defined structures. This function makes
+ it easy to represent these structures as Python data structures, instead of
+ having to create a lot of function calls.
+
+ Each XML element of the content is represented as a list composed of:
+ 1. The name of the element, a string,
+ 2. The attributes of the element, a dictionary (optional), and
+ 3+. The content of the element, if any. Strings are simple text nodes and
+ lists are child elements.
+
+ Example 1:
+
+ becomes
+ ['test']
+
+ Example 2:
+
+ This is
+ it!
+
+
+ becomes
+ ['myelement', {'a':'value1', 'b':'value2'},
+ ['childtype', 'This is'],
+ ['childtype', 'it!'],
+ ]
+
+ Args:
+ content: The structured content to be converted.
+ encoding: The encoding to report on the first XML line.
+ pretty: True if we want pretty printing with indents and new lines.
+
+ Returns:
+ The XML content as a string.
+ """
# We create a huge list of all the elements of the file.
xml_parts = ['' % encoding]
if pretty:
@@ -58,14 +58,14 @@ def XmlToString(content, encoding="utf-8", pretty=False):
def _ConstructContentList(xml_parts, specification, pretty, level=0):
- """ Appends the XML parts corresponding to the specification.
-
- Args:
- xml_parts: A list of XML parts to be appended to.
- specification: The specification of the element. See EasyXml docs.
- pretty: True if we want pretty printing with indents and new lines.
- level: Indentation level.
- """
+ """Appends the XML parts corresponding to the specification.
+
+ Args:
+ xml_parts: A list of XML parts to be appended to.
+ specification: The specification of the element. See EasyXml docs.
+ pretty: True if we want pretty printing with indents and new lines.
+ level: Indentation level.
+ """
# The first item in a specification is the name of the element.
if pretty:
indentation = " " * level
@@ -107,16 +107,17 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
xml_parts.append("/>%s" % new_line)
-def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
- win32=(sys.platform == "win32")):
- """ Writes the XML content to disk, touching the file only if it has changed.
+def WriteXmlIfChanged(
+ content, path, encoding="utf-8", pretty=False, win32=(sys.platform == "win32")
+):
+ """Writes the XML content to disk, touching the file only if it has changed.
- Args:
- content: The structured content to be written.
- path: Location of the file.
- encoding: The encoding to report on the first line of the XML file.
- pretty: True if we want pretty printing with indents and new lines.
- """
+ Args:
+ content: The structured content to be written.
+ path: Location of the file.
+ encoding: The encoding to report on the first line of the XML file.
+ pretty: True if we want pretty printing with indents and new lines.
+ """
xml_string = XmlToString(content, encoding, pretty)
if win32 and os.linesep != "\r\n":
xml_string = xml_string.replace("\n", "\r\n")
@@ -157,7 +158,7 @@ def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
def _XmlEscape(value, attr=False):
- """ Escape a string for inclusion in XML."""
+ """Escape a string for inclusion in XML."""
def replace(match):
m = match.string[match.start() : match.end()]
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
index bb97b802c59551..29f5dad5a6e90d 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
@@ -4,7 +4,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-""" Unit tests for the easy_xml.py file. """
+"""Unit tests for the easy_xml.py file."""
import unittest
from io import StringIO
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
index cb18742cd8df6d..420c4e49ebc19a 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -62,7 +62,6 @@
then the "all" target includes "b1" and "b2".
"""
-
import json
import os
import posixpath
@@ -130,8 +129,8 @@ def _ToGypPath(path):
def _ResolveParent(path, base_path_components):
"""Resolves |path|, which starts with at least one '../'. Returns an empty
- string if the path shouldn't be considered. See _AddSources() for a
- description of |base_path_components|."""
+ string if the path shouldn't be considered. See _AddSources() for a
+ description of |base_path_components|."""
depth = 0
while path.startswith("../"):
depth += 1
@@ -151,11 +150,11 @@ def _ResolveParent(path, base_path_components):
def _AddSources(sources, base_path, base_path_components, result):
"""Extracts valid sources from |sources| and adds them to |result|. Each
- source file is relative to |base_path|, but may contain '..'. To make
- resolving '..' easier |base_path_components| contains each of the
- directories in |base_path|. Additionally each source may contain variables.
- Such sources are ignored as it is assumed dependencies on them are expressed
- and tracked in some other means."""
+ source file is relative to |base_path|, but may contain '..'. To make
+ resolving '..' easier |base_path_components| contains each of the
+ directories in |base_path|. Additionally each source may contain variables.
+ Such sources are ignored as it is assumed dependencies on them are expressed
+ and tracked in some other means."""
# NOTE: gyp paths are always posix style.
for source in sources:
if not len(source) or source.startswith(("!!!", "$")):
@@ -218,23 +217,23 @@ def _ExtractSources(target, target_dict, toplevel_dir):
class Target:
"""Holds information about a particular target:
- deps: set of Targets this Target depends upon. This is not recursive, only the
- direct dependent Targets.
- match_status: one of the MatchStatus values.
- back_deps: set of Targets that have a dependency on this Target.
- visited: used during iteration to indicate whether we've visited this target.
- This is used for two iterations, once in building the set of Targets and
- again in _GetBuildTargets().
- name: fully qualified name of the target.
- requires_build: True if the target type is such that it needs to be built.
- See _DoesTargetTypeRequireBuild for details.
- added_to_compile_targets: used when determining if the target was added to the
- set of targets that needs to be built.
- in_roots: true if this target is a descendant of one of the root nodes.
- is_executable: true if the type of target is executable.
- is_static_library: true if the type of target is static_library.
- is_or_has_linked_ancestor: true if the target does a link (eg executable), or
- if there is a target in back_deps that does a link."""
+ deps: set of Targets this Target depends upon. This is not recursive, only the
+ direct dependent Targets.
+ match_status: one of the MatchStatus values.
+ back_deps: set of Targets that have a dependency on this Target.
+ visited: used during iteration to indicate whether we've visited this target.
+ This is used for two iterations, once in building the set of Targets and
+ again in _GetBuildTargets().
+ name: fully qualified name of the target.
+ requires_build: True if the target type is such that it needs to be built.
+ See _DoesTargetTypeRequireBuild for details.
+ added_to_compile_targets: used when determining if the target was added to the
+ set of targets that needs to be built.
+ in_roots: true if this target is a descendant of one of the root nodes.
+ is_executable: true if the type of target is executable.
+ is_static_library: true if the type of target is static_library.
+ is_or_has_linked_ancestor: true if the target does a link (eg executable), or
+ if there is a target in back_deps that does a link."""
def __init__(self, name):
self.deps = set()
@@ -254,8 +253,8 @@ def __init__(self, name):
class Config:
"""Details what we're looking for
- files: set of files to search for
- targets: see file description for details."""
+ files: set of files to search for
+ targets: see file description for details."""
def __init__(self):
self.files = []
@@ -265,7 +264,7 @@ def __init__(self):
def Init(self, params):
"""Initializes Config. This is a separate method as it raises an exception
- if there is a parse error."""
+ if there is a parse error."""
generator_flags = params.get("generator_flags", {})
config_path = generator_flags.get("config_path", None)
if not config_path:
@@ -289,8 +288,8 @@ def Init(self, params):
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
"""Returns true if the build file |build_file| is either in |files| or
- one of the files included by |build_file| is in |files|. |toplevel_dir| is
- the root of the source tree."""
+ one of the files included by |build_file| is in |files|. |toplevel_dir| is
+ the root of the source tree."""
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
if debug:
print("gyp file modified", build_file)
@@ -319,8 +318,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
def _GetOrCreateTargetByName(targets, target_name):
"""Creates or returns the Target at targets[target_name]. If there is no
- Target for |target_name| one is created. Returns a tuple of whether a new
- Target was created and the Target."""
+ Target for |target_name| one is created. Returns a tuple of whether a new
+ Target was created and the Target."""
if target_name in targets:
return False, targets[target_name]
target = Target(target_name)
@@ -340,13 +339,13 @@ def _DoesTargetTypeRequireBuild(target_dict):
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
"""Returns a tuple of the following:
- . A dictionary mapping from fully qualified name to Target.
- . A list of the targets that have a source file in |files|.
- . Targets that constitute the 'all' target. See description at top of file
- for details on the 'all' target.
- This sets the |match_status| of the targets that contain any of the source
- files in |files| to MATCH_STATUS_MATCHES.
- |toplevel_dir| is the root of the source tree."""
+ . A dictionary mapping from fully qualified name to Target.
+ . A list of the targets that have a source file in |files|.
+ . Targets that constitute the 'all' target. See description at top of file
+ for details on the 'all' target.
+ This sets the |match_status| of the targets that contain any of the source
+ files in |files| to MATCH_STATUS_MATCHES.
+ |toplevel_dir| is the root of the source tree."""
# Maps from target name to Target.
name_to_target = {}
@@ -379,9 +378,10 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build
target_type = target_dicts[target_name]["type"]
target.is_executable = target_type == "executable"
target.is_static_library = target_type == "static_library"
- target.is_or_has_linked_ancestor = (
- target_type in {"executable", "shared_library"}
- )
+ target.is_or_has_linked_ancestor = target_type in {
+ "executable",
+ "shared_library",
+ }
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
if build_file not in build_file_in_files:
@@ -427,9 +427,9 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
"""Returns a tuple of the following:
- . mapping (dictionary) from unqualified name to Target for all the
- Targets in |to_find|.
- . any target names not found. If this is empty all targets were found."""
+ . mapping (dictionary) from unqualified name to Target for all the
+ Targets in |to_find|.
+ . any target names not found. If this is empty all targets were found."""
result = {}
if not to_find:
return {}, []
@@ -446,15 +446,15 @@ def _GetUnqualifiedToTargetMapping(all_targets, to_find):
def _DoesTargetDependOnMatchingTargets(target):
"""Returns true if |target| or any of its dependencies is one of the
- targets containing the files supplied as input to analyzer. This updates
- |matches| of the Targets as it recurses.
- target: the Target to look for."""
+ targets containing the files supplied as input to analyzer. This updates
+ |matches| of the Targets as it recurses.
+ target: the Target to look for."""
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
return False
- if (
- target.match_status in {MATCH_STATUS_MATCHES,
- MATCH_STATUS_MATCHES_BY_DEPENDENCY}
- ):
+ if target.match_status in {
+ MATCH_STATUS_MATCHES,
+ MATCH_STATUS_MATCHES_BY_DEPENDENCY,
+ }:
return True
for dep in target.deps:
if _DoesTargetDependOnMatchingTargets(dep):
@@ -467,9 +467,9 @@ def _DoesTargetDependOnMatchingTargets(target):
def _GetTargetsDependingOnMatchingTargets(possible_targets):
"""Returns the list of Targets in |possible_targets| that depend (either
- directly on indirectly) on at least one of the targets containing the files
- supplied as input to analyzer.
- possible_targets: targets to search from."""
+ directly on indirectly) on at least one of the targets containing the files
+ supplied as input to analyzer.
+ possible_targets: targets to search from."""
found = []
print("Targets that matched by dependency:")
for target in possible_targets:
@@ -480,11 +480,11 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets):
def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
"""Recurses through all targets that depend on |target|, adding all targets
- that need to be built (and are in |roots|) to |result|.
- roots: set of root targets.
- add_if_no_ancestor: If true and there are no ancestors of |target| then add
- |target| to |result|. |target| must still be in |roots|.
- result: targets that need to be built are added here."""
+ that need to be built (and are in |roots|) to |result|.
+ roots: set of root targets.
+ add_if_no_ancestor: If true and there are no ancestors of |target| then add
+ |target| to |result|. |target| must still be in |roots|.
+ result: targets that need to be built are added here."""
if target.visited:
return
@@ -537,8 +537,8 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
def _GetCompileTargets(matching_targets, supplied_targets):
"""Returns the set of Targets that require a build.
- matching_targets: targets that changed and need to be built.
- supplied_targets: set of targets supplied to analyzer to search from."""
+ matching_targets: targets that changed and need to be built.
+ supplied_targets: set of targets supplied to analyzer to search from."""
result = set()
for target in matching_targets:
print("finding compile targets for match", target.name)
@@ -592,7 +592,7 @@ def _WriteOutput(params, **values):
def _WasGypIncludeFileModified(params, files):
"""Returns true if one of the files in |files| is in the set of included
- files."""
+ files."""
if params["options"].includes:
for include in params["options"].includes:
if _ToGypPath(os.path.normpath(include)) in files:
@@ -608,7 +608,7 @@ def _NamesNotIn(names, mapping):
def _LookupTargets(names, mapping):
"""Returns a list of the mapping[name] for each value in |names| that is in
- |mapping|."""
+ |mapping|."""
return [mapping[name] for name in names if name in mapping]
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
index 5ebe58bb556d80..5d5cae2afbf668 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -177,9 +177,7 @@ def Write(
self.WriteLn("LOCAL_IS_HOST_MODULE := true")
self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)")
elif sdk_version > 0:
- self.WriteLn(
- "LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)"
- )
+ self.WriteLn("LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)")
self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version)
# Grab output directories; needed for Actions and Rules.
@@ -380,7 +378,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs):
inputs = rule.get("inputs")
for rule_source in rule.get("rule_sources", []):
(rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
- (rule_source_root, rule_source_ext) = os.path.splitext(
+ (rule_source_root, _rule_source_ext) = os.path.splitext(
rule_source_basename
)
@@ -588,7 +586,8 @@ def WriteSources(self, spec, configs, extra_sources):
local_files = []
for source in sources:
(root, ext) = os.path.splitext(source)
- if ("$(gyp_shared_intermediate_dir)" in source
+ if (
+ "$(gyp_shared_intermediate_dir)" in source
or "$(gyp_intermediate_dir)" in source
or (IsCPPExtension(ext) and ext != local_cpp_extension)
):
@@ -734,8 +733,7 @@ def ComputeOutput(self, spec):
elif self.toolset == "host":
path = (
"$(call intermediates-dir-for,%s,%s,true,,"
- "$(GYP_HOST_VAR_PREFIX))"
- % (self.android_class, self.android_module)
+ "$(GYP_HOST_VAR_PREFIX))" % (self.android_class, self.android_module)
)
else:
path = (
@@ -900,8 +898,7 @@ def WriteTarget(
if self.type != "none":
self.WriteTargetFlags(spec, configs, link_deps)
- settings = spec.get("aosp_build_settings", {})
- if settings:
+ if settings := spec.get("aosp_build_settings", {}):
self.WriteLn("### Set directly by aosp_build_settings.")
for k, v in settings.items():
if isinstance(v, list):
@@ -1002,9 +999,9 @@ def LocalPathify(self, path):
# - i.e. that the resulting path is still inside the project tree. The
# path may legitimately have ended up containing just $(LOCAL_PATH), though,
# so we don't look for a slash.
- assert local_path.startswith(
- "$(LOCAL_PATH)"
- ), f"Path {path} attempts to escape from gyp path {self.path} !)"
+ assert local_path.startswith("$(LOCAL_PATH)"), (
+ f"Path {path} attempts to escape from gyp path {self.path} !)"
+ )
return local_path
def ExpandInputRoot(self, template, expansion, dirname):
@@ -1046,9 +1043,9 @@ def CalculateMakefilePath(build_file, base_name):
base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth)
# We write the file in the base_path directory.
output_file = os.path.join(options.depth, base_path, base_name)
- assert (
- not options.generator_output
- ), "The Android backend does not support options.generator_output."
+ assert not options.generator_output, (
+ "The Android backend does not support options.generator_output."
+ )
base_path = gyp.common.RelativePath(
os.path.dirname(build_file), options.toplevel_dir
)
@@ -1068,9 +1065,9 @@ def CalculateMakefilePath(build_file, base_name):
makefile_name = "GypAndroid" + options.suffix + ".mk"
makefile_path = os.path.join(options.toplevel_dir, makefile_name)
- assert (
- not options.generator_output
- ), "The Android backend does not support options.generator_output."
+ assert not options.generator_output, (
+ "The Android backend does not support options.generator_output."
+ )
gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, "w")
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
index e69103e1b9ba3f..dc9ea39acb7fc2 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -28,7 +28,6 @@
CMakeLists.txt file.
"""
-
import multiprocessing
import os
import signal
@@ -97,11 +96,11 @@ def Linkable(filename):
def NormjoinPathForceCMakeSource(base_path, rel_path):
"""Resolves rel_path against base_path and returns the result.
- If rel_path is an absolute path it is returned unchanged.
- Otherwise it is resolved against base_path and normalized.
- If the result is a relative path, it is forced to be relative to the
- CMakeLists.txt.
- """
+ If rel_path is an absolute path it is returned unchanged.
+ Otherwise it is resolved against base_path and normalized.
+ If the result is a relative path, it is forced to be relative to the
+ CMakeLists.txt.
+ """
if os.path.isabs(rel_path):
return rel_path
if any(rel_path.startswith(var) for var in FULL_PATH_VARS):
@@ -114,10 +113,10 @@ def NormjoinPathForceCMakeSource(base_path, rel_path):
def NormjoinPath(base_path, rel_path):
"""Resolves rel_path against base_path and returns the result.
- TODO: what is this really used for?
- If rel_path begins with '$' it is returned unchanged.
- Otherwise it is resolved against base_path if relative, then normalized.
- """
+ TODO: what is this really used for?
+ If rel_path begins with '$' it is returned unchanged.
+ Otherwise it is resolved against base_path if relative, then normalized.
+ """
if rel_path.startswith("$") and not rel_path.startswith("${configuration}"):
return rel_path
return os.path.normpath(os.path.join(base_path, rel_path))
@@ -126,19 +125,19 @@ def NormjoinPath(base_path, rel_path):
def CMakeStringEscape(a):
"""Escapes the string 'a' for use inside a CMake string.
- This means escaping
- '\' otherwise it may be seen as modifying the next character
- '"' otherwise it will end the string
- ';' otherwise the string becomes a list
+ This means escaping
+ '\' otherwise it may be seen as modifying the next character
+ '"' otherwise it will end the string
+ ';' otherwise the string becomes a list
- The following do not need to be escaped
- '#' when the lexer is in string state, this does not start a comment
+ The following do not need to be escaped
+ '#' when the lexer is in string state, this does not start a comment
- The following are yet unknown
- '$' generator variables (like ${obj}) must not be escaped,
- but text $ should be escaped
- what is wanted is to know which $ come from generator variables
- """
+ The following are yet unknown
+ '$' generator variables (like ${obj}) must not be escaped,
+ but text $ should be escaped
+ what is wanted is to know which $ come from generator variables
+ """
return a.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"')
@@ -237,25 +236,25 @@ def __init__(self, command, modifier, property_modifier):
def StringToCMakeTargetName(a):
"""Converts the given string 'a' to a valid CMake target name.
- All invalid characters are replaced by '_'.
- Invalid for cmake: ' ', '/', '(', ')', '"'
- Invalid for make: ':'
- Invalid for unknown reasons but cause failures: '.'
- """
+ All invalid characters are replaced by '_'.
+ Invalid for cmake: ' ', '/', '(', ')', '"'
+ Invalid for make: ':'
+ Invalid for unknown reasons but cause failures: '.'
+ """
return a.translate(_maketrans(' /():."', "_______"))
def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output):
"""Write CMake for the 'actions' in the target.
- Args:
- target_name: the name of the CMake target being generated.
- actions: the Gyp 'actions' dict for this target.
- extra_sources: [(, )] to append with generated source files.
- extra_deps: [] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
+ Args:
+ target_name: the name of the CMake target being generated.
+ actions: the Gyp 'actions' dict for this target.
+ extra_sources: [(, )] to append with generated source files.
+ extra_deps: [] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
for action in actions:
action_name = StringToCMakeTargetName(action["action_name"])
action_target_name = f"{target_name}__{action_name}"
@@ -337,14 +336,14 @@ def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output):
"""Write CMake for the 'rules' in the target.
- Args:
- target_name: the name of the CMake target being generated.
- actions: the Gyp 'actions' dict for this target.
- extra_sources: [(, )] to append with generated source files.
- extra_deps: [] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
+ Args:
+ target_name: the name of the CMake target being generated.
+ actions: the Gyp 'actions' dict for this target.
+ extra_sources: [(, )] to append with generated source files.
+ extra_deps: [] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
for rule in rules:
rule_name = StringToCMakeTargetName(target_name + "__" + rule["rule_name"])
@@ -455,13 +454,13 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, outpu
def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
"""Write CMake for the 'copies' in the target.
- Args:
- target_name: the name of the CMake target being generated.
- actions: the Gyp 'actions' dict for this target.
- extra_deps: [] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
+ Args:
+ target_name: the name of the CMake target being generated.
+ actions: the Gyp 'actions' dict for this target.
+ extra_deps: [] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
copy_name = target_name + "__copies"
# CMake gets upset with custom targets with OUTPUT which specify no output.
@@ -585,23 +584,23 @@ def CreateCMakeTargetFullName(qualified_target):
class CMakeNamer:
"""Converts Gyp target names into CMake target names.
- CMake requires that target names be globally unique. One way to ensure
- this is to fully qualify the names of the targets. Unfortunately, this
- ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
- of just "chrome". If this generator were only interested in building, it
- would be possible to fully qualify all target names, then create
- unqualified target names which depend on all qualified targets which
- should have had that name. This is more or less what the 'make' generator
- does with aliases. However, one goal of this generator is to create CMake
- files for use with IDEs, and fully qualified names are not as user
- friendly.
+ CMake requires that target names be globally unique. One way to ensure
+ this is to fully qualify the names of the targets. Unfortunately, this
+ ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+ of just "chrome". If this generator were only interested in building, it
+ would be possible to fully qualify all target names, then create
+ unqualified target names which depend on all qualified targets which
+ should have had that name. This is more or less what the 'make' generator
+ does with aliases. However, one goal of this generator is to create CMake
+ files for use with IDEs, and fully qualified names are not as user
+ friendly.
- Since target name collision is rare, we do the above only when required.
+ Since target name collision is rare, we do the above only when required.
- Toolset variants are always qualified from the base, as this is required for
- building. However, it also makes sense for an IDE, as it is possible for
- defines to be different.
- """
+ Toolset variants are always qualified from the base, as this is required for
+ building. However, it also makes sense for an IDE, as it is possible for
+ defines to be different.
+ """
def __init__(self, target_list):
self.cmake_target_base_names_conflicting = set()
@@ -810,8 +809,7 @@ def WriteTarget(
# link directories to targets defined after it is called.
# As a result, link_directories must come before the target definition.
# CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
- library_dirs = config.get("library_dirs")
- if library_dirs is not None:
+ if (library_dirs := config.get("library_dirs")) is not None:
output.write("link_directories(")
for library_dir in library_dirs:
output.write(" ")
@@ -1295,8 +1293,7 @@ def CallGenerateOutputForConfig(arglist):
def GenerateOutput(target_list, target_dicts, data, params):
- user_config = params.get("generator_flags", {}).get("config", None)
- if user_config:
+ if user_config := params.get("generator_flags", {}).get("config", None):
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
else:
config_names = target_dicts[target_list[0]]["configurations"]
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
index bebb1303154e16..1361aeca48d0cd 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
@@ -100,7 +100,7 @@ def resolve(filename):
def GenerateOutput(target_list, target_dicts, data, params):
per_config_commands = {}
for qualified_target, target in target_dicts.items():
- build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
+ build_file, _target_name, _toolset = gyp.common.ParseQualifiedTarget(
qualified_target
)
if IsMac(params):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
index e41c72d71070aa..c919674024e690 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -56,7 +56,7 @@ def CalculateVariables(default_variables, params):
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
- gyp)."""
+ gyp)."""
generator_flags = params.get("generator_flags", {})
if generator_flags.get("adjust_static_libraries", False):
global generator_wants_static_library_dependencies_adjusted
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
index ed6daa91bac3e7..685cd08c964b91 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -69,7 +69,7 @@ def CalculateVariables(default_variables, params):
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
- gyp)."""
+ gyp)."""
generator_flags = params.get("generator_flags", {})
if generator_flags.get("adjust_static_libraries", False):
global generator_wants_static_library_dependencies_adjusted
@@ -86,10 +86,10 @@ def GetAllIncludeDirectories(
):
"""Calculate the set of include directories to be used.
- Returns:
- A list including all the include_dir's specified for every target followed
- by any include directories that were added as cflag compiler options.
- """
+ Returns:
+ A list including all the include_dir's specified for every target followed
+ by any include directories that were added as cflag compiler options.
+ """
gyp_includes_set = set()
compiler_includes_list = []
@@ -178,11 +178,11 @@ def GetAllIncludeDirectories(
def GetCompilerPath(target_list, data, options):
"""Determine a command that can be used to invoke the compiler.
- Returns:
- If this is a gyp project that has explicit make settings, try to determine
- the compiler from that. Otherwise, see if a compiler was specified via the
- CC_target environment variable.
- """
+ Returns:
+ If this is a gyp project that has explicit make settings, try to determine
+ the compiler from that. Otherwise, see if a compiler was specified via the
+ CC_target environment variable.
+ """
# First, see if the compiler is configured in make's settings.
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_dict = data[build_file].get("make_global_settings", {})
@@ -202,10 +202,10 @@ def GetCompilerPath(target_list, data, options):
def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
"""Calculate the defines for a project.
- Returns:
- A dict that includes explicit defines declared in gyp files along with all
- of the default defines that the compiler uses.
- """
+ Returns:
+ A dict that includes explicit defines declared in gyp files along with all
+ of the default defines that the compiler uses.
+ """
# Get defines declared in the gyp files.
all_defines = {}
@@ -373,8 +373,8 @@ def GenerateClasspathFile(
target_list, target_dicts, toplevel_dir, toplevel_build, out_name
):
"""Generates a classpath file suitable for symbol navigation and code
- completion of Java code (such as in Android projects) by finding all
- .java and .jar files used as action inputs."""
+ completion of Java code (such as in Android projects) by finding all
+ .java and .jar files used as action inputs."""
gyp.common.EnsureDirExists(out_name)
result = ET.Element("classpath")
@@ -451,8 +451,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if params["options"].generator_output:
raise NotImplementedError("--generator_output not implemented for eclipse")
- user_config = params.get("generator_flags", {}).get("config", None)
- if user_config:
+ if user_config := params.get("generator_flags", {}).get("config", None):
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
else:
config_names = target_dicts[target_list[0]]["configurations"]
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
index a0aa6d9245c811..89af24a201b101 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
@@ -30,7 +30,6 @@
to change.
"""
-
import pprint
import gyp.common
@@ -74,7 +73,7 @@
def GenerateOutput(target_list, target_dicts, data, params):
output_files = {}
for qualified_target in target_list:
- [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+ [input_file, _target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
if input_file[-4:] != ".gyp":
continue
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
index 36a05deb7eb8b9..72d22ff32b92d7 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -13,7 +13,6 @@
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
-
import code
import sys
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
index e860479069abaa..5f30f39fc503e5 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -78,7 +78,7 @@ def CalculateVariables(default_variables, params):
# Copy additional generator configuration data from Xcode, which is shared
# by the Mac Make generator.
- import gyp.generator.xcode as xcode_generator
+ import gyp.generator.xcode as xcode_generator # noqa: PLC0415
global generator_additional_non_configuration_keys
generator_additional_non_configuration_keys = getattr(
@@ -218,7 +218,7 @@ def CalculateGeneratorInputInfo(params):
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-""" % {'python': sys.executable} # noqa: E501
+""" % {"python": sys.executable} # noqa: E501
LINK_COMMANDS_ANDROID = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
@@ -443,21 +443,27 @@ def CalculateGeneratorInputInfo(params):
define fixup_dep
# The depfile may not exist if the input file didn't have any #includes.
touch $(depfile).raw
-# Fixup path as in (1).""" +
- (r"""
+# Fixup path as in (1)."""
+ + (
+ r"""
sed -e "s|^$(notdir $@)|$@|" -re 's/\\\\([^$$])/\/\1/g' $(depfile).raw >> $(depfile)"""
- if sys.platform == 'win32' else r"""
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)""") +
- r"""
+ if sys.platform == "win32"
+ else r"""
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)"""
+ )
+ + r"""
# Add extra rules as in (2).
# We remove slashes and replace spaces with new lines;
# remove blank lines;
-# delete the first line and append a colon to the remaining lines.""" +
- ("""
+# delete the first line and append a colon to the remaining lines."""
+ + (
+ """
sed -e 's/\\\\\\\\$$//' -e 's/\\\\\\\\/\\//g' -e 'y| |\\n|' $(depfile).raw |\\"""
- if sys.platform == 'win32' else """
-sed -e 's|\\\\||' -e 'y| |\\n|' $(depfile).raw |\\""") +
- r"""
+ if sys.platform == "win32"
+ else """
+sed -e 's|\\\\||' -e 'y| |\\n|' $(depfile).raw |\\"""
+ )
+ + r"""
grep -v '^$$' |\
sed -e 1d -e 's|$$|:|' \
>> $(depfile)
@@ -616,7 +622,7 @@ def CalculateGeneratorInputInfo(params):
quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-""" % {'python': sys.executable} # noqa: E501
+""" % {"python": sys.executable} # noqa: E501
def WriteRootHeaderSuffixRules(writer):
@@ -733,11 +739,13 @@ def QuoteIfNecessary(string):
string = '"' + string.replace('"', '\\"') + '"'
return string
+
def replace_sep(string):
- if sys.platform == 'win32':
- string = string.replace('\\\\', '/').replace('\\', '/')
+ if sys.platform == "win32":
+ string = string.replace("\\\\", "/").replace("\\", "/")
return string
+
def StringToMakefileVariable(string):
"""Convert a string to a value that is acceptable as a make variable name."""
return re.sub("[^a-zA-Z0-9_]", "_", string)
@@ -1161,7 +1169,7 @@ def WriteRules(
for rule_source in rule.get("rule_sources", []):
dirs = set()
(rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
- (rule_source_root, rule_source_ext) = os.path.splitext(
+ (rule_source_root, _rule_source_ext) = os.path.splitext(
rule_source_basename
)
@@ -1439,9 +1447,7 @@ def WriteSources(
for obj in objs:
assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj
- self.WriteLn(
- "# Add to the list of files we specially track dependencies for."
- )
+ self.WriteLn("# Add to the list of files we specially track dependencies for.")
self.WriteLn("all_deps += $(OBJS)")
self.WriteLn()
@@ -1465,8 +1471,7 @@ def WriteSources(
order_only=True,
)
- pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
- if pchdeps:
+ if pchdeps := precompiled_header.GetObjDependencies(compilable, objs):
self.WriteLn("# Dependencies from obj files to their precompiled headers")
for source, obj, gch in pchdeps:
self.WriteLn(f"{obj}: {gch}")
@@ -1499,7 +1504,8 @@ def WriteSources(
"$(OBJS): GYP_OBJCFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude("m")
+ "%s "
+ % precompiled_header.GetInclude("m")
+ "$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_C_$(BUILDTYPE)) "
"$(CFLAGS_OBJC_$(BUILDTYPE))"
@@ -1508,7 +1514,8 @@ def WriteSources(
"$(OBJS): GYP_OBJCXXFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude("mm")
+ "%s "
+ % precompiled_header.GetInclude("mm")
+ "$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_CC_$(BUILDTYPE)) "
"$(CFLAGS_OBJCC_$(BUILDTYPE))"
@@ -1600,8 +1607,7 @@ def ComputeOutputBasename(self, spec):
target_prefix = spec.get("product_prefix", target_prefix)
target = spec.get("product_name", target)
- product_ext = spec.get("product_extension")
- if product_ext:
+ if product_ext := spec.get("product_extension"):
target_ext = "." + product_ext
return target_prefix + target + target_ext
@@ -1882,7 +1888,7 @@ def WriteTarget(
self.flavor not in ("mac", "openbsd", "netbsd", "win")
and not self.is_standalone_static_library
):
- if self.flavor in ("linux", "android"):
+ if self.flavor in ("linux", "android", "openharmony"):
self.WriteMakeRule(
[self.output_binary],
link_deps,
@@ -1896,7 +1902,7 @@ def WriteTarget(
part_of_all,
postbuilds=postbuilds,
)
- elif self.flavor in ("linux", "android"):
+ elif self.flavor in ("linux", "android", "openharmony"):
self.WriteMakeRule(
[self.output_binary],
link_deps,
@@ -2383,11 +2389,15 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
% {
"makefile_name": makefile_name,
"deps": replace_sep(
- " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files)
+ " ".join(sorted(SourceifyAndQuoteSpaces(bf) for bf in build_files))
+ ),
+ "cmd": replace_sep(
+ gyp.common.EncodePOSIXShellList(
+ [gyp_binary, "-fmake"]
+ + gyp.RegenerateFlags(options)
+ + build_files_args
+ )
),
- "cmd": replace_sep(gyp.common.EncodePOSIXShellList(
- [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args
- )),
}
)
@@ -2460,8 +2470,8 @@ def CalculateMakefilePath(build_file, base_name):
# wasm-ld doesn't support --start-group/--end-group
link_commands = LINK_COMMANDS_LINUX
if flavor in ["wasi", "wasm"]:
- link_commands = link_commands.replace(' -Wl,--start-group', '').replace(
- ' -Wl,--end-group', ''
+ link_commands = link_commands.replace(" -Wl,--start-group", "").replace(
+ " -Wl,--end-group", ""
)
CC_target = replace_sep(GetEnvironFallback(("CC_target", "CC"), "$(CC)"))
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
index b4aea2e69a1939..0f14c055049add 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -136,15 +136,15 @@ def _GetDomainAndUserName():
def _NormalizedSource(source):
"""Normalize the path.
- But not if that gets rid of a variable, as this may expand to something
- larger than one directory.
+ But not if that gets rid of a variable, as this may expand to something
+ larger than one directory.
- Arguments:
- source: The path to be normalize.d
+ Arguments:
+ source: The path to be normalize.d
- Returns:
- The normalized path.
- """
+ Returns:
+ The normalized path.
+ """
normalized = os.path.normpath(source)
if source.count("$") == normalized.count("$"):
source = normalized
@@ -154,11 +154,11 @@ def _NormalizedSource(source):
def _FixPath(path, separator="\\"):
"""Convert paths to a form that will make sense in a vcproj file.
- Arguments:
- path: The path to convert, may contain / etc.
- Returns:
- The path with all slashes made into backslashes.
- """
+ Arguments:
+ path: The path to convert, may contain / etc.
+ Returns:
+ The path with all slashes made into backslashes.
+ """
if (
fixpath_prefix
and path
@@ -179,11 +179,11 @@ def _FixPath(path, separator="\\"):
def _IsWindowsAbsPath(path):
"""
- On Cygwin systems Python needs a little help determining if a path
- is an absolute Windows path or not, so that
- it does not treat those as relative, which results in bad paths like:
- '..\\C:\\\\some_source_code_file.cc'
- """
+ On Cygwin systems Python needs a little help determining if a path
+ is an absolute Windows path or not, so that
+ it does not treat those as relative, which results in bad paths like:
+ '..\\C:\\\\some_source_code_file.cc'
+ """
return path.startswith(("c:", "C:"))
@@ -197,22 +197,22 @@ def _ConvertSourcesToFilterHierarchy(
):
"""Converts a list split source file paths into a vcproj folder hierarchy.
- Arguments:
- sources: A list of source file paths split.
- prefix: A list of source file path layers meant to apply to each of sources.
- excluded: A set of excluded files.
- msvs_version: A MSVSVersion object.
-
- Returns:
- A hierarchy of filenames and MSVSProject.Filter objects that matches the
- layout of the source tree.
- For example:
- _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
- prefix=['joe'])
- -->
- [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
- MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
- """
+ Arguments:
+ sources: A list of source file paths split.
+ prefix: A list of source file path layers meant to apply to each of sources.
+ excluded: A set of excluded files.
+ msvs_version: A MSVSVersion object.
+
+ Returns:
+ A hierarchy of filenames and MSVSProject.Filter objects that matches the
+ layout of the source tree.
+ For example:
+ _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+ prefix=['joe'])
+ -->
+ [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+ MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+ """
if not prefix:
prefix = []
result = []
@@ -361,7 +361,6 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version):
def _BuildCommandLineForRuleRaw(
spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env
):
-
if [x for x in cmd if "$(InputDir)" in x]:
input_dir_preamble = (
"set INPUTDIR=$(InputDir)\n"
@@ -425,8 +424,7 @@ def _BuildCommandLineForRuleRaw(
# Return the path with forward slashes because the command using it might
# not support backslashes.
arguments = [
- i if (i[:1] in "/-" or "=" in i) else _FixPath(i, "/")
- for i in cmd[1:]
+ i if (i[:1] in "/-" or "=" in i) else _FixPath(i, "/") for i in cmd[1:]
]
arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments]
arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
@@ -459,17 +457,17 @@ def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
def _AddActionStep(actions_dict, inputs, outputs, description, command):
"""Merge action into an existing list of actions.
- Care must be taken so that actions which have overlapping inputs either don't
- get assigned to the same input, or get collapsed into one.
-
- Arguments:
- actions_dict: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
- inputs: list of inputs
- outputs: list of outputs
- description: description of the action
- command: command line to execute
- """
+ Care must be taken so that actions which have overlapping inputs either don't
+ get assigned to the same input, or get collapsed into one.
+
+ Arguments:
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ command: command line to execute
+ """
# Require there to be at least one input (call sites will ensure this).
assert inputs
@@ -496,15 +494,15 @@ def _AddCustomBuildToolForMSVS(
):
"""Add a custom build tool to execute something.
- Arguments:
- p: the target project
- spec: the target project dict
- primary_input: input file to attach the build tool to
- inputs: list of inputs
- outputs: list of outputs
- description: description of the action
- cmd: command line to execute
- """
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ primary_input: input file to attach the build tool to
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ cmd: command line to execute
+ """
inputs = _FixPaths(inputs)
outputs = _FixPaths(outputs)
tool = MSVSProject.Tool(
@@ -526,12 +524,12 @@ def _AddCustomBuildToolForMSVS(
def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
"""Add actions accumulated into an actions_dict, merging as needed.
- Arguments:
- p: the target project
- spec: the target project dict
- actions_dict: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
- """
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ """
for primary_input in actions_dict:
inputs = OrderedSet()
outputs = OrderedSet()
@@ -559,12 +557,12 @@ def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
def _RuleExpandPath(path, input_file):
"""Given the input file to which a rule applied, string substitute a path.
- Arguments:
- path: a path to string expand
- input_file: the file to which the rule applied.
- Returns:
- The string substituted path.
- """
+ Arguments:
+ path: a path to string expand
+ input_file: the file to which the rule applied.
+ Returns:
+ The string substituted path.
+ """
path = path.replace(
"$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0]
)
@@ -580,24 +578,24 @@ def _RuleExpandPath(path, input_file):
def _FindRuleTriggerFiles(rule, sources):
"""Find the list of files which a particular rule applies to.
- Arguments:
- rule: the rule in question
- sources: the set of all known source files for this project
- Returns:
- The list of sources that trigger a particular rule.
- """
+ Arguments:
+ rule: the rule in question
+ sources: the set of all known source files for this project
+ Returns:
+ The list of sources that trigger a particular rule.
+ """
return rule.get("rule_sources", [])
def _RuleInputsAndOutputs(rule, trigger_file):
"""Find the inputs and outputs generated by a rule.
- Arguments:
- rule: the rule in question.
- trigger_file: the main trigger for this rule.
- Returns:
- The pair of (inputs, outputs) involved in this rule.
- """
+ Arguments:
+ rule: the rule in question.
+ trigger_file: the main trigger for this rule.
+ Returns:
+ The pair of (inputs, outputs) involved in this rule.
+ """
raw_inputs = _FixPaths(rule.get("inputs", []))
raw_outputs = _FixPaths(rule.get("outputs", []))
inputs = OrderedSet()
@@ -613,13 +611,13 @@ def _RuleInputsAndOutputs(rule, trigger_file):
def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
"""Generate a native rules file.
- Arguments:
- p: the target project
- rules: the set of rules to include
- output_dir: the directory in which the project/gyp resides
- spec: the project dict
- options: global generator options
- """
+ Arguments:
+ p: the target project
+ rules: the set of rules to include
+ output_dir: the directory in which the project/gyp resides
+ spec: the project dict
+ options: global generator options
+ """
rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix)
rules_file = MSVSToolFile.Writer(
os.path.join(output_dir, rules_filename), spec["target_name"]
@@ -658,14 +656,14 @@ def _Cygwinify(path):
def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add):
"""Generate an external makefile to do a set of rules.
- Arguments:
- rules: the list of rules to include
- output_dir: path containing project and gyp files
- spec: project specification data
- sources: set of sources known
- options: global generator options
- actions_to_add: The list of actions we will add to.
- """
+ Arguments:
+ rules: the list of rules to include
+ output_dir: path containing project and gyp files
+ spec: project specification data
+ sources: set of sources known
+ options: global generator options
+ actions_to_add: The list of actions we will add to.
+ """
filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix)
mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
# Find cygwin style versions of some paths.
@@ -743,17 +741,17 @@ def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to
def _EscapeEnvironmentVariableExpansion(s):
"""Escapes % characters.
- Escapes any % characters so that Windows-style environment variable
- expansions will leave them alone.
- See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
- to understand why we have to do this.
+ Escapes any % characters so that Windows-style environment variable
+ expansions will leave them alone.
+ See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
+ to understand why we have to do this.
- Args:
- s: The string to be escaped.
+ Args:
+ s: The string to be escaped.
- Returns:
- The escaped string.
- """
+ Returns:
+ The escaped string.
+ """
s = s.replace("%", "%%")
return s
@@ -764,17 +762,17 @@ def _EscapeEnvironmentVariableExpansion(s):
def _EscapeCommandLineArgumentForMSVS(s):
"""Escapes a Windows command-line argument.
- So that the Win32 CommandLineToArgv function will turn the escaped result back
- into the original string.
- See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
- ("Parsing C++ Command-Line Arguments") to understand why we have to do
- this.
+ So that the Win32 CommandLineToArgv function will turn the escaped result back
+ into the original string.
+ See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+ ("Parsing C++ Command-Line Arguments") to understand why we have to do
+ this.
- Args:
- s: the string to be escaped.
- Returns:
- the escaped string.
- """
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
def _Replace(match):
# For a literal quote, CommandLineToArgv requires an odd number of
@@ -795,24 +793,24 @@ def _Replace(match):
def _EscapeVCProjCommandLineArgListItem(s):
"""Escapes command line arguments for MSVS.
- The VCProj format stores string lists in a single string using commas and
- semi-colons as separators, which must be quoted if they are to be
- interpreted literally. However, command-line arguments may already have
- quotes, and the VCProj parser is ignorant of the backslash escaping
- convention used by CommandLineToArgv, so the command-line quotes and the
- VCProj quotes may not be the same quotes. So to store a general
- command-line argument in a VCProj list, we need to parse the existing
- quoting according to VCProj's convention and quote any delimiters that are
- not already quoted by that convention. The quotes that we add will also be
- seen by CommandLineToArgv, so if backslashes precede them then we also have
- to escape those backslashes according to the CommandLineToArgv
- convention.
-
- Args:
- s: the string to be escaped.
- Returns:
- the escaped string.
- """
+ The VCProj format stores string lists in a single string using commas and
+ semi-colons as separators, which must be quoted if they are to be
+ interpreted literally. However, command-line arguments may already have
+ quotes, and the VCProj parser is ignorant of the backslash escaping
+ convention used by CommandLineToArgv, so the command-line quotes and the
+ VCProj quotes may not be the same quotes. So to store a general
+ command-line argument in a VCProj list, we need to parse the existing
+ quoting according to VCProj's convention and quote any delimiters that are
+ not already quoted by that convention. The quotes that we add will also be
+ seen by CommandLineToArgv, so if backslashes precede them then we also have
+ to escape those backslashes according to the CommandLineToArgv
+ convention.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
def _Replace(match):
# For a non-literal quote, CommandLineToArgv requires an even number of
@@ -896,15 +894,15 @@ def _GenerateRulesForMSVS(
):
"""Generate all the rules for a particular project.
- Arguments:
- p: the project
- output_dir: directory to emit rules to
- options: global options passed to the generator
- spec: the specification for this project
- sources: the set of all known source files in this project
- excluded_sources: the set of sources excluded from normal processing
- actions_to_add: deferred list of actions to add in
- """
+ Arguments:
+ p: the project
+ output_dir: directory to emit rules to
+ options: global options passed to the generator
+ spec: the specification for this project
+ sources: the set of all known source files in this project
+ excluded_sources: the set of sources excluded from normal processing
+ actions_to_add: deferred list of actions to add in
+ """
rules = spec.get("rules", [])
rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
@@ -946,12 +944,12 @@ def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
"""Take inputs with actions attached out of the list of exclusions.
- Arguments:
- excluded_sources: list of source files not to be built.
- actions_to_add: dict of actions keyed on source file they're attached to.
- Returns:
- excluded_sources with files that have actions attached removed.
- """
+ Arguments:
+ excluded_sources: list of source files not to be built.
+ actions_to_add: dict of actions keyed on source file they're attached to.
+ Returns:
+ excluded_sources with files that have actions attached removed.
+ """
must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
return [s for s in excluded_sources if s not in must_keep]
@@ -963,14 +961,14 @@ def _GetDefaultConfiguration(spec):
def _GetGuidOfProject(proj_path, spec):
"""Get the guid for the project.
- Arguments:
- proj_path: Path of the vcproj or vcxproj file to generate.
- spec: The target dictionary containing the properties of the target.
- Returns:
- the guid.
- Raises:
- ValueError: if the specified GUID is invalid.
- """
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ the guid.
+ Raises:
+ ValueError: if the specified GUID is invalid.
+ """
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
# Decide the guid of the project.
@@ -989,13 +987,13 @@ def _GetGuidOfProject(proj_path, spec):
def _GetMsbuildToolsetOfProject(proj_path, spec, version):
"""Get the platform toolset for the project.
- Arguments:
- proj_path: Path of the vcproj or vcxproj file to generate.
- spec: The target dictionary containing the properties of the target.
- version: The MSVSVersion object.
- Returns:
- the platform toolset string or None.
- """
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ version: The MSVSVersion object.
+ Returns:
+ the platform toolset string or None.
+ """
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
toolset = default_config.get("msbuild_toolset")
@@ -1009,14 +1007,14 @@ def _GetMsbuildToolsetOfProject(proj_path, spec, version):
def _GenerateProject(project, options, version, generator_flags, spec):
"""Generates a vcproj file.
- Arguments:
- project: the MSVSProject object.
- options: global generator options.
- version: the MSVSVersion object.
- generator_flags: dict of generator-specific flags.
- Returns:
- A list of source files that cannot be found on disk.
- """
+ Arguments:
+ project: the MSVSProject object.
+ options: global generator options.
+ version: the MSVSVersion object.
+ generator_flags: dict of generator-specific flags.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
default_config = _GetDefaultConfiguration(project.spec)
# Skip emitting anything if told to with msvs_existing_vcproj option.
@@ -1032,12 +1030,12 @@ def _GenerateProject(project, options, version, generator_flags, spec):
def _GenerateMSVSProject(project, options, version, generator_flags):
"""Generates a .vcproj file. It may create .rules and .user files too.
- Arguments:
- project: The project object we will generate the file for.
- options: Global options passed to the generator.
- version: The VisualStudioVersion object.
- generator_flags: dict of generator-specific flags.
- """
+ Arguments:
+ project: The project object we will generate the file for.
+ options: Global options passed to the generator.
+ version: The VisualStudioVersion object.
+ generator_flags: dict of generator-specific flags.
+ """
spec = project.spec
gyp.common.EnsureDirExists(project.path)
@@ -1094,11 +1092,11 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
def _GetUniquePlatforms(spec):
"""Returns the list of unique platforms for this spec, e.g ['win32', ...].
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- The MSVSUserFile object created.
- """
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
# Gather list of unique platforms.
platforms = OrderedSet()
for configuration in spec["configurations"]:
@@ -1110,14 +1108,14 @@ def _GetUniquePlatforms(spec):
def _CreateMSVSUserFile(proj_path, version, spec):
"""Generates a .user file for the user running this Gyp program.
- Arguments:
- proj_path: The path of the project file being created. The .user file
- shares the same path (with an appropriate suffix).
- version: The VisualStudioVersion object.
- spec: The target dictionary containing the properties of the target.
- Returns:
- The MSVSUserFile object created.
- """
+ Arguments:
+ proj_path: The path of the project file being created. The .user file
+ shares the same path (with an appropriate suffix).
+ version: The VisualStudioVersion object.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
(domain, username) = _GetDomainAndUserName()
vcuser_filename = ".".join([proj_path, domain, username, "user"])
user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"])
@@ -1127,14 +1125,14 @@ def _CreateMSVSUserFile(proj_path, version, spec):
def _GetMSVSConfigurationType(spec, build_file):
"""Returns the configuration type for this project.
- It's a number defined by Microsoft. May raise an exception.
+ It's a number defined by Microsoft. May raise an exception.
- Args:
- spec: The target dictionary containing the properties of the target.
- build_file: The path of the gyp file.
- Returns:
- An integer, the configuration type.
- """
+ Args:
+ spec: The target dictionary containing the properties of the target.
+ build_file: The path of the gyp file.
+ Returns:
+ An integer, the configuration type.
+ """
try:
config_type = {
"executable": "1", # .exe
@@ -1161,17 +1159,17 @@ def _GetMSVSConfigurationType(spec, build_file):
def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
"""Adds a configuration to the MSVS project.
- Many settings in a vcproj file are specific to a configuration. This
- function the main part of the vcproj file that's configuration specific.
-
- Arguments:
- p: The target project being generated.
- spec: The target dictionary containing the properties of the target.
- config_type: The configuration type, a number as defined by Microsoft.
- config_name: The name of the configuration.
- config: The dictionary that defines the special processing to be done
- for this configuration.
- """
+ Many settings in a vcproj file are specific to a configuration. This
+ function the main part of the vcproj file that's configuration specific.
+
+ Arguments:
+ p: The target project being generated.
+ spec: The target dictionary containing the properties of the target.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ """
# Get the information for this configuration
include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config)
libraries = _GetLibraries(spec)
@@ -1251,12 +1249,12 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
def _GetIncludeDirs(config):
"""Returns the list of directories to be used for #include directives.
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of directory paths.
- """
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
# TODO(bradnelson): include_dirs should really be flexible enough not to
# require this sort of thing.
include_dirs = config.get("include_dirs", []) + config.get(
@@ -1275,12 +1273,12 @@ def _GetIncludeDirs(config):
def _GetLibraryDirs(config):
"""Returns the list of directories to be used for library search paths.
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of directory paths.
- """
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
library_dirs = config.get("library_dirs", [])
library_dirs = _FixPaths(library_dirs)
@@ -1290,11 +1288,11 @@ def _GetLibraryDirs(config):
def _GetLibraries(spec):
"""Returns the list of libraries for this configuration.
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- The list of directory paths.
- """
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The list of directory paths.
+ """
libraries = spec.get("libraries", [])
# Strip out -l, as it is not used on windows (but is needed so we can pass
# in libraries that are assumed to be in the default library path).
@@ -1316,14 +1314,14 @@ def _GetLibraries(spec):
def _GetOutputFilePathAndTool(spec, msbuild):
"""Returns the path and tool to use for this target.
- Figures out the path of the file this spec will create and the name of
- the VC tool that will create it.
+ Figures out the path of the file this spec will create and the name of
+ the VC tool that will create it.
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- A triple of (file path, name of the vc tool, name of the msbuild tool)
- """
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A triple of (file path, name of the vc tool, name of the msbuild tool)
+ """
# Select a name for the output file.
out_file = ""
vc_tool = ""
@@ -1355,17 +1353,16 @@ def _GetOutputFilePathAndTool(spec, msbuild):
def _GetOutputTargetExt(spec):
"""Returns the extension for this target, including the dot
- If product_extension is specified, set target_extension to this to avoid
- MSB8012, returns None otherwise. Ignores any target_extension settings in
- the input files.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- A string with the extension, or None
- """
- target_extension = spec.get("product_extension")
- if target_extension:
+ If product_extension is specified, set target_extension to this to avoid
+ MSB8012, returns None otherwise. Ignores any target_extension settings in
+ the input files.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A string with the extension, or None
+ """
+ if target_extension := spec.get("product_extension"):
return "." + target_extension
return None
@@ -1373,12 +1370,12 @@ def _GetOutputTargetExt(spec):
def _GetDefines(config):
"""Returns the list of preprocessor definitions for this configuration.
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of preprocessor definitions.
- """
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of preprocessor definitions.
+ """
defines = []
for d in config.get("defines", []):
fd = "=".join([str(dpart) for dpart in d]) if isinstance(d, list) else str(d)
@@ -1412,11 +1409,11 @@ def _GetModuleDefinition(spec):
def _ConvertToolsToExpectedForm(tools):
"""Convert tools to a form expected by Visual Studio.
- Arguments:
- tools: A dictionary of settings; the tool name is the key.
- Returns:
- A list of Tool objects.
- """
+ Arguments:
+ tools: A dictionary of settings; the tool name is the key.
+ Returns:
+ A list of Tool objects.
+ """
tool_list = []
for tool, settings in tools.items():
# Collapse settings with lists.
@@ -1439,15 +1436,15 @@ def _ConvertToolsToExpectedForm(tools):
def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
"""Add to the project file the configuration specified by config.
- Arguments:
- p: The target project being generated.
- spec: the target project dict.
- tools: A dictionary of settings; the tool name is the key.
- config: The dictionary that defines the special processing to be done
- for this configuration.
- config_type: The configuration type, a number as defined by Microsoft.
- config_name: The name of the configuration.
- """
+ Arguments:
+ p: The target project being generated.
+ spec: the target project dict.
+ tools: A dictionary of settings; the tool name is the key.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ """
attributes = _GetMSVSAttributes(spec, config, config_type)
# Add in this configuration.
tool_list = _ConvertToolsToExpectedForm(tools)
@@ -1488,18 +1485,18 @@ def _AddNormalizedSources(sources_set, sources_array):
def _PrepareListOfSources(spec, generator_flags, gyp_file):
"""Prepare list of sources and excluded sources.
- Besides the sources specified directly in the spec, adds the gyp file so
- that a change to it will cause a re-compile. Also adds appropriate sources
- for actions and copies. Assumes later stage will un-exclude files which
- have custom build steps attached.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- gyp_file: The name of the gyp file.
- Returns:
- A pair of (list of sources, list of excluded sources).
- The sources will be relative to the gyp file.
- """
+ Besides the sources specified directly in the spec, adds the gyp file so
+ that a change to it will cause a re-compile. Also adds appropriate sources
+ for actions and copies. Assumes later stage will un-exclude files which
+ have custom build steps attached.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ gyp_file: The name of the gyp file.
+ Returns:
+ A pair of (list of sources, list of excluded sources).
+ The sources will be relative to the gyp file.
+ """
sources = OrderedSet()
_AddNormalizedSources(sources, spec.get("sources", []))
excluded_sources = OrderedSet()
@@ -1529,19 +1526,19 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
):
"""Adjusts the list of sources and excluded sources.
- Also converts the sets to lists.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- options: Global generator options.
- gyp_dir: The path to the gyp file being processed.
- sources: A set of sources to be included for this project.
- excluded_sources: A set of sources to be excluded for this project.
- version: A MSVSVersion object.
- Returns:
- A trio of (list of sources, list of excluded sources,
- path of excluded IDL file)
- """
+ Also converts the sets to lists.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ options: Global generator options.
+ gyp_dir: The path to the gyp file being processed.
+ sources: A set of sources to be included for this project.
+ excluded_sources: A set of sources to be excluded for this project.
+ version: A MSVSVersion object.
+ Returns:
+ A trio of (list of sources, list of excluded sources,
+ path of excluded IDL file)
+ """
# Exclude excluded sources coming into the generator.
excluded_sources.update(OrderedSet(spec.get("sources_excluded", [])))
# Add excluded sources into sources for good measure.
@@ -1669,7 +1666,7 @@ def _HandlePreCompiledHeaders(p, sources, spec):
p.AddFileConfig(
source, _ConfigFullName(config_name, config), {}, tools=[tool]
)
- basename, extension = os.path.splitext(source)
+ _basename, extension = os.path.splitext(source)
if extension == ".c":
extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"]
else:
@@ -1680,7 +1677,7 @@ def DisableForSourceTree(source_tree):
if isinstance(source, MSVSProject.Filter):
DisableForSourceTree(source.contents)
else:
- basename, extension = os.path.splitext(source)
+ _basename, extension = os.path.splitext(source)
if extension in extensions_excluded_from_precompile:
for config_name, config in spec["configurations"].items():
tool = MSVSProject.Tool(
@@ -1837,8 +1834,11 @@ def _CollapseSingles(parent, node):
# Recursively explorer the tree of dicts looking for projects which are
# the sole item in a folder which has the same name as the project. Bring
# such projects up one level.
- if (isinstance(node, dict) and len(node) == 1 and
- next(iter(node)) == parent + ".vcproj"):
+ if (
+ isinstance(node, dict)
+ and len(node) == 1
+ and next(iter(node)) == parent + ".vcproj"
+ ):
return node[next(iter(node))]
if not isinstance(node, dict):
return node
@@ -1907,14 +1907,14 @@ def _GetPlatformOverridesOfProject(spec):
def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
"""Create a MSVSProject object for the targets found in target list.
- Arguments:
- target_list: the list of targets to generate project objects for.
- target_dicts: the dictionary of specifications.
- options: global generator options.
- msvs_version: the MSVSVersion object.
- Returns:
- A set of created projects, keyed by target.
- """
+ Arguments:
+ target_list: the list of targets to generate project objects for.
+ target_dicts: the dictionary of specifications.
+ options: global generator options.
+ msvs_version: the MSVSVersion object.
+ Returns:
+ A set of created projects, keyed by target.
+ """
global fixpath_prefix
# Generate each project.
projects = {}
@@ -1958,15 +1958,15 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
def _InitNinjaFlavor(params, target_list, target_dicts):
"""Initialize targets for the ninja flavor.
- This sets up the necessary variables in the targets to generate msvs projects
- that use ninja as an external builder. The variables in the spec are only set
- if they have not been set. This allows individual specs to override the
- default values initialized here.
- Arguments:
- params: Params provided to the generator.
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- """
+ This sets up the necessary variables in the targets to generate msvs projects
+ that use ninja as an external builder. The variables in the spec are only set
+ if they have not been set. This allows individual specs to override the
+ default values initialized here.
+ Arguments:
+ params: Params provided to the generator.
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ """
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec.get("msvs_external_builder"):
@@ -2077,12 +2077,12 @@ def CalculateGeneratorInputInfo(params):
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate .sln and .vcproj files.
- This is the entry point for this generator.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- data: Dictionary containing per .gyp data.
- """
+ This is the entry point for this generator.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dictionary containing per .gyp data.
+ """
global fixpath_prefix
options = params["options"]
@@ -2176,14 +2176,14 @@ def _GenerateMSBuildFiltersFile(
):
"""Generate the filters file.
- This file is used by Visual Studio to organize the presentation of source
- files into folders.
+ This file is used by Visual Studio to organize the presentation of source
+ files into folders.
- Arguments:
- filters_path: The path of the file to be created.
- source_files: The hierarchical structure of all the sources.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
- """
+ Arguments:
+ filters_path: The path of the file to be created.
+ source_files: The hierarchical structure of all the sources.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ """
filter_group = []
source_group = []
_AppendFiltersForMSBuild(
@@ -2224,14 +2224,14 @@ def _AppendFiltersForMSBuild(
):
"""Creates the list of filters and sources to be added in the filter file.
- Args:
- parent_filter_name: The name of the filter under which the sources are
- found.
- sources: The hierarchy of filters and sources to process.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
- filter_group: The list to which filter entries will be appended.
- source_group: The list to which source entries will be appended.
- """
+ Args:
+ parent_filter_name: The name of the filter under which the sources are
+ found.
+ sources: The hierarchy of filters and sources to process.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ filter_group: The list to which filter entries will be appended.
+ source_group: The list to which source entries will be appended.
+ """
for source in sources:
if isinstance(source, MSVSProject.Filter):
# We have a sub-filter. Create the name of that sub-filter.
@@ -2275,13 +2275,13 @@ def _MapFileToMsBuildSourceType(
):
"""Returns the group and element type of the source file.
- Arguments:
- source: The source file name.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
+ Arguments:
+ source: The source file name.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
- Returns:
- A pair of (group this file should be part of, the label of element)
- """
+ Returns:
+ A pair of (group this file should be part of, the label of element)
+ """
_, ext = os.path.splitext(source)
ext = ext.lower()
if ext in extension_to_rule_name:
@@ -2369,22 +2369,22 @@ def _GenerateRulesForMSBuild(
class MSBuildRule:
"""Used to store information used to generate an MSBuild rule.
- Attributes:
- rule_name: The rule name, sanitized to use in XML.
- target_name: The name of the target.
- after_targets: The name of the AfterTargets element.
- before_targets: The name of the BeforeTargets element.
- depends_on: The name of the DependsOn element.
- compute_output: The name of the ComputeOutput element.
- dirs_to_make: The name of the DirsToMake element.
- inputs: The name of the _inputs element.
- tlog: The name of the _tlog element.
- extension: The extension this rule applies to.
- description: The message displayed when this rule is invoked.
- additional_dependencies: A string listing additional dependencies.
- outputs: The outputs of this rule.
- command: The command used to run the rule.
- """
+ Attributes:
+ rule_name: The rule name, sanitized to use in XML.
+ target_name: The name of the target.
+ after_targets: The name of the AfterTargets element.
+ before_targets: The name of the BeforeTargets element.
+ depends_on: The name of the DependsOn element.
+ compute_output: The name of the ComputeOutput element.
+ dirs_to_make: The name of the DirsToMake element.
+ inputs: The name of the _inputs element.
+ tlog: The name of the _tlog element.
+ extension: The extension this rule applies to.
+ description: The message displayed when this rule is invoked.
+ additional_dependencies: A string listing additional dependencies.
+ outputs: The outputs of this rule.
+ command: The command used to run the rule.
+ """
def __init__(self, rule, spec):
self.display_name = rule["rule_name"]
@@ -2909,7 +2909,7 @@ def _GetConfigurationCondition(name, settings, spec):
def _GetMSBuildProjectConfigurations(configurations, spec):
group = ["ItemGroup", {"Label": "ProjectConfigurations"}]
- for (name, settings) in sorted(configurations.items()):
+ for name, settings in sorted(configurations.items()):
configuration, platform = _GetConfigurationAndPlatform(name, settings, spec)
designation = f"{configuration}|{platform}"
group.append(
@@ -3003,10 +3003,11 @@ def _GetMSBuildConfigurationDetails(spec, build_file):
vctools_version = msbuild_attributes.get("VCToolsVersion")
config_type = msbuild_attributes.get("ConfigurationType")
_AddConditionalProperty(properties, condition, "ConfigurationType", config_type)
- spectre_mitigation = msbuild_attributes.get('SpectreMitigation')
+ spectre_mitigation = msbuild_attributes.get("SpectreMitigation")
if spectre_mitigation:
- _AddConditionalProperty(properties, condition, "SpectreMitigation",
- spectre_mitigation)
+ _AddConditionalProperty(
+ properties, condition, "SpectreMitigation", spectre_mitigation
+ )
if config_type == "Driver":
_AddConditionalProperty(properties, condition, "DriverType", "WDM")
_AddConditionalProperty(
@@ -3166,8 +3167,7 @@ def _GetMSBuildAttributes(spec, config, build_file):
"windows_driver": "Link",
"static_library": "Lib",
}
- msbuild_tool = msbuild_tool_map.get(spec["type"])
- if msbuild_tool:
+ if msbuild_tool := msbuild_tool_map.get(spec["type"]):
msbuild_settings = config["finalized_msbuild_settings"]
out_file = msbuild_settings[msbuild_tool].get("OutputFile")
if out_file:
@@ -3184,8 +3184,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
# there are actions.
# TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
new_paths = []
- cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0]
- if cygwin_dirs:
+ if cygwin_dirs := spec.get("msvs_cygwin_dirs", ["."])[0]:
cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs)
new_paths.append(cyg_path)
# TODO(jeanluc) Change the convention to have both a cygwin_dir and a
@@ -3196,7 +3195,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
new_paths = "$(ExecutablePath);" + ";".join(new_paths)
properties = {}
- for (name, configuration) in sorted(configurations.items()):
+ for name, configuration in sorted(configurations.items()):
condition = _GetConfigurationCondition(name, configuration, spec)
attributes = _GetMSBuildAttributes(spec, configuration, build_file)
msbuild_settings = configuration["finalized_msbuild_settings"]
@@ -3235,14 +3234,14 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
def _AddConditionalProperty(properties, condition, name, value):
"""Adds a property / conditional value pair to a dictionary.
- Arguments:
- properties: The dictionary to be modified. The key is the name of the
- property. The value is itself a dictionary; its key is the value and
- the value a list of condition for which this value is true.
- condition: The condition under which the named property has the value.
- name: The name of the property.
- value: The value of the property.
- """
+ Arguments:
+ properties: The dictionary to be modified. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+ the value a list of condition for which this value is true.
+ condition: The condition under which the named property has the value.
+ name: The name of the property.
+ value: The value of the property.
+ """
if name not in properties:
properties[name] = {}
values = properties[name]
@@ -3259,13 +3258,13 @@ def _AddConditionalProperty(properties, condition, name, value):
def _GetMSBuildPropertyGroup(spec, label, properties):
"""Returns a PropertyGroup definition for the specified properties.
- Arguments:
- spec: The target project dict.
- label: An optional label for the PropertyGroup.
- properties: The dictionary to be converted. The key is the name of the
- property. The value is itself a dictionary; its key is the value and
- the value a list of condition for which this value is true.
- """
+ Arguments:
+ spec: The target project dict.
+ label: An optional label for the PropertyGroup.
+ properties: The dictionary to be converted. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+ the value a list of condition for which this value is true.
+ """
group = ["PropertyGroup"]
if label:
group.append({"Label": label})
@@ -3314,7 +3313,7 @@ def GetEdges(node):
def _GetMSBuildToolSettingsSections(spec, configurations):
groups = []
- for (name, configuration) in sorted(configurations.items()):
+ for name, configuration in sorted(configurations.items()):
msbuild_settings = configuration["finalized_msbuild_settings"]
group = [
"ItemDefinitionGroup",
@@ -3370,7 +3369,6 @@ def _FinalizeMSBuildSettings(spec, configuration):
prebuild = configuration.get("msvs_prebuild")
postbuild = configuration.get("msvs_postbuild")
def_file = _GetModuleDefinition(spec)
- precompiled_header = configuration.get("msvs_precompiled_header")
# Add the information to the appropriate tool
# TODO(jeanluc) We could optimize and generate these settings only if
@@ -3408,11 +3406,11 @@ def _FinalizeMSBuildSettings(spec, configuration):
msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings
)
# Turn on precompiled headers if appropriate.
- if precompiled_header:
+ if precompiled_header := configuration.get("msvs_precompiled_header"):
# While MSVC works with just file name eg. "v8_pch.h", ClangCL requires
# the full path eg. "tools/msvs/pch/v8_pch.h" to find the file.
# P.S. Only ClangCL defines msbuild_toolset, for MSVC it is None.
- if configuration.get("msbuild_toolset") != 'ClangCL':
+ if configuration.get("msbuild_toolset") != "ClangCL":
precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use")
_ToolAppend(
@@ -3474,16 +3472,16 @@ def _GetValueFormattedForMSBuild(tool_name, name, value):
def _VerifySourcesExist(sources, root_dir):
"""Verifies that all source files exist on disk.
- Checks that all regular source files, i.e. not created at run time,
- exist on disk. Missing files cause needless recompilation but no otherwise
- visible errors.
+ Checks that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation but no otherwise
+ visible errors.
- Arguments:
- sources: A recursive list of Filter/file names.
- root_dir: The root directory for the relative path names.
- Returns:
- A list of source files that cannot be found on disk.
- """
+ Arguments:
+ sources: A recursive list of Filter/file names.
+ root_dir: The root directory for the relative path names.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
missing_sources = []
for source in sources:
if isinstance(source, MSVSProject.Filter):
@@ -3568,24 +3566,20 @@ def _AddSources2(
detail.append(["ExcludedFromBuild", "true"])
else:
for config_name, configuration in sorted(excluded_configurations):
- condition = _GetConfigurationCondition(
- config_name, configuration
- )
+ condition = _GetConfigurationCondition(config_name, configuration)
detail.append(
["ExcludedFromBuild", {"Condition": condition}, "true"]
)
# Add precompile if needed
for config_name, configuration in spec["configurations"].items():
- precompiled_source = configuration.get(
- "msvs_precompiled_source", ""
- )
+ precompiled_source = configuration.get("msvs_precompiled_source", "")
if precompiled_source != "":
precompiled_source = _FixPath(precompiled_source)
if not extensions_excluded_from_precompile:
# If the precompiled header is generated by a C source,
# we must not try to use it for C++ sources,
# and vice versa.
- basename, extension = os.path.splitext(precompiled_source)
+ _basename, extension = os.path.splitext(precompiled_source)
if extension == ".c":
extensions_excluded_from_precompile = [
".cc",
@@ -3826,15 +3820,15 @@ def _GenerateMSBuildProject(project, options, version, generator_flags, spec):
def _GetMSBuildExternalBuilderTargets(spec):
"""Return a list of MSBuild targets for external builders.
- The "Build" and "Clean" targets are always generated. If the spec contains
- 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
- be generated, to support building selected C/C++ files.
+ The "Build" and "Clean" targets are always generated. If the spec contains
+ 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+ be generated, to support building selected C/C++ files.
- Arguments:
- spec: The gyp target spec.
- Returns:
- List of MSBuild 'Target' specs.
- """
+ Arguments:
+ spec: The gyp target spec.
+ Returns:
+ List of MSBuild 'Target' specs.
+ """
build_cmd = _BuildCommandLineForRuleRaw(
spec, spec["msvs_external_builder_build_cmd"], False, False, False, False
)
@@ -3882,14 +3876,14 @@ def _GetMSBuildExtensionTargets(targets_files_of_rules):
def _GenerateActionsForMSBuild(spec, actions_to_add):
"""Add actions accumulated into an actions_to_add, merging as needed.
- Arguments:
- spec: the target project dict
- actions_to_add: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
+ Arguments:
+ spec: the target project dict
+ actions_to_add: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
- Returns:
- A pair of (action specification, the sources handled by this action).
- """
+ Returns:
+ A pair of (action specification, the sources handled by this action).
+ """
sources_handled_by_action = OrderedSet()
actions_spec = []
for primary_input, actions in actions_to_add.items():
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
index 8cea3d1479e3b0..e3c4758696c40d 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
@@ -3,7 +3,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-""" Unit tests for the msvs.py file. """
+"""Unit tests for the msvs.py file."""
import unittest
from io import StringIO
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
index b7ac823d1490d6..bc9ddd26545e9d 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -5,6 +5,7 @@
import collections
import copy
+import ctypes
import hashlib
import json
import multiprocessing
@@ -263,8 +264,7 @@ def ExpandSpecial(self, path, product_dir=None):
dir.
"""
- PRODUCT_DIR = "$!PRODUCT_DIR"
- if PRODUCT_DIR in path:
+ if (PRODUCT_DIR := "$!PRODUCT_DIR") in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
@@ -272,8 +272,7 @@ def ExpandSpecial(self, path, product_dir=None):
path = path.replace(PRODUCT_DIR + "\\", "")
path = path.replace(PRODUCT_DIR, ".")
- INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR"
- if INTERMEDIATE_DIR in path:
+ if (INTERMEDIATE_DIR := "$!INTERMEDIATE_DIR") in path:
int_dir = self.GypPathToUniqueOutput("gen")
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
@@ -1304,7 +1303,7 @@ def WritePchTargets(self, ninja_file, pch_commands):
ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
- """Write out a link step. Fills out target.binary. """
+ """Write out a link step. Fills out target.binary."""
if self.flavor != "mac" or len(self.archs) == 1:
return self.WriteLinkForArch(
self.ninja, spec, config_name, config, link_deps, compile_deps
@@ -1348,7 +1347,7 @@ def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
def WriteLinkForArch(
self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None
):
- """Write out a link step. Fills out target.binary. """
+ """Write out a link step. Fills out target.binary."""
command = {
"executable": "link",
"loadable_module": "solink_module",
@@ -1756,11 +1755,9 @@ def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
+ " && ".join([ninja_syntax.escape(command) for command in postbuilds])
)
command_string = (
- commands
- + "); G=$$?; "
+ commands + "); G=$$?; "
# Remove the final output if any postbuild failed.
- "((exit $$G) || rm -rf %s) " % output
- + "&& exit $$G)"
+ "((exit $$G) || rm -rf %s) " % output + "&& exit $$G)"
)
if is_command_start:
return "(" + command_string + " && "
@@ -1949,7 +1946,8 @@ def WriteNewNinjaRule(
)
else:
rspfile_content = gyp.msvs_emulation.EncodeRspFileList(
- args, win_shell_flags.quote)
+ args, win_shell_flags.quote
+ )
command = (
"%s gyp-win-tool action-wrapper $arch " % sys.executable
+ rspfile
@@ -1995,7 +1993,7 @@ def CalculateVariables(default_variables, params):
# Copy additional generator configuration data from Xcode, which is shared
# by the Mac Ninja generator.
- import gyp.generator.xcode as xcode_generator
+ import gyp.generator.xcode as xcode_generator # noqa: PLC0415
generator_additional_non_configuration_keys = getattr(
xcode_generator, "generator_additional_non_configuration_keys", []
@@ -2018,7 +2016,7 @@ def CalculateVariables(default_variables, params):
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
+ import gyp.generator.msvs as msvs_generator # noqa: PLC0415
generator_additional_non_configuration_keys = getattr(
msvs_generator, "generator_additional_non_configuration_keys", []
@@ -2075,20 +2073,17 @@ def OpenOutput(path, mode="w"):
def CommandWithWrapper(cmd, wrappers, prog):
- wrapper = wrappers.get(cmd, "")
- if wrapper:
+ if wrapper := wrappers.get(cmd, ""):
return wrapper + " " + prog
return prog
def GetDefaultConcurrentLinks():
"""Returns a best-guess for a number of concurrent links."""
- pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY") or 0)
- if pool_size:
+ if pool_size := int(os.environ.get("GYP_LINK_CONCURRENCY") or 0):
return pool_size
if sys.platform in ("win32", "cygwin"):
- import ctypes
class MEMORYSTATUSEX(ctypes.Structure):
_fields_ = [
@@ -2109,8 +2104,8 @@ class MEMORYSTATUSEX(ctypes.Structure):
# VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
# on a 64 GiB machine.
- mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GiB
- hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX") or 2 ** 32))
+ mem_limit = max(1, stat.ullTotalPhys // (5 * (2**30))) # total / 5GiB
+ hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX") or 2**32))
return min(mem_limit, hard_cap)
elif sys.platform.startswith("linux"):
if os.path.exists("/proc/meminfo"):
@@ -2121,14 +2116,14 @@ class MEMORYSTATUSEX(ctypes.Structure):
if not match:
continue
# Allow 8Gb per link on Linux because Gold is quite memory hungry
- return max(1, int(match.group(1)) // (8 * (2 ** 20)))
+ return max(1, int(match.group(1)) // (8 * (2**20)))
return 1
elif sys.platform == "darwin":
try:
avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"]))
# A static library debug build of Chromium's unit_tests takes ~2.7GB, so
# 4GB per ld process allows for some more bloat.
- return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB
+ return max(1, avail_bytes // (4 * (2**30))) # total / 4GB
except subprocess.CalledProcessError:
return 1
else:
@@ -2305,8 +2300,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
key_prefix = re.sub(r"\.HOST$", ".host", key_prefix)
wrappers[key_prefix] = os.path.join(build_to_root, value)
- mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
- if mac_toolchain_dir:
+ if mac_toolchain_dir := generator_flags.get("mac_toolchain_dir", None):
wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
if flavor == "win":
@@ -2417,8 +2411,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
"cc_s",
description="CC $out",
command=(
- "$cc $defines $includes $cflags $cflags_c "
- "$cflags_pch_c -c $in -o $out"
+ "$cc $defines $includes $cflags $cflags_c $cflags_pch_c -c $in -o $out"
),
)
master_ninja.rule(
@@ -2529,8 +2522,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
"solink",
description="SOLINK $lib",
restat=True,
- command=mtime_preserving_solink_base
- % {"suffix": "@$link_file_list"},
+ command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"},
rspfile="$link_file_list",
rspfile_content=(
"-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs"
@@ -2715,7 +2707,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
command="$env %(python)s gyp-mac-tool compile-ios-framework-header-map "
"$out $framework $in && $env %(python)s gyp-mac-tool "
"copy-ios-framework-headers $framework $copy_headers"
- % {'python': sys.executable},
+ % {"python": sys.executable},
)
master_ninja.rule(
"mac_tool",
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
index 581b14595e143e..616bc7aaf015a2 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
@@ -4,7 +4,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-""" Unit tests for the ninja.py file. """
+"""Unit tests for the ninja.py file."""
import sys
import unittest
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
index cdf11c3b27b1d5..db4b45d1a04d25 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -531,7 +531,7 @@ def AddSourceToTarget(source, type, pbxp, xct):
library_extensions = ["a", "dylib", "framework", "o"]
basename = posixpath.basename(source)
- (root, ext) = posixpath.splitext(basename)
+ (_root, ext) = posixpath.splitext(basename)
if ext:
ext = ext[1:].lower()
@@ -564,12 +564,12 @@ def AddHeaderToTarget(header, pbxp, xct, is_public):
def ExpandXcodeVariables(string, expansions):
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
- In some rare cases, it is appropriate to expand Xcode variables when a
- project file is generated. For any substring $(VAR) in string, if VAR is a
- key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
- Any $(VAR) substring in string for which VAR is not a key in the expansions
- dict will remain in the returned string.
- """
+ In some rare cases, it is appropriate to expand Xcode variables when a
+ project file is generated. For any substring $(VAR) in string, if VAR is a
+ key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+ Any $(VAR) substring in string for which VAR is not a key in the expansions
+ dict will remain in the returned string.
+ """
matches = _xcode_variable_re.findall(string)
if matches is None:
@@ -592,9 +592,9 @@ def ExpandXcodeVariables(string, expansions):
def EscapeXcodeDefine(s):
"""We must escape the defines that we give to XCode so that it knows not to
- split on spaces and to respect backslash and quote literals. However, we
- must not quote the define, or Xcode will incorrectly interpret variables
- especially $(inherited)."""
+ split on spaces and to respect backslash and quote literals. However, we
+ must not quote the define, or Xcode will incorrectly interpret variables
+ especially $(inherited)."""
return re.sub(_xcode_define_re, r"\\\1", s)
@@ -679,9 +679,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
project_attributes["BuildIndependentTargetsInParallel"] = "YES"
if upgrade_check_project_version:
project_attributes["LastUpgradeCheck"] = upgrade_check_project_version
- project_attributes[
- "LastTestingUpgradeCheck"
- ] = upgrade_check_project_version
+ project_attributes["LastTestingUpgradeCheck"] = (
+ upgrade_check_project_version
+ )
project_attributes["LastSwiftUpdateCheck"] = upgrade_check_project_version
pbxp.SetProperty("attributes", project_attributes)
@@ -696,7 +696,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
- [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget(
+ [build_file, target_name, _toolset] = gyp.common.ParseQualifiedTarget(
qualified_target
)
@@ -734,8 +734,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
"loadable_module+xcuitest": "com.apple.product-type.bundle.ui-testing",
"shared_library+bundle": "com.apple.product-type.framework",
"executable+extension+bundle": "com.apple.product-type.app-extension",
- "executable+watch+extension+bundle":
- "com.apple.product-type.watchkit-extension",
+ "executable+watch+extension+bundle": "com.apple.product-type.watchkit-extension", # noqa: E501
"executable+watch+bundle": "com.apple.product-type.application.watchapp",
"mac_kernel_extension+bundle": "com.apple.product-type.kernel-extension",
}
@@ -780,8 +779,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
type_bundle_key += "+watch+extension+bundle"
elif is_watch_app:
assert is_bundle, (
- "ios_watch_app flag requires mac_bundle "
- "(target %s)" % target_name
+ "ios_watch_app flag requires mac_bundle (target %s)" % target_name
)
type_bundle_key += "+watch+bundle"
elif is_bundle:
@@ -1103,7 +1101,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
eol = " \\"
makefile.write(f" {concrete_output}{eol}\n")
- for (rule_source, concrete_outputs, message, action) in zip(
+ for rule_source, concrete_outputs, message, action in zip(
rule["rule_sources"],
concrete_outputs_by_rule_source,
messages,
@@ -1217,7 +1215,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# Add "sources".
for source in spec.get("sources", []):
- (source_root, source_extension) = posixpath.splitext(source)
+ (_source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
@@ -1229,7 +1227,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
- (resource_root, resource_extension) = posixpath.splitext(resource)
+ (_resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
index b0b51a08a6db48..bfd8c587a3175d 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
@@ -4,7 +4,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-""" Unit tests for the xcode.py file. """
+"""Unit tests for the xcode.py file."""
import sys
import unittest
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
index 994bf6625fb81d..f3a5e168f2075d 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
@@ -139,21 +139,21 @@ def IsPathSection(section):
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
"""Return a list of all build files included into build_file_path.
- The returned list will contain build_file_path as well as all other files
- that it included, either directly or indirectly. Note that the list may
- contain files that were included into a conditional section that evaluated
- to false and was not merged into build_file_path's dict.
+ The returned list will contain build_file_path as well as all other files
+ that it included, either directly or indirectly. Note that the list may
+ contain files that were included into a conditional section that evaluated
+ to false and was not merged into build_file_path's dict.
- aux_data is a dict containing a key for each build file or included build
- file. Those keys provide access to dicts whose "included" keys contain
- lists of all other files included by the build file.
+ aux_data is a dict containing a key for each build file or included build
+ file. Those keys provide access to dicts whose "included" keys contain
+ lists of all other files included by the build file.
- included should be left at its default None value by external callers. It
- is used for recursion.
+ included should be left at its default None value by external callers. It
+ is used for recursion.
- The returned list will not contain any duplicate entries. Each build file
- in the list will be relative to the current directory.
- """
+ The returned list will not contain any duplicate entries. Each build file
+ in the list will be relative to the current directory.
+ """
if included is None:
included = []
@@ -171,10 +171,10 @@ def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
def CheckedEval(file_contents):
"""Return the eval of a gyp file.
- The gyp file is restricted to dictionaries and lists only, and
- repeated keys are not allowed.
- Note that this is slower than eval() is.
- """
+ The gyp file is restricted to dictionaries and lists only, and
+ repeated keys are not allowed.
+ Note that this is slower than eval() is.
+ """
syntax_tree = ast.parse(file_contents)
assert isinstance(syntax_tree, ast.Module)
@@ -508,9 +508,9 @@ def CallLoadTargetBuildFile(
):
"""Wrapper around LoadTargetBuildFile for parallel processing.
- This wrapper is used when LoadTargetBuildFile is executed in
- a worker process.
- """
+ This wrapper is used when LoadTargetBuildFile is executed in
+ a worker process.
+ """
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
@@ -559,10 +559,10 @@ class ParallelProcessingError(Exception):
class ParallelState:
"""Class to keep track of state when processing input files in parallel.
- If build files are loaded in parallel, use this to keep track of
- state during farming out and processing parallel jobs. It's stored
- in a global so that the callback function can have access to it.
- """
+ If build files are loaded in parallel, use this to keep track of
+ state during farming out and processing parallel jobs. It's stored
+ in a global so that the callback function can have access to it.
+ """
def __init__(self):
# The multiprocessing pool.
@@ -584,8 +584,7 @@ def __init__(self):
self.error = False
def LoadTargetBuildFileCallback(self, result):
- """Handle the results of running LoadTargetBuildFile in another process.
- """
+ """Handle the results of running LoadTargetBuildFile in another process."""
self.condition.acquire()
if not result:
self.error = True
@@ -692,8 +691,8 @@ def FindEnclosingBracketGroup(input_str):
def IsStrCanonicalInt(string):
"""Returns True if |string| is in its canonical integer form.
- The canonical form is such that str(int(string)) == string.
- """
+ The canonical form is such that str(int(string)) == string.
+ """
if isinstance(string, str):
# This function is called a lot so for maximum performance, avoid
# involving regexps which would otherwise make the code much
@@ -870,8 +869,9 @@ def ExpandVariables(input, phase, variables, build_file):
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
- contents_list = (contents if isinstance(contents, list)
- else contents.split(" "))
+ contents_list = (
+ contents if isinstance(contents, list) else contents.split(" ")
+ )
replacement = contents_list[0]
if os.path.isabs(replacement):
raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
@@ -934,7 +934,6 @@ def ExpandVariables(input, phase, variables, build_file):
os.chdir(build_file_dir)
sys.path.append(os.getcwd())
try:
-
parsed_contents = shlex.split(contents)
try:
py_module = __import__(parsed_contents[0])
@@ -965,7 +964,7 @@ def ExpandVariables(input, phase, variables, build_file):
stdout=subprocess.PIPE,
shell=use_shell,
cwd=build_file_dir,
- check=False
+ check=False,
)
except Exception as e:
raise GypError(
@@ -1003,9 +1002,7 @@ def ExpandVariables(input, phase, variables, build_file):
# ],
replacement = []
else:
- raise GypError(
- "Undefined variable " + contents + " in " + build_file
- )
+ raise GypError("Undefined variable " + contents + " in " + build_file)
else:
replacement = variables[contents]
@@ -1114,7 +1111,7 @@ def ExpandVariables(input, phase, variables, build_file):
def EvalCondition(condition, conditions_key, phase, variables, build_file):
"""Returns the dict that should be used or None if the result was
- that nothing should be used."""
+ that nothing should be used."""
if not isinstance(condition, list):
raise GypError(conditions_key + " must be a list")
if len(condition) < 2:
@@ -1159,7 +1156,7 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file):
def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file):
"""Returns true_dict if cond_expr evaluates to true, and false_dict
- otherwise."""
+ otherwise."""
# Do expansions on the condition itself. Since the condition can naturally
# contain variable references without needing to resort to GYP expansion
# syntax, this is of dubious value for variables, but someone might want to
@@ -1289,10 +1286,10 @@ def ProcessVariablesAndConditionsInDict(
):
"""Handle all variable and command expansion and conditional evaluation.
- This function is the public entry point for all variable expansions and
- conditional evaluations. The variables_in dictionary will not be modified
- by this function.
- """
+ This function is the public entry point for all variable expansions and
+ conditional evaluations. The variables_in dictionary will not be modified
+ by this function.
+ """
# Make a copy of the variables_in dict that can be modified during the
# loading of automatics and the loading of the variables dict.
@@ -1441,15 +1438,15 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
def BuildTargetsDict(data):
"""Builds a dict mapping fully-qualified target names to their target dicts.
- |data| is a dict mapping loaded build files by pathname relative to the
- current directory. Values in |data| are build file contents. For each
- |data| value with a "targets" key, the value of the "targets" key is taken
- as a list containing target dicts. Each target's fully-qualified name is
- constructed from the pathname of the build file (|data| key) and its
- "target_name" property. These fully-qualified names are used as the keys
- in the returned dict. These keys provide access to the target dicts,
- the dicts in the "targets" lists.
- """
+ |data| is a dict mapping loaded build files by pathname relative to the
+ current directory. Values in |data| are build file contents. For each
+ |data| value with a "targets" key, the value of the "targets" key is taken
+ as a list containing target dicts. Each target's fully-qualified name is
+ constructed from the pathname of the build file (|data| key) and its
+ "target_name" property. These fully-qualified names are used as the keys
+ in the returned dict. These keys provide access to the target dicts,
+ the dicts in the "targets" lists.
+ """
targets = {}
for build_file in data["target_build_files"]:
@@ -1467,13 +1464,13 @@ def BuildTargetsDict(data):
def QualifyDependencies(targets):
"""Make dependency links fully-qualified relative to the current directory.
- |targets| is a dict mapping fully-qualified target names to their target
- dicts. For each target in this dict, keys known to contain dependency
- links are examined, and any dependencies referenced will be rewritten
- so that they are fully-qualified and relative to the current directory.
- All rewritten dependencies are suitable for use as keys to |targets| or a
- similar dict.
- """
+ |targets| is a dict mapping fully-qualified target names to their target
+ dicts. For each target in this dict, keys known to contain dependency
+ links are examined, and any dependencies referenced will be rewritten
+ so that they are fully-qualified and relative to the current directory.
+ All rewritten dependencies are suitable for use as keys to |targets| or a
+ similar dict.
+ """
all_dependency_sections = [
dep + op for dep in dependency_sections for op in ("", "!", "/")
@@ -1516,18 +1513,18 @@ def QualifyDependencies(targets):
def ExpandWildcardDependencies(targets, data):
"""Expands dependencies specified as build_file:*.
- For each target in |targets|, examines sections containing links to other
- targets. If any such section contains a link of the form build_file:*, it
- is taken as a wildcard link, and is expanded to list each target in
- build_file. The |data| dict provides access to build file dicts.
+ For each target in |targets|, examines sections containing links to other
+ targets. If any such section contains a link of the form build_file:*, it
+ is taken as a wildcard link, and is expanded to list each target in
+ build_file. The |data| dict provides access to build file dicts.
- Any target that does not wish to be included by wildcard can provide an
- optional "suppress_wildcard" key in its target dict. When present and
- true, a wildcard dependency link will not include such targets.
+ Any target that does not wish to be included by wildcard can provide an
+ optional "suppress_wildcard" key in its target dict. When present and
+ true, a wildcard dependency link will not include such targets.
- All dependency names, including the keys to |targets| and the values in each
- dependency list, must be qualified when this function is called.
- """
+ All dependency names, including the keys to |targets| and the values in each
+ dependency list, must be qualified when this function is called.
+ """
for target, target_dict in targets.items():
target_build_file = gyp.common.BuildFile(target)
@@ -1573,14 +1570,10 @@ def ExpandWildcardDependencies(targets, data):
if int(dependency_target_dict.get("suppress_wildcard", False)):
continue
dependency_target_name = dependency_target_dict["target_name"]
- if (
- dependency_target not in {"*", dependency_target_name}
- ):
+ if dependency_target not in {"*", dependency_target_name}:
continue
dependency_target_toolset = dependency_target_dict["toolset"]
- if (
- dependency_toolset not in {"*", dependency_target_toolset}
- ):
+ if dependency_toolset not in {"*", dependency_target_toolset}:
continue
dependency = gyp.common.QualifiedTarget(
dependency_build_file,
@@ -1601,7 +1594,7 @@ def Unify(items):
def RemoveDuplicateDependencies(targets):
"""Makes sure every dependency appears only once in all targets's dependency
- lists."""
+ lists."""
for target_name, target_dict in targets.items():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
@@ -1617,25 +1610,21 @@ def Filter(items, item):
def RemoveSelfDependencies(targets):
"""Remove self dependencies from targets that have the prune_self_dependency
- variable set."""
+ variable set."""
for target_name, target_dict in targets.items():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
for t in dependencies:
if t == target_name and (
- targets[t]
- .get("variables", {})
- .get("prune_self_dependency", 0)
+ targets[t].get("variables", {}).get("prune_self_dependency", 0)
):
- target_dict[dependency_key] = Filter(
- dependencies, target_name
- )
+ target_dict[dependency_key] = Filter(dependencies, target_name)
def RemoveLinkDependenciesFromNoneTargets(targets):
"""Remove dependencies having the 'link_dependency' attribute from the 'none'
- targets."""
+ targets."""
for target_name, target_dict in targets.items():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
@@ -1651,11 +1640,11 @@ def RemoveLinkDependenciesFromNoneTargets(targets):
class DependencyGraphNode:
"""
- Attributes:
- ref: A reference to an object that this DependencyGraphNode represents.
- dependencies: List of DependencyGraphNodes on which this one depends.
- dependents: List of DependencyGraphNodes that depend on this one.
- """
+ Attributes:
+ ref: A reference to an object that this DependencyGraphNode represents.
+ dependencies: List of DependencyGraphNodes on which this one depends.
+ dependents: List of DependencyGraphNodes that depend on this one.
+ """
class CircularException(GypError):
pass
@@ -1721,8 +1710,8 @@ def ExtractNodeRef(node):
def FindCycles(self):
"""
- Returns a list of cycles in the graph, where each cycle is its own list.
- """
+ Returns a list of cycles in the graph, where each cycle is its own list.
+ """
results = []
visited = set()
@@ -1753,21 +1742,21 @@ def DirectDependencies(self, dependencies=None):
def _AddImportedDependencies(self, targets, dependencies=None):
"""Given a list of direct dependencies, adds indirect dependencies that
- other dependencies have declared to export their settings.
-
- This method does not operate on self. Rather, it operates on the list
- of dependencies in the |dependencies| argument. For each dependency in
- that list, if any declares that it exports the settings of one of its
- own dependencies, those dependencies whose settings are "passed through"
- are added to the list. As new items are added to the list, they too will
- be processed, so it is possible to import settings through multiple levels
- of dependencies.
-
- This method is not terribly useful on its own, it depends on being
- "primed" with a list of direct dependencies such as one provided by
- DirectDependencies. DirectAndImportedDependencies is intended to be the
- public entry point.
- """
+ other dependencies have declared to export their settings.
+
+ This method does not operate on self. Rather, it operates on the list
+ of dependencies in the |dependencies| argument. For each dependency in
+ that list, if any declares that it exports the settings of one of its
+ own dependencies, those dependencies whose settings are "passed through"
+ are added to the list. As new items are added to the list, they too will
+ be processed, so it is possible to import settings through multiple levels
+ of dependencies.
+
+ This method is not terribly useful on its own, it depends on being
+ "primed" with a list of direct dependencies such as one provided by
+ DirectDependencies. DirectAndImportedDependencies is intended to be the
+ public entry point.
+ """
if dependencies is None:
dependencies = []
@@ -1795,9 +1784,9 @@ def _AddImportedDependencies(self, targets, dependencies=None):
def DirectAndImportedDependencies(self, targets, dependencies=None):
"""Returns a list of a target's direct dependencies and all indirect
- dependencies that a dependency has advertised settings should be exported
- through the dependency for.
- """
+ dependencies that a dependency has advertised settings should be exported
+ through the dependency for.
+ """
dependencies = self.DirectDependencies(dependencies)
return self._AddImportedDependencies(targets, dependencies)
@@ -1823,19 +1812,19 @@ def _LinkDependenciesInternal(
self, targets, include_shared_libraries, dependencies=None, initial=True
):
"""Returns an OrderedSet of dependency targets that are linked
- into this target.
+ into this target.
- This function has a split personality, depending on the setting of
- |initial|. Outside callers should always leave |initial| at its default
- setting.
+ This function has a split personality, depending on the setting of
+ |initial|. Outside callers should always leave |initial| at its default
+ setting.
- When adding a target to the list of dependencies, this function will
- recurse into itself with |initial| set to False, to collect dependencies
- that are linked into the linkable target for which the list is being built.
+ When adding a target to the list of dependencies, this function will
+ recurse into itself with |initial| set to False, to collect dependencies
+ that are linked into the linkable target for which the list is being built.
- If |include_shared_libraries| is False, the resulting dependencies will not
- include shared_library targets that are linked into this target.
- """
+ If |include_shared_libraries| is False, the resulting dependencies will not
+ include shared_library targets that are linked into this target.
+ """
if dependencies is None:
# Using a list to get ordered output and a set to do fast "is it
# already added" checks.
@@ -1917,9 +1906,9 @@ def _LinkDependenciesInternal(
def DependenciesForLinkSettings(self, targets):
"""
- Returns a list of dependency targets whose link_settings should be merged
- into this target.
- """
+ Returns a list of dependency targets whose link_settings should be merged
+ into this target.
+ """
# TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
# link_settings are propagated. So for now, we will allow it, unless the
@@ -1932,8 +1921,8 @@ def DependenciesForLinkSettings(self, targets):
def DependenciesToLinkAgainst(self, targets):
"""
- Returns a list of dependency targets that are linked into this target.
- """
+ Returns a list of dependency targets that are linked into this target.
+ """
return self._LinkDependenciesInternal(targets, True)
@@ -2446,7 +2435,7 @@ def SetUpConfigurations(target, target_dict):
merged_configurations = {}
configs = target_dict["configurations"]
- for (configuration, old_configuration_dict) in configs.items():
+ for configuration, old_configuration_dict in configs.items():
# Skip abstract configurations (saves work only).
if old_configuration_dict.get("abstract"):
continue
@@ -2454,7 +2443,7 @@ def SetUpConfigurations(target, target_dict):
# Get the inheritance relationship right by making a copy of the target
# dict.
new_configuration_dict = {}
- for (key, target_val) in target_dict.items():
+ for key, target_val in target_dict.items():
key_ext = key[-1:]
key_base = key[:-1] if key_ext in key_suffixes else key
if key_base not in non_configuration_keys:
@@ -2502,25 +2491,25 @@ def SetUpConfigurations(target, target_dict):
def ProcessListFiltersInDict(name, the_dict):
"""Process regular expression and exclusion-based filters on lists.
- An exclusion list is in a dict key named with a trailing "!", like
- "sources!". Every item in such a list is removed from the associated
- main list, which in this example, would be "sources". Removed items are
- placed into a "sources_excluded" list in the dict.
-
- Regular expression (regex) filters are contained in dict keys named with a
- trailing "/", such as "sources/" to operate on the "sources" list. Regex
- filters in a dict take the form:
- 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
- ['include', '_mac\\.cc$'] ],
- The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
- _win.cc. The second filter then includes all files ending in _mac.cc that
- are now or were once in the "sources" list. Items matching an "exclude"
- filter are subject to the same processing as would occur if they were listed
- by name in an exclusion list (ending in "!"). Items matching an "include"
- filter are brought back into the main list if previously excluded by an
- exclusion list or exclusion regex filter. Subsequent matching "exclude"
- patterns can still cause items to be excluded after matching an "include".
- """
+ An exclusion list is in a dict key named with a trailing "!", like
+ "sources!". Every item in such a list is removed from the associated
+ main list, which in this example, would be "sources". Removed items are
+ placed into a "sources_excluded" list in the dict.
+
+ Regular expression (regex) filters are contained in dict keys named with a
+ trailing "/", such as "sources/" to operate on the "sources" list. Regex
+ filters in a dict take the form:
+ 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+ ['include', '_mac\\.cc$'] ],
+ The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
+ _win.cc. The second filter then includes all files ending in _mac.cc that
+ are now or were once in the "sources" list. Items matching an "exclude"
+ filter are subject to the same processing as would occur if they were listed
+ by name in an exclusion list (ending in "!"). Items matching an "include"
+ filter are brought back into the main list if previously excluded by an
+ exclusion list or exclusion regex filter. Subsequent matching "exclude"
+ patterns can still cause items to be excluded after matching an "include".
+ """
# Look through the dictionary for any lists whose keys end in "!" or "/".
# These are lists that will be treated as exclude lists and regular
@@ -2682,12 +2671,12 @@ def ProcessListFiltersInList(name, the_list):
def ValidateTargetType(target, target_dict):
"""Ensures the 'type' field on the target is one of the known types.
- Arguments:
- target: string, name of target.
- target_dict: dict, target spec.
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec.
- Raises an exception on error.
- """
+ Raises an exception on error.
+ """
VALID_TARGET_TYPES = (
"executable",
"loadable_module",
@@ -2715,14 +2704,14 @@ def ValidateTargetType(target, target_dict):
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
"""Ensures that the rules sections in target_dict are valid and consistent,
- and determines which sources they apply to.
+ and determines which sources they apply to.
- Arguments:
- target: string, name of target.
- target_dict: dict, target spec containing "rules" and "sources" lists.
- extra_sources_for_rules: a list of keys to scan for rule matches in
- addition to 'sources'.
- """
+ Arguments:
+ target: string, name of target.
+ target_dict: dict, target spec containing "rules" and "sources" lists.
+ extra_sources_for_rules: a list of keys to scan for rule matches in
+ addition to 'sources'.
+ """
# Dicts to map between values found in rules' 'rule_name' and 'extension'
# keys and the rule dicts themselves.
@@ -2734,9 +2723,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
# Make sure that there's no conflict among rule names and extensions.
rule_name = rule["rule_name"]
if rule_name in rule_names:
- raise GypError(
- f"rule {rule_name} exists in duplicate, target {target}"
- )
+ raise GypError(f"rule {rule_name} exists in duplicate, target {target}")
rule_names[rule_name] = rule
rule_extension = rule["extension"]
@@ -2770,7 +2757,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
source_keys.extend(extra_sources_for_rules)
for source_key in source_keys:
for source in target_dict.get(source_key, []):
- (source_root, source_extension) = os.path.splitext(source)
+ (_source_root, source_extension) = os.path.splitext(source)
if source_extension.startswith("."):
source_extension = source_extension[1:]
if source_extension == rule_extension:
@@ -2835,8 +2822,7 @@ def ValidateActionsInTarget(target, target_dict, build_file):
def TurnIntIntoStrInDict(the_dict):
- """Given dict the_dict, recursively converts all integers into strings.
- """
+ """Given dict the_dict, recursively converts all integers into strings."""
# Use items instead of iteritems because there's no need to try to look at
# reinserted keys and their associated values.
for k, v in the_dict.items():
@@ -2854,8 +2840,7 @@ def TurnIntIntoStrInDict(the_dict):
def TurnIntIntoStrInList(the_list):
- """Given list the_list, recursively converts all integers into strings.
- """
+ """Given list the_list, recursively converts all integers into strings."""
for index, item in enumerate(the_list):
if isinstance(item, int):
the_list[index] = str(item)
@@ -2902,9 +2887,9 @@ def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, dat
def VerifyNoCollidingTargets(targets):
"""Verify that no two targets in the same directory share the same name.
- Arguments:
- targets: A list of targets in the form 'path/to/file.gyp:target_name'.
- """
+ Arguments:
+ targets: A list of targets in the form 'path/to/file.gyp:target_name'.
+ """
# Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
used = {}
for target in targets:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
index 70aab4f1787f44..3710178e110ae5 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
@@ -8,7 +8,6 @@
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
-
import fcntl
import fnmatch
import glob
@@ -25,14 +24,13 @@
def main(args):
executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
+ if (exit_code := executor.Dispatch(args)) is not None:
sys.exit(exit_code)
class MacTool:
"""This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
+ executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
@@ -48,7 +46,7 @@ def _CommandifyName(self, name_string):
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
"""Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
+ necessary compilation on each resource."""
convert_to_binary = convert_to_binary == "True"
extension = os.path.splitext(source)[1].lower()
if os.path.isdir(source):
@@ -142,7 +140,7 @@ def _CopyStringsFile(self, source, dest):
# CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
# semicolon in dictionary.
# on invalid files. Do the same kind of validation.
- import CoreFoundation
+ import CoreFoundation # noqa: PLC0415
with open(source, "rb") as in_file:
s = in_file.read()
@@ -156,15 +154,15 @@ def _CopyStringsFile(self, source, dest):
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
+ encoding. Returns None as a guess if it can't detect it."""
with open(file_name, "rb") as fp:
try:
header = fp.read(3)
except Exception:
return None
- if header.startswith((b"\xFE\xFF", b"\xFF\xFE")):
+ if header.startswith((b"\xfe\xff", b"\xff\xfe")):
return "UTF-16"
- elif header.startswith(b"\xEF\xBB\xBF"):
+ elif header.startswith(b"\xef\xbb\xbf"):
return "UTF-8"
else:
return None
@@ -255,7 +253,7 @@ def ExecFlock(self, lockfile, *cmd_list):
def ExecFilterLibtool(self, *cmd_list):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
+ symbols'."""
libtool_re = re.compile(
r"^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$"
)
@@ -304,7 +302,7 @@ def ExecPackageIosFramework(self, framework):
def ExecPackageFramework(self, framework, version):
"""Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
+ sets up all the symlinks."""
# Find the name of the binary based on the part before the ".framework".
binary = os.path.basename(framework).split(".")[0]
@@ -333,7 +331,7 @@ def ExecPackageFramework(self, framework, version):
def _Relink(self, dest, link):
"""Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
+ it is overwritten."""
if os.path.lexists(link):
os.remove(link)
os.symlink(dest, link)
@@ -358,14 +356,14 @@ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
def ExecCompileXcassets(self, keys, *inputs):
"""Compiles multiple .xcassets files into a single .car file.
- This invokes 'actool' to compile all the inputs .xcassets files. The
- |keys| arguments is a json-encoded dictionary of extra arguments to
- pass to 'actool' when the asset catalogs contains an application icon
- or a launch image.
+ This invokes 'actool' to compile all the inputs .xcassets files. The
+ |keys| arguments is a json-encoded dictionary of extra arguments to
+ pass to 'actool' when the asset catalogs contains an application icon
+ or a launch image.
- Note that 'actool' does not create the Assets.car file if the asset
- catalogs does not contains imageset.
- """
+ Note that 'actool' does not create the Assets.car file if the asset
+ catalogs does not contains imageset.
+ """
command_line = [
"xcrun",
"actool",
@@ -438,13 +436,13 @@ def ExecMergeInfoPlist(self, output, *inputs):
def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
"""Code sign a bundle.
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. pick the provisioning profile that best match the bundle identifier,
- and copy it into the bundle as embedded.mobileprovision,
- 2. copy Entitlements.plist from user or SDK next to the bundle,
- 3. code sign the bundle.
- """
+ This function tries to code sign an iOS bundle, following the same
+ algorithm as Xcode:
+ 1. pick the provisioning profile that best match the bundle identifier,
+ and copy it into the bundle as embedded.mobileprovision,
+ 2. copy Entitlements.plist from user or SDK next to the bundle,
+ 3. code sign the bundle.
+ """
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier()
)
@@ -463,16 +461,16 @@ def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
def _InstallProvisioningProfile(self, profile, bundle_identifier):
"""Installs embedded.mobileprovision into the bundle.
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+ Args:
+ profile: string, optional, short name of the .mobileprovision file
+ to use, if empty or the file is missing, the best file installed
+ will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- Returns:
- A tuple containing two dictionary: variables substitutions and values
- to overrides when generating the entitlements file.
- """
+ Returns:
+ A tuple containing two dictionary: variables substitutions and values
+ to overrides when generating the entitlements file.
+ """
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
profile, bundle_identifier
)
@@ -488,24 +486,24 @@ def _InstallProvisioningProfile(self, profile, bundle_identifier):
def _FindProvisioningProfile(self, profile, bundle_identifier):
"""Finds the .mobileprovision file to use for signing the bundle.
- Checks all the installed provisioning profiles (or if the user specified
- the PROVISIONING_PROFILE variable, only consult it) and select the most
- specific that correspond to the bundle identifier.
+ Checks all the installed provisioning profiles (or if the user specified
+ the PROVISIONING_PROFILE variable, only consult it) and select the most
+ specific that correspond to the bundle identifier.
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+ Args:
+ profile: string, optional, short name of the .mobileprovision file
+ to use, if empty or the file is missing, the best file installed
+ will be used
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
+ Returns:
+ A tuple of the path to the selected provisioning profile, the data of
+ the embedded plist in the provisioning profile and the team identifier
+ to use for code signing.
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
+ Raises:
+ SystemExit: if no .mobileprovision can be used to sign the bundle.
+ """
profiles_dir = os.path.join(
os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
)
@@ -553,12 +551,12 @@ def _FindProvisioningProfile(self, profile, bundle_identifier):
def _LoadProvisioningProfile(self, profile_path):
"""Extracts the plist embedded in a provisioning profile.
- Args:
- profile_path: string, path to the .mobileprovision file
+ Args:
+ profile_path: string, path to the .mobileprovision file
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
+ Returns:
+ Content of the plist embedded in the provisioning profile as a dictionary.
+ """
with tempfile.NamedTemporaryFile() as temp:
subprocess.check_call(
["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
@@ -581,16 +579,16 @@ def _MergePlist(self, merged_plist, plist):
def _LoadPlistMaybeBinary(self, plist_path):
"""Loads into a memory a plist possibly encoded in binary format.
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
+ This is a wrapper around plistlib.readPlist that tries to convert the
+ plist to the XML format if it can't be parsed (assuming that it is in
+ the binary format).
- Args:
- plist_path: string, path to a plist file, in XML or binary format
+ Args:
+ plist_path: string, path to a plist file, in XML or binary format
- Returns:
- Content of the plist as a dictionary.
- """
+ Returns:
+ Content of the plist as a dictionary.
+ """
try:
# First, try to read the file using plistlib that only supports XML,
# and if an exception is raised, convert a temporary copy to XML and
@@ -606,13 +604,13 @@ def _LoadPlistMaybeBinary(self, plist_path):
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
+ Args:
+ bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+ app_identifier_prefix: string, value for AppIdentifierPrefix
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
+ Returns:
+ Dictionary of substitutions to apply when generating Entitlements.plist.
+ """
return {
"CFBundleIdentifier": bundle_identifier,
"AppIdentifierPrefix": app_identifier_prefix,
@@ -621,9 +619,9 @@ def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
def _GetCFBundleIdentifier(self):
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
+ Returns:
+ Value of CFBundleIdentifier in the Info.plist located in the bundle.
+ """
info_plist_path = os.path.join(
os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
)
@@ -633,19 +631,19 @@ def _GetCFBundleIdentifier(self):
def _InstallEntitlements(self, entitlements, substitutions, overrides):
"""Generates and install the ${BundleName}.xcent entitlements file.
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
+ Expands variables "$(variable)" pattern in the source entitlements file,
+ add extra entitlements defined in the .mobileprovision file and the copy
+ the generated plist to "${BundlePath}.xcent".
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
+ Args:
+ entitlements: string, optional, path to the Entitlements.plist template
+ to use, defaults to "${SDKROOT}/Entitlements.plist"
+ substitutions: dictionary, variable substitutions
+ overrides: dictionary, values to add to the entitlements
- Returns:
- Path to the generated entitlements file.
- """
+ Returns:
+ Path to the generated entitlements file.
+ """
source_path = entitlements
target_path = os.path.join(
os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
@@ -665,15 +663,15 @@ def _InstallEntitlements(self, entitlements, substitutions, overrides):
def _ExpandVariables(self, data, substitutions):
"""Expands variables "$(variable)" in data.
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
+ Args:
+ data: object, can be either string, list or dictionary
+ substitutions: dictionary, variable substitutions to perform
- Returns:
- Copy of data where each references to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
+ Returns:
+ Copy of data where each references to "$(variable)" has been replaced
+ by the corresponding value found in substitutions, or left intact if
+ the key was not found.
+ """
if isinstance(data, str):
for key, value in substitutions.items():
data = data.replace("$(%s)" % key, value)
@@ -692,15 +690,15 @@ def NextGreaterPowerOf2(x):
def WriteHmap(output_name, filelist):
"""Generates a header map based on |filelist|.
- Per Mark Mentovai:
- A header map is structured essentially as a hash table, keyed by names used
- in #includes, and providing pathnames to the actual files.
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
- The implementation below and the comment above comes from inspecting:
- http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
- while also looking at the implementation in clang in:
- https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
- """
+ The implementation below and the comment above comes from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
magic = 1751998832
version = 1
_reserved = 0
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
index ace0cae5ebff23..7c461a8fdf72d8 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
@@ -74,8 +74,7 @@ def EncodeRspFileList(args, quote_cmd):
program = call + " " + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
- return (program + " "
- + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:]))
+ return program + " " + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:])
def _GenericRetrieve(root, default, path):
@@ -247,9 +246,7 @@ def GetExtension(self):
the target type.
"""
ext = self.spec.get("product_extension", None)
- if ext:
- return ext
- return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
+ return ext or gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
@@ -625,8 +622,7 @@ def GetDefFile(self, gyp_to_build_path):
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
- def_file = self.GetDefFile(gyp_to_build_path)
- if def_file:
+ if def_file := self.GetDefFile(gyp_to_build_path):
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
@@ -674,14 +670,11 @@ def GetLdflags(
)
ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
- out = self.GetOutputName(config, expand_special)
- if out:
+ if out := self.GetOutputName(config, expand_special):
ldflags.append("/OUT:" + out)
- pdb = self.GetPDBName(config, expand_special, output_name + ".pdb")
- if pdb:
+ if pdb := self.GetPDBName(config, expand_special, output_name + ".pdb"):
ldflags.append("/PDB:" + pdb)
- pgd = self.GetPGDName(config, expand_special)
- if pgd:
+ if pgd := self.GetPGDName(config, expand_special):
ldflags.append("/PGD:" + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
@@ -940,14 +933,17 @@ def GetRuleShellFlags(self, rule):
includes whether it should run under cygwin (msvs_cygwin_shell), and
whether the commands should be quoted (msvs_quote_cmd)."""
# If the variable is unset, or set to 1 we use cygwin
- cygwin = int(rule.get("msvs_cygwin_shell",
- self.spec.get("msvs_cygwin_shell", 1))) != 0
+ cygwin = (
+ int(rule.get("msvs_cygwin_shell", self.spec.get("msvs_cygwin_shell", 1)))
+ != 0
+ )
# Default to quoting. There's only a few special instances where the
# target command uses non-standard command line parsing and handle quotes
# and quote escaping differently.
quote_cmd = int(rule.get("msvs_quote_cmd", 1))
- assert quote_cmd != 0 or cygwin != 1, \
- "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
+ assert quote_cmd != 0 or cygwin != 1, (
+ "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
+ )
return MsvsSettings.RuleShellFlags(cygwin, quote_cmd)
def _HasExplicitRuleForExtension(self, spec, extension):
@@ -1135,8 +1131,7 @@ def _ExtractImportantEnvironment(output_of_set):
for required in ("SYSTEMROOT", "TEMP", "TMP"):
if required not in env:
raise Exception(
- 'Environment variable "%s" '
- "required to be set to valid path" % required
+ 'Environment variable "%s" required to be set to valid path' % required
)
return env
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
index 729cec0636273b..8b026642fc5ef0 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
@@ -17,8 +17,8 @@ class Error(Exception):
def deepcopy(x):
"""Deep copy operation on gyp objects such as strings, ints, dicts
- and lists. More than twice as fast as copy.deepcopy but much less
- generic."""
+ and lists. More than twice as fast as copy.deepcopy but much less
+ generic."""
try:
return _deepcopy_dispatch[type(x)](x)
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
index 7e647f40a84c54..43665577bdddaf 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -9,7 +9,6 @@
These functions are executed via gyp-win-tool when using the ninja generator.
"""
-
import os
import re
import shutil
@@ -27,18 +26,17 @@
def main(args):
executor = WinTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
+ if (exit_code := executor.Dispatch(args)) is not None:
sys.exit(exit_code)
class WinTool:
"""This class performs all the Windows tooling steps. The methods can either
- be executed directly, or dispatched from an argument list."""
+ be executed directly, or dispatched from an argument list."""
def _UseSeparateMspdbsrv(self, env, args):
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
- shared one."""
+ shared one."""
if len(args) < 1:
raise Exception("Not enough arguments")
@@ -115,9 +113,9 @@ def _on_error(fn, path, excinfo):
def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
"""Filter diagnostic output from link that looks like:
- ' Creating library ui.dll.lib and object ui.dll.exp'
- This happens when there are exports from the dll or exe.
- """
+ ' Creating library ui.dll.lib and object ui.dll.exp'
+ This happens when there are exports from the dll or exe.
+ """
env = self._GetEnv(arch)
if use_separate_mspdbsrv == "True":
self._UseSeparateMspdbsrv(env, args)
@@ -159,10 +157,10 @@ def ExecLinkWithManifests(
mt,
rc,
intermediate_manifest,
- *manifests
+ *manifests,
):
"""A wrapper for handling creating a manifest resource and then executing
- a link command."""
+ a link command."""
# The 'normal' way to do manifests is to have link generate a manifest
# based on gathering dependencies from the object files, then merge that
# manifest with other manifests supplied as sources, convert the merged
@@ -246,8 +244,8 @@ def dump(filename):
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
- (some XML blocks are recognized by the OS loader, but not the manifest
- tool)."""
+ (some XML blocks are recognized by the OS loader, but not the manifest
+ tool)."""
env = self._GetEnv(arch)
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
@@ -260,8 +258,8 @@ def ExecManifestWrapper(self, arch, *args):
def ExecManifestToRc(self, arch, *args):
"""Creates a resource file pointing a SxS assembly manifest.
- |args| is tuple containing path to resource file, path to manifest file
- and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+ |args| is tuple containing path to resource file, path to manifest file
+ and resource name which can be "1" (for executables) or "2" (for DLLs)."""
manifest_path, resource_path, resource_name = args
with open(resource_path, "w") as output:
output.write(
@@ -271,8 +269,8 @@ def ExecManifestToRc(self, arch, *args):
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
"""Filter noisy filenames output from MIDL compile step that isn't
- quietable via command line flags.
- """
+ quietable via command line flags.
+ """
args = (
["midl", "/nologo"]
+ list(flags)
@@ -328,7 +326,7 @@ def ExecAsmWrapper(self, arch, *args):
def ExecRcWrapper(self, arch, *args):
"""Filter logo banner from invocations of rc.exe. Older versions of RC
- don't support the /nologo flag."""
+ don't support the /nologo flag."""
env = self._GetEnv(arch)
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
@@ -345,7 +343,7 @@ def ExecRcWrapper(self, arch, *args):
def ExecActionWrapper(self, arch, rspfile, *dir):
"""Runs an action command line from a response file using the environment
- for |arch|. If |dir| is supplied, use that as the working directory."""
+ for |arch|. If |dir| is supplied, use that as the working directory."""
env = self._GetEnv(arch)
# TODO(scottmg): This is a temporary hack to get some specific variables
# through to actions that are set after gyp-time. http://crbug.com/333738.
@@ -358,7 +356,7 @@ def ExecActionWrapper(self, arch, rspfile, *dir):
def ExecClCompile(self, project_dir, selected_files):
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
- build selected C/C++ files."""
+ build selected C/C++ files."""
project_dir = os.path.relpath(project_dir, BASE_DIR)
selected_files = selected_files.split(";")
ninja_targets = [
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
index 85a63dfd7ae0e2..d13eaa9af240b7 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
@@ -7,7 +7,6 @@
other build systems, such as make and ninja.
"""
-
import copy
import os
import os.path
@@ -31,7 +30,7 @@
def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
"""Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
- and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
+ and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
mapping = {"$(ARCHS_STANDARD)": archs}
if archs_including_64_bit:
mapping["$(ARCHS_STANDARD_INCLUDING_64_BIT)"] = archs_including_64_bit
@@ -40,10 +39,10 @@ def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
class XcodeArchsDefault:
"""A class to resolve ARCHS variable from xcode_settings, resolving Xcode
- macros and implementing filtering by VALID_ARCHS. The expansion of macros
- depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
- on the version of Xcode.
- """
+ macros and implementing filtering by VALID_ARCHS. The expansion of macros
+ depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
+ on the version of Xcode.
+ """
# Match variable like $(ARCHS_STANDARD).
variable_pattern = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$")
@@ -82,8 +81,8 @@ def _ExpandArchs(self, archs, sdkroot):
def ActiveArchs(self, archs, valid_archs, sdkroot):
"""Expands variables references in ARCHS, and filter by VALID_ARCHS if it
- is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
- values present in VALID_ARCHS are kept)."""
+ is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
+ values present in VALID_ARCHS are kept)."""
expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or "")
if valid_archs:
filtered_archs = []
@@ -96,24 +95,24 @@ def ActiveArchs(self, archs, valid_archs, sdkroot):
def GetXcodeArchsDefault():
"""Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
- installed version of Xcode. The default values used by Xcode for ARCHS
- and the expansion of the variables depends on the version of Xcode used.
+ installed version of Xcode. The default values used by Xcode for ARCHS
+ and the expansion of the variables depends on the version of Xcode used.
- For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
- uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
- $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
- and deprecated with Xcode 5.1.
+ For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
+ uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
+ $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
+ and deprecated with Xcode 5.1.
- For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
- architecture as part of $(ARCHS_STANDARD) and default to only building it.
+ For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
+ architecture as part of $(ARCHS_STANDARD) and default to only building it.
- For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
- of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
- are also part of $(ARCHS_STANDARD).
+ For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
+ of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
+ are also part of $(ARCHS_STANDARD).
- All these rules are coded in the construction of the |XcodeArchsDefault|
- object to use depending on the version of Xcode detected. The object is
- for performance reason."""
+ All these rules are coded in the construction of the |XcodeArchsDefault|
+ object to use depending on the version of Xcode detected. The object is
+ for performance reason."""
global XCODE_ARCHS_DEFAULT_CACHE
if XCODE_ARCHS_DEFAULT_CACHE:
return XCODE_ARCHS_DEFAULT_CACHE
@@ -190,8 +189,8 @@ def __init__(self, spec):
def _ConvertConditionalKeys(self, configname):
"""Converts or warns on conditional keys. Xcode supports conditional keys,
- such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
- with some keys converted while the rest force a warning."""
+ such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
+ with some keys converted while the rest force a warning."""
settings = self.xcode_settings[configname]
conditional_keys = [key for key in settings if key.endswith("]")]
for key in conditional_keys:
@@ -256,13 +255,13 @@ def _IsIosWatchApp(self):
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
- bundles."""
+ bundles."""
assert self._IsBundle()
return self.GetPerTargetSetting("FRAMEWORK_VERSION", default="A")
def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
- valid for bundles."""
+ valid for bundles."""
assert self._IsBundle()
if self.spec["type"] in ("loadable_module", "shared_library"):
default_wrapper_extension = {
@@ -297,13 +296,13 @@ def GetFullProductName(self):
def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
- Only valid for bundles."""
+ Only valid for bundles."""
assert self._IsBundle()
return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
- Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
+ Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
@@ -317,7 +316,7 @@ def GetBundleContentsFolderPath(self):
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
- Chromium.app/Contents/Resources. Only valid for bundles."""
+ Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
@@ -325,7 +324,7 @@ def GetBundleResourceFolder(self):
def GetBundleExecutableFolderPath(self):
"""Returns the qualified path to the bundle's executables folder. E.g.
- Chromium.app/Contents/MacOS. Only valid for bundles."""
+ Chromium.app/Contents/MacOS. Only valid for bundles."""
assert self._IsBundle()
if self.spec["type"] in ("shared_library") or self.isIOS:
return self.GetBundleContentsFolderPath()
@@ -334,25 +333,25 @@ def GetBundleExecutableFolderPath(self):
def GetBundleJavaFolderPath(self):
"""Returns the qualified path to the bundle's Java resource folder.
- E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+ E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(self.GetBundleResourceFolder(), "Java")
def GetBundleFrameworksFolderPath(self):
"""Returns the qualified path to the bundle's frameworks folder. E.g,
- Chromium.app/Contents/Frameworks. Only valid for bundles."""
+ Chromium.app/Contents/Frameworks. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(self.GetBundleContentsFolderPath(), "Frameworks")
def GetBundleSharedFrameworksFolderPath(self):
"""Returns the qualified path to the bundle's frameworks folder. E.g,
- Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+ Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(self.GetBundleContentsFolderPath(), "SharedFrameworks")
def GetBundleSharedSupportFolderPath(self):
"""Returns the qualified path to the bundle's shared support folder. E.g,
- Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+ Chromium.app/Contents/SharedSupport. Only valid for bundles."""
assert self._IsBundle()
if self.spec["type"] == "shared_library":
return self.GetBundleResourceFolder()
@@ -361,19 +360,19 @@ def GetBundleSharedSupportFolderPath(self):
def GetBundlePlugInsFolderPath(self):
"""Returns the qualified path to the bundle's plugins folder. E.g,
- Chromium.app/Contents/PlugIns. Only valid for bundles."""
+ Chromium.app/Contents/PlugIns. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(self.GetBundleContentsFolderPath(), "PlugIns")
def GetBundleXPCServicesFolderPath(self):
"""Returns the qualified path to the bundle's XPC services folder. E.g,
- Chromium.app/Contents/XPCServices. Only valid for bundles."""
+ Chromium.app/Contents/XPCServices. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(self.GetBundleContentsFolderPath(), "XPCServices")
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
- Chromium.app/Contents/Info.plist. Only valid for bundles."""
+ Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
if (
self.spec["type"] in ("executable", "loadable_module")
@@ -439,7 +438,7 @@ def GetMachOType(self):
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
- E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
+ E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
return os.path.join(
self.GetBundleExecutableFolderPath(), self.GetExecutableName()
@@ -470,14 +469,14 @@ def _GetStandaloneExecutablePrefix(self):
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
- E.g. hello_world. Only valid for non-bundles."""
+ E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec["type"] in {
"executable",
"shared_library",
"static_library",
"loadable_module",
- }, ("Unexpected type %s" % self.spec["type"])
+ }, "Unexpected type %s" % self.spec["type"]
target = self.spec["target_name"]
if self.spec["type"] in {"loadable_module", "shared_library", "static_library"}:
if target[:3] == "lib":
@@ -490,7 +489,7 @@ def _GetStandaloneBinaryPath(self):
def GetExecutableName(self):
"""Returns the executable name of the bundle represented by this target.
- E.g. Chromium."""
+ E.g. Chromium."""
if self._IsBundle():
return self.spec.get("product_name", self.spec["target_name"])
else:
@@ -498,7 +497,7 @@ def GetExecutableName(self):
def GetExecutablePath(self):
"""Returns the qualified path to the primary executable of the bundle
- represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
+ represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
@@ -521,7 +520,7 @@ def _GetSdkVersionInfoItem(self, sdk, infoitem):
# most sensible route and should still do the right thing.
try:
return GetStdoutQuiet(["xcrun", "--sdk", sdk, infoitem])
- except GypError:
+ except (GypError, OSError):
pass
def _SdkRoot(self, configname):
@@ -568,7 +567,7 @@ def _AppendPlatformVersionMinFlags(self, lst):
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
- compilations."""
+ compilations."""
# This functions (and the similar ones below) do not offer complete
# emulation of all xcode_settings keys. They're implemented on demand.
@@ -863,7 +862,7 @@ def GetInstallName(self):
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
- gyp-directory-relative to build-directory-relative."""
+ gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
@@ -895,13 +894,13 @@ def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
- Args:
- configname: The name of the configuration to get ld flags for.
- product_dir: The directory where products such static and dynamic
- libraries are placed. This is added to the library search path.
- gyp_to_build_path: A function that converts paths relative to the
- current gyp file to paths relative to the build directory.
- """
+ Args:
+ configname: The name of the configuration to get ld flags for.
+ product_dir: The directory where products such static and dynamic
+ libraries are placed. This is added to the library search path.
+ gyp_to_build_path: A function that converts paths relative to the
+ current gyp file to paths relative to the build directory.
+ """
self.configname = configname
ldflags = []
@@ -1001,9 +1000,9 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
- Args:
- configname: The name of the configuration to get ld flags for.
- """
+ Args:
+ configname: The name of the configuration to get ld flags for.
+ """
self.configname = configname
libtoolflags = []
@@ -1016,7 +1015,7 @@ def GetLibtoolflags(self, configname):
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
- whose values are the same across all configurations."""
+ whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
@@ -1039,7 +1038,7 @@ def GetPerConfigSetting(self, setting, configname, default=None):
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
- has the same value in all configurations and throws otherwise."""
+ has the same value in all configurations and throws otherwise."""
is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
@@ -1057,15 +1056,14 @@ def GetPerTargetSetting(self, setting, default=None):
def _GetStripPostbuilds(self, configname, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
- necessary to strip this target's binary. These should be run as postbuilds
- before the actual postbuilds run."""
+ necessary to strip this target's binary. These should be run as postbuilds
+ before the actual postbuilds run."""
self.configname = configname
result = []
if self._Test("DEPLOYMENT_POSTPROCESSING", "YES", default="NO") and self._Test(
"STRIP_INSTALLED_PRODUCT", "YES", default="NO"
):
-
default_strip_style = "debugging"
if (
self.spec["type"] == "loadable_module" or self._IsIosAppExtension()
@@ -1092,8 +1090,8 @@ def _GetStripPostbuilds(self, configname, output_binary, quiet):
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
- necessary to massage this target's debug information. These should be run
- as postbuilds before the actual postbuilds run."""
+ necessary to massage this target's debug information. These should be run
+ as postbuilds before the actual postbuilds run."""
self.configname = configname
# For static libraries, no dSYMs are created.
@@ -1114,7 +1112,7 @@ def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
"""Returns a list of shell commands that contain the shell commands
- to run as postbuilds for this target, before the actual postbuilds."""
+ to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return self._GetDebugInfoPostbuilds(
configname, output, output_binary, quiet
@@ -1122,11 +1120,10 @@ def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
def _GetIOSPostbuilds(self, configname, output_binary):
"""Return a shell command to codesign the iOS output binary so it can
- be deployed to a device. This should be run as the very last step of the
- build."""
+ be deployed to a device. This should be run as the very last step of the
+ build."""
if not (
- (self.isIOS
- and (self.spec["type"] == "executable" or self._IsXCTest()))
+ (self.isIOS and (self.spec["type"] == "executable" or self._IsXCTest()))
or self.IsIosFramework()
):
return []
@@ -1240,7 +1237,7 @@ def AddImplicitPostbuilds(
self, configname, output, output_binary, postbuilds=[], quiet=False
):
"""Returns a list of shell commands that should run before and after
- |postbuilds|."""
+ |postbuilds|."""
assert output_binary is not None
pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
post = self._GetIOSPostbuilds(configname, output_binary)
@@ -1276,8 +1273,8 @@ def _AdjustLibrary(self, library, config_name=None):
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
- '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
- """
+ '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
+ """
libraries = [self._AdjustLibrary(library, config_name) for library in libraries]
return libraries
@@ -1342,20 +1339,19 @@ def GetExtraPlistItems(self, configname=None):
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
- Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
- project, then the environment variable was empty. Starting with this
- version, Xcode uses the name of the newest SDK installed.
- """
+ Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+ project, then the environment variable was empty. Starting with this
+ version, Xcode uses the name of the newest SDK installed.
+ """
xcode_version, _ = XcodeVersion()
if xcode_version < "0500":
return ""
default_sdk_path = self._XcodeSdkPath("")
- default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
- if default_sdk_root:
+ if default_sdk_root := XcodeSettings._sdk_root_cache.get(default_sdk_path):
return default_sdk_root
try:
all_sdks = GetStdout(["xcodebuild", "-showsdks"])
- except GypError:
+ except (GypError, OSError):
# If xcodebuild fails, there will be no valid SDKs
return ""
for line in all_sdks.splitlines():
@@ -1371,39 +1367,39 @@ def _DefaultSdkRoot(self):
class MacPrefixHeader:
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
- This feature consists of several pieces:
- * If GCC_PREFIX_HEADER is present, all compilations in that project get an
- additional |-include path_to_prefix_header| cflag.
- * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
- instead compiled, and all other compilations in the project get an
- additional |-include path_to_compiled_header| instead.
- + Compiled prefix headers have the extension gch. There is one gch file for
- every language used in the project (c, cc, m, mm), since gch files for
- different languages aren't compatible.
- + gch files themselves are built with the target's normal cflags, but they
- obviously don't get the |-include| flag. Instead, they need a -x flag that
- describes their language.
- + All o files in the target need to depend on the gch file, to make sure
- it's built before any o file is built.
-
- This class helps with some of these tasks, but it needs help from the build
- system for writing dependencies to the gch files, for writing build commands
- for the gch files, and for figuring out the location of the gch files.
- """
+ This feature consists of several pieces:
+ * If GCC_PREFIX_HEADER is present, all compilations in that project get an
+ additional |-include path_to_prefix_header| cflag.
+ * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
+ instead compiled, and all other compilations in the project get an
+ additional |-include path_to_compiled_header| instead.
+ + Compiled prefix headers have the extension gch. There is one gch file for
+ every language used in the project (c, cc, m, mm), since gch files for
+ different languages aren't compatible.
+ + gch files themselves are built with the target's normal cflags, but they
+ obviously don't get the |-include| flag. Instead, they need a -x flag that
+ describes their language.
+ + All o files in the target need to depend on the gch file, to make sure
+ it's built before any o file is built.
+
+ This class helps with some of these tasks, but it needs help from the build
+ system for writing dependencies to the gch files, for writing build commands
+ for the gch files, and for figuring out the location of the gch files.
+ """
def __init__(
self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output
):
"""If xcode_settings is None, all methods on this class are no-ops.
- Args:
- gyp_path_to_build_path: A function that takes a gyp-relative path,
- and returns a path relative to the build directory.
- gyp_path_to_build_output: A function that takes a gyp-relative path and
- a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
- to where the output of precompiling that path for that language
- should be placed (without the trailing '.gch').
- """
+ Args:
+ gyp_path_to_build_path: A function that takes a gyp-relative path,
+ and returns a path relative to the build directory.
+ gyp_path_to_build_output: A function that takes a gyp-relative path and
+ a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
+ to where the output of precompiling that path for that language
+ should be placed (without the trailing '.gch').
+ """
# This doesn't support per-configuration prefix headers. Good enough
# for now.
self.header = None
@@ -1448,9 +1444,9 @@ def _Gch(self, lang, arch):
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
- a list of (source, object, gch) tuples, where |gch| is the build-directory
- relative path to the gch file each object file depends on. |compilable[i]|
- has to be the source file belonging to |objs[i]|."""
+ a list of (source, object, gch) tuples, where |gch| is the build-directory
+ relative path to the gch file each object file depends on. |compilable[i]|
+ has to be the source file belonging to |objs[i]|."""
if not self.header or not self.compile_headers:
return []
@@ -1471,8 +1467,8 @@ def GetObjDependencies(self, sources, objs, arch=None):
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
- |path_to_gch| and |header| are relative to the build directory.
- """
+ |path_to_gch| and |header| are relative to the build directory.
+ """
if not self.header or not self.compile_headers:
return []
return [
@@ -1509,7 +1505,8 @@ def XcodeVersion():
raise GypError("xcodebuild returned unexpected results")
version = version_list[0].split()[-1] # Last word on first line
build = version_list[-1].split()[-1] # Last word on last line
- except GypError: # Xcode not installed so look for XCode Command Line Tools
+ except (GypError, OSError):
+ # Xcode not installed so look for XCode Command Line Tools
version = CLTVersion() # macOS Catalina returns 11.0.0.0.1.1567737322
if not version:
raise GypError("No Xcode or CLT version detected!")
@@ -1537,26 +1534,28 @@ def CLTVersion():
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
- regex = re.compile("version: (?P.+)")
+ regex = re.compile(r"version: (?P.+)")
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
try:
output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key])
- return re.search(regex, output).groupdict()["version"]
- except GypError:
+ if m := re.search(regex, output):
+ return m.groupdict()["version"]
+ except (GypError, OSError):
continue
regex = re.compile(r"Command Line Tools for Xcode\s+(?P\S+)")
try:
output = GetStdout(["/usr/sbin/softwareupdate", "--history"])
- return re.search(regex, output).groupdict()["version"]
- except GypError:
+ if m := re.search(regex, output):
+ return m.groupdict()["version"]
+ except (GypError, OSError):
return None
def GetStdoutQuiet(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
- Ignores the stderr.
- Raises |GypError| if the command return with a non-zero return code."""
+ Ignores the stderr.
+ Raises |GypError| if the command return with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = job.communicate()[0].decode("utf-8")
if job.returncode != 0:
@@ -1566,7 +1565,7 @@ def GetStdoutQuiet(cmdlist):
def GetStdout(cmdlist):
"""Returns the content of standard output returned by invoking |cmdlist|.
- Raises |GypError| if the command return with a non-zero return code."""
+ Raises |GypError| if the command return with a non-zero return code."""
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0].decode("utf-8")
if job.returncode != 0:
@@ -1577,9 +1576,9 @@ def GetStdout(cmdlist):
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
- target represented by spec. For keys that are both in the global and the local
- xcode_settings dict, the local key gets precedence.
- """
+ target represented by spec. For keys that are both in the global and the local
+ xcode_settings dict, the local key gets precedence.
+ """
# The xcode generator special-cases global xcode_settings and does something
# that amounts to merging in the global xcode_settings into each local
# xcode_settings dict.
@@ -1594,9 +1593,9 @@ def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
def IsMacBundle(flavor, spec):
"""Returns if |spec| should be treated as a bundle.
- Bundles are directories with a certain subdirectory structure, instead of
- just a single file. Bundle rules do not produce a binary but also package
- resources into that directory."""
+ Bundles are directories with a certain subdirectory structure, instead of
+ just a single file. Bundle rules do not produce a binary but also package
+ resources into that directory."""
is_mac_bundle = (
int(spec.get("mac_xctest_bundle", 0)) != 0
or int(spec.get("mac_xcuitest_bundle", 0)) != 0
@@ -1613,14 +1612,14 @@ def IsMacBundle(flavor, spec):
def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
- Only call this for mac bundle targets.
-
- Args:
- product_dir: Path to the directory containing the output bundle,
- relative to the build directory.
- xcode_settings: The XcodeSettings of the current target.
- resources: A list of bundle resources, relative to the build directory.
- """
+ Only call this for mac bundle targets.
+
+ Args:
+ product_dir: Path to the directory containing the output bundle,
+ relative to the build directory.
+ xcode_settings: The XcodeSettings of the current target.
+ resources: A list of bundle resources, relative to the build directory.
+ """
dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
@@ -1651,24 +1650,24 @@ def GetMacBundleResources(product_dir, xcode_settings, resources):
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
- * |info_plist| is the source plist path, relative to the
- build directory,
- * |dest_plist| is the destination plist path, relative to the
- build directory,
- * |defines| is a list of preprocessor defines (empty if the plist
- shouldn't be preprocessed,
- * |extra_env| is a dict of env variables that should be exported when
- invoking |mac_tool copy-info-plist|.
-
- Only call this for mac bundle targets.
-
- Args:
- product_dir: Path to the directory containing the output bundle,
- relative to the build directory.
- xcode_settings: The XcodeSettings of the current target.
- gyp_to_build_path: A function that converts paths relative to the
- current gyp file to paths relative to the build directory.
- """
+ * |info_plist| is the source plist path, relative to the
+ build directory,
+ * |dest_plist| is the destination plist path, relative to the
+ build directory,
+ * |defines| is a list of preprocessor defines (empty if the plist
+ shouldn't be preprocessed,
+ * |extra_env| is a dict of env variables that should be exported when
+ invoking |mac_tool copy-info-plist|.
+
+ Only call this for mac bundle targets.
+
+ Args:
+ product_dir: Path to the directory containing the output bundle,
+ relative to the build directory.
+ xcode_settings: The XcodeSettings of the current target.
+ gyp_to_build_path: A function that converts paths relative to the
+ current gyp file to paths relative to the build directory.
+ """
info_plist = xcode_settings.GetPerTargetSetting("INFOPLIST_FILE")
if not info_plist:
return None, None, [], {}
@@ -1706,18 +1705,18 @@ def _GetXcodeEnv(
xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None
):
"""Return the environment variables that Xcode would set. See
- http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
- for a full list.
-
- Args:
- xcode_settings: An XcodeSettings object. If this is None, this function
- returns an empty dict.
- built_products_dir: Absolute path to the built products dir.
- srcroot: Absolute path to the source root.
- configuration: The build configuration name.
- additional_settings: An optional dict with more values to add to the
- result.
- """
+ http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
+ for a full list.
+
+ Args:
+ xcode_settings: An XcodeSettings object. If this is None, this function
+ returns an empty dict.
+ built_products_dir: Absolute path to the built products dir.
+ srcroot: Absolute path to the source root.
+ configuration: The build configuration name.
+ additional_settings: An optional dict with more values to add to the
+ result.
+ """
if not xcode_settings:
return {}
@@ -1771,27 +1770,25 @@ def _GetXcodeEnv(
)
env["CONTENTS_FOLDER_PATH"] = xcode_settings.GetBundleContentsFolderPath()
env["EXECUTABLE_FOLDER_PATH"] = xcode_settings.GetBundleExecutableFolderPath()
- env[
- "UNLOCALIZED_RESOURCES_FOLDER_PATH"
- ] = xcode_settings.GetBundleResourceFolder()
+ env["UNLOCALIZED_RESOURCES_FOLDER_PATH"] = (
+ xcode_settings.GetBundleResourceFolder()
+ )
env["JAVA_FOLDER_PATH"] = xcode_settings.GetBundleJavaFolderPath()
env["FRAMEWORKS_FOLDER_PATH"] = xcode_settings.GetBundleFrameworksFolderPath()
- env[
- "SHARED_FRAMEWORKS_FOLDER_PATH"
- ] = xcode_settings.GetBundleSharedFrameworksFolderPath()
- env[
- "SHARED_SUPPORT_FOLDER_PATH"
- ] = xcode_settings.GetBundleSharedSupportFolderPath()
+ env["SHARED_FRAMEWORKS_FOLDER_PATH"] = (
+ xcode_settings.GetBundleSharedFrameworksFolderPath()
+ )
+ env["SHARED_SUPPORT_FOLDER_PATH"] = (
+ xcode_settings.GetBundleSharedSupportFolderPath()
+ )
env["PLUGINS_FOLDER_PATH"] = xcode_settings.GetBundlePlugInsFolderPath()
env["XPCSERVICES_FOLDER_PATH"] = xcode_settings.GetBundleXPCServicesFolderPath()
env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath()
env["WRAPPER_NAME"] = xcode_settings.GetWrapperName()
- install_name = xcode_settings.GetInstallName()
- if install_name:
+ if install_name := xcode_settings.GetInstallName():
env["LD_DYLIB_INSTALL_NAME"] = install_name
- install_name_base = xcode_settings.GetInstallNameBase()
- if install_name_base:
+ if install_name_base := xcode_settings.GetInstallNameBase():
env["DYLIB_INSTALL_NAME_BASE"] = install_name_base
xcode_version, _ = XcodeVersion()
if xcode_version >= "0500" and not env.get("SDKROOT"):
@@ -1819,8 +1816,8 @@ def _GetXcodeEnv(
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
- or $FOO, and returns a string with all variable references in the form ${FOO}.
- """
+ or $FOO, and returns a string with all variable references in the form ${FOO}.
+ """
# $FOO -> ${FOO}
str = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", str)
@@ -1836,9 +1833,9 @@ def _NormalizeEnvVarReferences(str):
def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
- expansions list. If the variable expands to something that references
- another variable, this variable is expanded as well if it's in env --
- until no variables present in env are left."""
+ expansions list. If the variable expands to something that references
+ another variable, this variable is expanded as well if it's in env --
+ until no variables present in env are left."""
for k, v in reversed(expansions):
string = string.replace("${" + k + "}", v)
string = string.replace("$(" + k + ")", v)
@@ -1848,11 +1845,11 @@ def ExpandEnvVars(string, expansions):
def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
- for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
- env such that key2 is after key1 in L if env[key2] refers to env[key1].
+ for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
+ env such that key2 is after key1 in L if env[key2] refers to env[key1].
- Throws an Exception in case of dependency cycles.
- """
+ Throws an Exception in case of dependency cycles.
+ """
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
@@ -1893,7 +1890,7 @@ def GetSortedXcodeEnv(
def GetSpecPostbuildCommands(spec, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form
- executable by a shell."""
+ executable by a shell."""
postbuilds = []
for postbuild in spec.get("postbuilds", []):
if not quiet:
@@ -1907,7 +1904,7 @@ def GetSpecPostbuildCommands(spec, quiet=False):
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
- IPHONEOS_DEPLOYMENT_TARGET."""
+ IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict["configurations"].values():
if config.get("xcode_settings", {}).get("IPHONEOS_DEPLOYMENT_TARGET"):
@@ -1917,7 +1914,7 @@ def _HasIOSTarget(targets):
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
- to build for iOS devices and use correct architectures for those builds."""
+ to build for iOS devices and use correct architectures for those builds."""
for target_dict in targets.values():
toolset = target_dict["toolset"]
configs = target_dict["configurations"]
@@ -1933,7 +1930,7 @@ def _AddIOSDeviceConfigurations(targets):
def CloneConfigurationForDeviceAndEmulator(target_dicts):
"""If |target_dicts| contains any iOS targets, automatically create -iphoneos
- targets for iOS device builds."""
+ targets for iOS device builds."""
if _HasIOSTarget(target_dicts):
return _AddIOSDeviceConfigurations(target_dicts)
return target_dicts
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
index cac1af56f7bfb7..a133fdbe8b4f58 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -21,8 +21,8 @@
def _WriteWorkspace(main_gyp, sources_gyp, params):
- """ Create a workspace to wrap main and sources gyp paths. """
- (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
+ """Create a workspace to wrap main and sources gyp paths."""
+ (build_file_root, _build_file_ext) = os.path.splitext(main_gyp)
workspace_path = build_file_root + ".xcworkspace"
options = params["options"]
if options.generator_output:
@@ -57,7 +57,7 @@ def _WriteWorkspace(main_gyp, sources_gyp, params):
def _TargetFromSpec(old_spec, params):
- """ Create fake target for xcode-ninja wrapper. """
+ """Create fake target for xcode-ninja wrapper."""
# Determine ninja top level build dir (e.g. /path/to/out).
ninja_toplevel = None
jobs = 0
@@ -70,12 +70,11 @@ def _TargetFromSpec(old_spec, params):
target_name = old_spec.get("target_name")
product_name = old_spec.get("product_name", target_name)
- product_extension = old_spec.get("product_extension")
ninja_target = {}
ninja_target["target_name"] = target_name
ninja_target["product_name"] = product_name
- if product_extension:
+ if product_extension := old_spec.get("product_extension"):
ninja_target["product_extension"] = product_extension
ninja_target["toolset"] = old_spec.get("toolset")
ninja_target["default_configuration"] = old_spec.get("default_configuration")
@@ -103,9 +102,9 @@ def _TargetFromSpec(old_spec, params):
new_xcode_settings[key] = old_xcode_settings[key]
ninja_target["configurations"][config] = {}
- ninja_target["configurations"][config][
- "xcode_settings"
- ] = new_xcode_settings
+ ninja_target["configurations"][config]["xcode_settings"] = (
+ new_xcode_settings
+ )
ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
@@ -138,13 +137,13 @@ def _TargetFromSpec(old_spec, params):
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
"""Limit targets for Xcode wrapper.
- Xcode sometimes performs poorly with too many targets, so only include
- proper executable targets, with filters to customize.
- Arguments:
- target_extras: Regular expression to always add, matching any target.
- executable_target_pattern: Regular expression limiting executable targets.
- spec: Specifications for target.
- """
+ Xcode sometimes performs poorly with too many targets, so only include
+ proper executable targets, with filters to customize.
+ Arguments:
+ target_extras: Regular expression to always add, matching any target.
+ executable_target_pattern: Regular expression limiting executable targets.
+ spec: Specifications for target.
+ """
target_name = spec.get("target_name")
# Always include targets matching target_extras.
if target_extras is not None and re.search(target_extras, target_name):
@@ -155,7 +154,6 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
spec.get("type", "") == "executable"
and spec.get("product_extension", "") != "bundle"
):
-
# If there is a filter and the target does not match, exclude the target.
if executable_target_pattern is not None:
if not re.search(executable_target_pattern, target_name):
@@ -167,14 +165,14 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
def CreateWrapper(target_list, target_dicts, data, params):
"""Initialize targets for the ninja wrapper.
- This sets up the necessary variables in the targets to generate Xcode projects
- that use ninja as an external builder.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- data: Dict of flattened build files keyed on gyp path.
- params: Dict of global options for gyp.
- """
+ This sets up the necessary variables in the targets to generate Xcode projects
+ that use ninja as an external builder.
+ Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ data: Dict of flattened build files keyed on gyp path.
+ params: Dict of global options for gyp.
+ """
orig_gyp = params["build_files"][0]
for gyp_name, gyp_dict in data.items():
if gyp_name == orig_gyp:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
index be17ef946dce35..cb467470d3044b 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
@@ -176,15 +176,14 @@ def cmp(x, y):
def SourceTreeAndPathFromPath(input_path):
"""Given input_path, returns a tuple with sourceTree and path values.
- Examples:
- input_path (source_tree, output_path)
- '$(VAR)/path' ('VAR', 'path')
- '$(VAR)' ('VAR', None)
- 'path' (None, 'path')
- """
-
- source_group_match = _path_leading_variable.match(input_path)
- if source_group_match:
+ Examples:
+ input_path (source_tree, output_path)
+ '$(VAR)/path' ('VAR', 'path')
+ '$(VAR)' ('VAR', None)
+ 'path' (None, 'path')
+ """
+
+ if source_group_match := _path_leading_variable.match(input_path):
source_tree = source_group_match.group(1)
output_path = source_group_match.group(3) # This may be None.
else:
@@ -201,70 +200,70 @@ def ConvertVariablesToShellSyntax(input_string):
class XCObject:
"""The abstract base of all class types used in Xcode project files.
- Class variables:
- _schema: A dictionary defining the properties of this class. The keys to
- _schema are string property keys as used in project files. Values
- are a list of four or five elements:
- [ is_list, property_type, is_strong, is_required, default ]
- is_list: True if the property described is a list, as opposed
- to a single element.
- property_type: The type to use as the value of the property,
- or if is_list is True, the type to use for each
- element of the value's list. property_type must
- be an XCObject subclass, or one of the built-in
- types str, int, or dict.
- is_strong: If property_type is an XCObject subclass, is_strong
- is True to assert that this class "owns," or serves
- as parent, to the property value (or, if is_list is
- True, values). is_strong must be False if
- property_type is not an XCObject subclass.
- is_required: True if the property is required for the class.
- Note that is_required being True does not preclude
- an empty string ("", in the case of property_type
- str) or list ([], in the case of is_list True) from
- being set for the property.
- default: Optional. If is_required is True, default may be set
- to provide a default value for objects that do not supply
- their own value. If is_required is True and default
- is not provided, users of the class must supply their own
- value for the property.
- Note that although the values of the array are expressed in
- boolean terms, subclasses provide values as integers to conserve
- horizontal space.
- _should_print_single_line: False in XCObject. Subclasses whose objects
- should be written to the project file in the
- alternate single-line format, such as
- PBXFileReference and PBXBuildFile, should
- set this to True.
- _encode_transforms: Used by _EncodeString to encode unprintable characters.
- The index into this list is the ordinal of the
- character to transform; each value is a string
- used to represent the character in the output. XCObject
- provides an _encode_transforms list suitable for most
- XCObject subclasses.
- _alternate_encode_transforms: Provided for subclasses that wish to use
- the alternate encoding rules. Xcode seems
- to use these rules when printing objects in
- single-line format. Subclasses that desire
- this behavior should set _encode_transforms
- to _alternate_encode_transforms.
- _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
- to construct this object's ID. Most classes that need custom
- hashing behavior should do it by overriding Hashables,
- but in some cases an object's parent may wish to push a
- hashable value into its child, and it can do so by appending
- to _hashables.
- Attributes:
- id: The object's identifier, a 24-character uppercase hexadecimal string.
- Usually, objects being created should not set id until the entire
- project file structure is built. At that point, UpdateIDs() should
- be called on the root object to assign deterministic values for id to
- each object in the tree.
- parent: The object's parent. This is set by a parent XCObject when a child
- object is added to it.
- _properties: The object's property dictionary. An object's properties are
- described by its class' _schema variable.
- """
+ Class variables:
+ _schema: A dictionary defining the properties of this class. The keys to
+ _schema are string property keys as used in project files. Values
+ are a list of four or five elements:
+ [ is_list, property_type, is_strong, is_required, default ]
+ is_list: True if the property described is a list, as opposed
+ to a single element.
+ property_type: The type to use as the value of the property,
+ or if is_list is True, the type to use for each
+ element of the value's list. property_type must
+ be an XCObject subclass, or one of the built-in
+ types str, int, or dict.
+ is_strong: If property_type is an XCObject subclass, is_strong
+ is True to assert that this class "owns," or serves
+ as parent, to the property value (or, if is_list is
+ True, values). is_strong must be False if
+ property_type is not an XCObject subclass.
+ is_required: True if the property is required for the class.
+ Note that is_required being True does not preclude
+ an empty string ("", in the case of property_type
+ str) or list ([], in the case of is_list True) from
+ being set for the property.
+ default: Optional. If is_required is True, default may be set
+ to provide a default value for objects that do not supply
+ their own value. If is_required is True and default
+ is not provided, users of the class must supply their own
+ value for the property.
+ Note that although the values of the array are expressed in
+ boolean terms, subclasses provide values as integers to conserve
+ horizontal space.
+ _should_print_single_line: False in XCObject. Subclasses whose objects
+ should be written to the project file in the
+ alternate single-line format, such as
+ PBXFileReference and PBXBuildFile, should
+ set this to True.
+ _encode_transforms: Used by _EncodeString to encode unprintable characters.
+ The index into this list is the ordinal of the
+ character to transform; each value is a string
+ used to represent the character in the output. XCObject
+ provides an _encode_transforms list suitable for most
+ XCObject subclasses.
+ _alternate_encode_transforms: Provided for subclasses that wish to use
+ the alternate encoding rules. Xcode seems
+ to use these rules when printing objects in
+ single-line format. Subclasses that desire
+ this behavior should set _encode_transforms
+ to _alternate_encode_transforms.
+ _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
+ to construct this object's ID. Most classes that need custom
+ hashing behavior should do it by overriding Hashables,
+ but in some cases an object's parent may wish to push a
+ hashable value into its child, and it can do so by appending
+ to _hashables.
+ Attributes:
+ id: The object's identifier, a 24-character uppercase hexadecimal string.
+ Usually, objects being created should not set id until the entire
+ project file structure is built. At that point, UpdateIDs() should
+ be called on the root object to assign deterministic values for id to
+ each object in the tree.
+ parent: The object's parent. This is set by a parent XCObject when a child
+ object is added to it.
+ _properties: The object's property dictionary. An object's properties are
+ described by its class' _schema variable.
+ """
_schema = {}
_should_print_single_line = False
@@ -306,12 +305,12 @@ def __repr__(self):
def Copy(self):
"""Make a copy of this object.
- The new object will have its own copy of lists and dicts. Any XCObject
- objects owned by this object (marked "strong") will be copied in the
- new object, even those found in lists. If this object has any weak
- references to other XCObjects, the same references are added to the new
- object without making a copy.
- """
+ The new object will have its own copy of lists and dicts. Any XCObject
+ objects owned by this object (marked "strong") will be copied in the
+ new object, even those found in lists. If this object has any weak
+ references to other XCObjects, the same references are added to the new
+ object without making a copy.
+ """
that = self.__class__(id=self.id, parent=self.parent)
for key, value in self._properties.items():
@@ -360,9 +359,9 @@ def Copy(self):
def Name(self):
"""Return the name corresponding to an object.
- Not all objects necessarily need to be nameable, and not all that do have
- a "name" property. Override as needed.
- """
+ Not all objects necessarily need to be nameable, and not all that do have
+ a "name" property. Override as needed.
+ """
# If the schema indicates that "name" is required, try to access the
# property even if it doesn't exist. This will result in a KeyError
@@ -378,20 +377,19 @@ def Name(self):
def Comment(self):
"""Return a comment string for the object.
- Most objects just use their name as the comment, but PBXProject uses
- different values.
+ Most objects just use their name as the comment, but PBXProject uses
+ different values.
- The returned comment is not escaped and does not have any comment marker
- strings applied to it.
- """
+ The returned comment is not escaped and does not have any comment marker
+ strings applied to it.
+ """
return self.Name()
def Hashables(self):
hashables = [self.__class__.__name__]
- name = self.Name()
- if name is not None:
+ if (name := self.Name()) is not None:
hashables.append(name)
hashables.extend(self._hashables)
@@ -404,26 +402,26 @@ def HashablesForChild(self):
def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
"""Set "id" properties deterministically.
- An object's "id" property is set based on a hash of its class type and
- name, as well as the class type and name of all ancestor objects. As
- such, it is only advisable to call ComputeIDs once an entire project file
- tree is built.
+ An object's "id" property is set based on a hash of its class type and
+ name, as well as the class type and name of all ancestor objects. As
+ such, it is only advisable to call ComputeIDs once an entire project file
+ tree is built.
- If recursive is True, recurse into all descendant objects and update their
- hashes.
+ If recursive is True, recurse into all descendant objects and update their
+ hashes.
- If overwrite is True, any existing value set in the "id" property will be
- replaced.
- """
+ If overwrite is True, any existing value set in the "id" property will be
+ replaced.
+ """
def _HashUpdate(hash, data):
"""Update hash with data's length and contents.
- If the hash were updated only with the value of data, it would be
- possible for clowns to induce collisions by manipulating the names of
- their objects. By adding the length, it's exceedingly less likely that
- ID collisions will be encountered, intentionally or not.
- """
+ If the hash were updated only with the value of data, it would be
+ possible for clowns to induce collisions by manipulating the names of
+ their objects. By adding the length, it's exceedingly less likely that
+ ID collisions will be encountered, intentionally or not.
+ """
hash.update(struct.pack(">i", len(data)))
if isinstance(data, str):
@@ -466,8 +464,7 @@ def _HashUpdate(hash, data):
self.id = "%08X%08X%08X" % tuple(id_ints)
def EnsureNoIDCollisions(self):
- """Verifies that no two objects have the same ID. Checks all descendants.
- """
+ """Verifies that no two objects have the same ID. Checks all descendants."""
ids = {}
descendants = self.Descendants()
@@ -490,7 +487,7 @@ def Children(self):
children = []
for property, attributes in self._schema.items():
- (is_list, property_type, is_strong) = attributes[0:3]
+ (is_list, _property_type, is_strong) = attributes[0:3]
if is_strong and property in self._properties:
if not is_list:
children.append(self._properties[property])
@@ -500,8 +497,8 @@ def Children(self):
def Descendants(self):
"""Returns a list of all of this object's descendants, including this
- object.
- """
+ object.
+ """
children = self.Children()
descendants = [self]
@@ -517,8 +514,8 @@ def PBXProjectAncestor(self):
def _EncodeComment(self, comment):
"""Encodes a comment to be placed in the project file output, mimicking
- Xcode behavior.
- """
+ Xcode behavior.
+ """
# This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
# the string already contains a "*/", it is turned into "(*)/". This keeps
@@ -545,8 +542,8 @@ def _EncodeTransform(self, match):
def _EncodeString(self, value):
"""Encodes a string to be placed in the project file output, mimicking
- Xcode behavior.
- """
+ Xcode behavior.
+ """
# Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
# $ (dollar sign), . (period), and _ (underscore) is present. Also use
@@ -587,18 +584,18 @@ def _XCPrint(self, file, tabs, line):
def _XCPrintableValue(self, tabs, value, flatten_list=False):
"""Returns a representation of value that may be printed in a project file,
- mimicking Xcode's behavior.
+ mimicking Xcode's behavior.
- _XCPrintableValue can handle str and int values, XCObjects (which are
- made printable by returning their id property), and list and dict objects
- composed of any of the above types. When printing a list or dict, and
- _should_print_single_line is False, the tabs parameter is used to determine
- how much to indent the lines corresponding to the items in the list or
- dict.
+ _XCPrintableValue can handle str and int values, XCObjects (which are
+ made printable by returning their id property), and list and dict objects
+ composed of any of the above types. When printing a list or dict, and
+ _should_print_single_line is False, the tabs parameter is used to determine
+ how much to indent the lines corresponding to the items in the list or
+ dict.
- If flatten_list is True, single-element lists will be transformed into
- strings.
- """
+ If flatten_list is True, single-element lists will be transformed into
+ strings.
+ """
printable = ""
comment = None
@@ -659,12 +656,12 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False):
def _XCKVPrint(self, file, tabs, key, value):
"""Prints a key and value, members of an XCObject's _properties dictionary,
- to file.
+ to file.
- tabs is an int identifying the indentation level. If the class'
- _should_print_single_line variable is True, tabs is ignored and the
- key-value pair will be followed by a space instead of a newline.
- """
+ tabs is an int identifying the indentation level. If the class'
+ _should_print_single_line variable is True, tabs is ignored and the
+ key-value pair will be followed by a space instead of a newline.
+ """
if self._should_print_single_line:
printable = ""
@@ -722,8 +719,8 @@ def _XCKVPrint(self, file, tabs, key, value):
def Print(self, file=sys.stdout):
"""Prints a reprentation of this object to file, adhering to Xcode output
- formatting.
- """
+ formatting.
+ """
self.VerifyHasRequiredProperties()
@@ -761,15 +758,15 @@ def Print(self, file=sys.stdout):
def UpdateProperties(self, properties, do_copy=False):
"""Merge the supplied properties into the _properties dictionary.
- The input properties must adhere to the class schema or a KeyError or
- TypeError exception will be raised. If adding an object of an XCObject
- subclass and the schema indicates a strong relationship, the object's
- parent will be set to this object.
+ The input properties must adhere to the class schema or a KeyError or
+ TypeError exception will be raised. If adding an object of an XCObject
+ subclass and the schema indicates a strong relationship, the object's
+ parent will be set to this object.
- If do_copy is True, then lists, dicts, strong-owned XCObjects, and
- strong-owned XCObjects in lists will be copied instead of having their
- references added.
- """
+ If do_copy is True, then lists, dicts, strong-owned XCObjects, and
+ strong-owned XCObjects in lists will be copied instead of having their
+ references added.
+ """
if properties is None:
return
@@ -910,23 +907,23 @@ def AppendProperty(self, key, value):
def VerifyHasRequiredProperties(self):
"""Ensure that all properties identified as required by the schema are
- set.
- """
+ set.
+ """
# TODO(mark): A stronger verification mechanism is needed. Some
# subclasses need to perform validation beyond what the schema can enforce.
for property, attributes in self._schema.items():
- (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ (_is_list, _property_type, _is_strong, is_required) = attributes[0:4]
if is_required and property not in self._properties:
raise KeyError(self.__class__.__name__ + " requires " + property)
def _SetDefaultsFromSchema(self):
"""Assign object default values according to the schema. This will not
- overwrite properties that have already been set."""
+ overwrite properties that have already been set."""
defaults = {}
for property, attributes in self._schema.items():
- (is_list, property_type, is_strong, is_required) = attributes[0:4]
+ (_is_list, _property_type, _is_strong, is_required) = attributes[0:4]
if (
is_required
and len(attributes) >= 5
@@ -944,7 +941,7 @@ def _SetDefaultsFromSchema(self):
class XCHierarchicalElement(XCObject):
"""Abstract base for PBXGroup and PBXFileReference. Not represented in a
- project file."""
+ project file."""
# TODO(mark): Do name and path belong here? Probably so.
# If path is set and name is not, name may have a default value. Name will
@@ -1010,27 +1007,27 @@ def Name(self):
def Hashables(self):
"""Custom hashables for XCHierarchicalElements.
- XCHierarchicalElements are special. Generally, their hashes shouldn't
- change if the paths don't change. The normal XCObject implementation of
- Hashables adds a hashable for each object, which means that if
- the hierarchical structure changes (possibly due to changes caused when
- TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
- the hashes will change. For example, if a project file initially contains
- a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent
- a/b. If someone later adds a/f2 to the project file, a/b can no longer be
- collapsed, and f1 winds up with parent b and grandparent a. That would
- be sufficient to change f1's hash.
-
- To counteract this problem, hashables for all XCHierarchicalElements except
- for the main group (which has neither a name nor a path) are taken to be
- just the set of path components. Because hashables are inherited from
- parents, this provides assurance that a/b/f1 has the same set of hashables
- whether its parent is b or a/b.
-
- The main group is a special case. As it is permitted to have no name or
- path, it is permitted to use the standard XCObject hash mechanism. This
- is not considered a problem because there can be only one main group.
- """
+ XCHierarchicalElements are special. Generally, their hashes shouldn't
+ change if the paths don't change. The normal XCObject implementation of
+ Hashables adds a hashable for each object, which means that if
+ the hierarchical structure changes (possibly due to changes caused when
+ TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
+ the hashes will change. For example, if a project file initially contains
+ a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent
+ a/b. If someone later adds a/f2 to the project file, a/b can no longer be
+ collapsed, and f1 winds up with parent b and grandparent a. That would
+ be sufficient to change f1's hash.
+
+ To counteract this problem, hashables for all XCHierarchicalElements except
+ for the main group (which has neither a name nor a path) are taken to be
+ just the set of path components. Because hashables are inherited from
+ parents, this provides assurance that a/b/f1 has the same set of hashables
+ whether its parent is b or a/b.
+
+ The main group is a special case. As it is permitted to have no name or
+ path, it is permitted to use the standard XCObject hash mechanism. This
+ is not considered a problem because there can be only one main group.
+ """
if self == self.PBXProjectAncestor()._properties["mainGroup"]:
# super
@@ -1051,8 +1048,7 @@ def Hashables(self):
# including paths with a sourceTree, they'll still inherit their parents'
# hashables, even though the paths aren't relative to their parents. This
# is not expected to be much of a problem in practice.
- path = self.PathFromSourceTreeAndPath()
- if path is not None:
+ if (path := self.PathFromSourceTreeAndPath()) is not None:
components = path.split(posixpath.sep)
for component in components:
hashables.append(self.__class__.__name__ + ".path")
@@ -1160,12 +1156,12 @@ def FullPath(self):
class PBXGroup(XCHierarchicalElement):
"""
- Attributes:
- _children_by_path: Maps pathnames of children of this PBXGroup to the
- actual child XCHierarchicalElement objects.
- _variant_children_by_name_and_path: Maps (name, path) tuples of
- PBXVariantGroup children to the actual child PBXVariantGroup objects.
- """
+ Attributes:
+ _children_by_path: Maps pathnames of children of this PBXGroup to the
+ actual child XCHierarchicalElement objects.
+ _variant_children_by_name_and_path: Maps (name, path) tuples of
+ PBXVariantGroup children to the actual child PBXVariantGroup objects.
+ """
_schema = XCHierarchicalElement._schema.copy()
_schema.update(
@@ -1284,20 +1280,20 @@ def GetChildByRemoteObject(self, remote_object):
def AddOrGetFileByPath(self, path, hierarchical):
"""Returns an existing or new file reference corresponding to path.
- If hierarchical is True, this method will create or use the necessary
- hierarchical group structure corresponding to path. Otherwise, it will
- look in and create an item in the current group only.
+ If hierarchical is True, this method will create or use the necessary
+ hierarchical group structure corresponding to path. Otherwise, it will
+ look in and create an item in the current group only.
- If an existing matching reference is found, it is returned, otherwise, a
- new one will be created, added to the correct group, and returned.
+ If an existing matching reference is found, it is returned, otherwise, a
+ new one will be created, added to the correct group, and returned.
- If path identifies a directory by virtue of carrying a trailing slash,
- this method returns a PBXFileReference of "folder" type. If path
- identifies a variant, by virtue of it identifying a file inside a directory
- with an ".lproj" extension, this method returns a PBXVariantGroup
- containing the variant named by path, and possibly other variants. For
- all other paths, a "normal" PBXFileReference will be returned.
- """
+ If path identifies a directory by virtue of carrying a trailing slash,
+ this method returns a PBXFileReference of "folder" type. If path
+ identifies a variant, by virtue of it identifying a file inside a directory
+ with an ".lproj" extension, this method returns a PBXVariantGroup
+ containing the variant named by path, and possibly other variants. For
+ all other paths, a "normal" PBXFileReference will be returned.
+ """
# Adding or getting a directory? Directories end with a trailing slash.
is_dir = False
@@ -1382,15 +1378,15 @@ def AddOrGetFileByPath(self, path, hierarchical):
def AddOrGetVariantGroupByNameAndPath(self, name, path):
"""Returns an existing or new PBXVariantGroup for name and path.
- If a PBXVariantGroup identified by the name and path arguments is already
- present as a child of this object, it is returned. Otherwise, a new
- PBXVariantGroup with the correct properties is created, added as a child,
- and returned.
+ If a PBXVariantGroup identified by the name and path arguments is already
+ present as a child of this object, it is returned. Otherwise, a new
+ PBXVariantGroup with the correct properties is created, added as a child,
+ and returned.
- This method will generally be called by AddOrGetFileByPath, which knows
- when to create a variant group based on the structure of the pathnames
- passed to it.
- """
+ This method will generally be called by AddOrGetFileByPath, which knows
+ when to create a variant group based on the structure of the pathnames
+ passed to it.
+ """
key = (name, path)
if key in self._variant_children_by_name_and_path:
@@ -1408,19 +1404,19 @@ def AddOrGetVariantGroupByNameAndPath(self, name, path):
def TakeOverOnlyChild(self, recurse=False):
"""If this PBXGroup has only one child and it's also a PBXGroup, take
- it over by making all of its children this object's children.
-
- This function will continue to take over only children when those children
- are groups. If there are three PBXGroups representing a, b, and c, with
- c inside b and b inside a, and a and b have no other children, this will
- result in a taking over both b and c, forming a PBXGroup for a/b/c.
-
- If recurse is True, this function will recurse into children and ask them
- to collapse themselves by taking over only children as well. Assuming
- an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
- (d1, d2, and f are files, the rest are groups), recursion will result in
- a group for a/b/c containing a group for d3/e.
- """
+ it over by making all of its children this object's children.
+
+ This function will continue to take over only children when those children
+ are groups. If there are three PBXGroups representing a, b, and c, with
+ c inside b and b inside a, and a and b have no other children, this will
+ result in a taking over both b and c, forming a PBXGroup for a/b/c.
+
+ If recurse is True, this function will recurse into children and ask them
+ to collapse themselves by taking over only children as well. Assuming
+ an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
+ (d1, d2, and f are files, the rest are groups), recursion will result in
+ a group for a/b/c containing a group for d3/e.
+ """
# At this stage, check that child class types are PBXGroup exactly,
# instead of using isinstance. The only subclass of PBXGroup,
@@ -1620,7 +1616,7 @@ def __init__(self, properties=None, id=None, parent=None):
prop_name = "lastKnownFileType"
else:
basename = posixpath.basename(self._properties["path"])
- (root, ext) = posixpath.splitext(basename)
+ (_root, ext) = posixpath.splitext(basename)
# Check the map using a lowercase extension.
# TODO(mark): Maybe it should try with the original case first and fall
# back to lowercase, in case there are any instances where case
@@ -1719,16 +1715,16 @@ def DefaultConfiguration(self):
def HasBuildSetting(self, key):
"""Determines the state of a build setting in all XCBuildConfiguration
- child objects.
+ child objects.
- If all child objects have key in their build settings, and the value is the
- same in all child objects, returns 1.
+ If all child objects have key in their build settings, and the value is the
+ same in all child objects, returns 1.
- If no child objects have the key in their build settings, returns 0.
+ If no child objects have the key in their build settings, returns 0.
- If some, but not all, child objects have the key in their build settings,
- or if any children have different values for the key, returns -1.
- """
+ If some, but not all, child objects have the key in their build settings,
+ or if any children have different values for the key, returns -1.
+ """
has = None
value = None
@@ -1754,9 +1750,9 @@ def HasBuildSetting(self, key):
def GetBuildSetting(self, key):
"""Gets the build setting for key.
- All child XCConfiguration objects must have the same value set for the
- setting, or a ValueError will be raised.
- """
+ All child XCConfiguration objects must have the same value set for the
+ setting, or a ValueError will be raised.
+ """
# TODO(mark): This is wrong for build settings that are lists. The list
# contents should be compared (and a list copy returned?)
@@ -1773,31 +1769,30 @@ def GetBuildSetting(self, key):
def SetBuildSetting(self, key, value):
"""Sets the build setting for key to value in all child
- XCBuildConfiguration objects.
- """
+ XCBuildConfiguration objects.
+ """
for configuration in self._properties["buildConfigurations"]:
configuration.SetBuildSetting(key, value)
def AppendBuildSetting(self, key, value):
"""Appends value to the build setting for key, which is treated as a list,
- in all child XCBuildConfiguration objects.
- """
+ in all child XCBuildConfiguration objects.
+ """
for configuration in self._properties["buildConfigurations"]:
configuration.AppendBuildSetting(key, value)
def DelBuildSetting(self, key):
"""Deletes the build setting key from all child XCBuildConfiguration
- objects.
- """
+ objects.
+ """
for configuration in self._properties["buildConfigurations"]:
configuration.DelBuildSetting(key)
def SetBaseConfiguration(self, value):
- """Sets the build configuration in all child XCBuildConfiguration objects.
- """
+ """Sets the build configuration in all child XCBuildConfiguration objects."""
for configuration in self._properties["buildConfigurations"]:
configuration.SetBaseConfiguration(value)
@@ -1837,14 +1832,14 @@ def Hashables(self):
class XCBuildPhase(XCObject):
"""Abstract base for build phase classes. Not represented in a project
- file.
+ file.
- Attributes:
- _files_by_path: A dict mapping each path of a child in the files list by
- path (keys) to the corresponding PBXBuildFile children (values).
- _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
- to the corresponding PBXBuildFile children (values).
- """
+ Attributes:
+ _files_by_path: A dict mapping each path of a child in the files list by
+ path (keys) to the corresponding PBXBuildFile children (values).
+ _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
+ to the corresponding PBXBuildFile children (values).
+ """
# TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
# actually have a "files" list. XCBuildPhase should not have "files" but
@@ -1883,8 +1878,8 @@ def FileGroup(self, path):
def _AddPathToDict(self, pbxbuildfile, path):
"""Adds path to the dict tracking paths belonging to this build phase.
- If the path is already a member of this build phase, raises an exception.
- """
+ If the path is already a member of this build phase, raises an exception.
+ """
if path in self._files_by_path:
raise ValueError("Found multiple build files with path " + path)
@@ -1893,28 +1888,28 @@ def _AddPathToDict(self, pbxbuildfile, path):
def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
"""Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
- If path is specified, then it is the path that is being added to the
- phase, and pbxbuildfile must contain either a PBXFileReference directly
- referencing that path, or it must contain a PBXVariantGroup that itself
- contains a PBXFileReference referencing the path.
-
- If path is not specified, either the PBXFileReference's path or the paths
- of all children of the PBXVariantGroup are taken as being added to the
- phase.
-
- If the path is already present in the phase, raises an exception.
-
- If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
- are already present in the phase, referenced by a different PBXBuildFile
- object, raises an exception. This does not raise an exception when
- a PBXFileReference or PBXVariantGroup reappear and are referenced by the
- same PBXBuildFile that has already introduced them, because in the case
- of PBXVariantGroup objects, they may correspond to multiple paths that are
- not all added simultaneously. When this situation occurs, the path needs
- to be added to _files_by_path, but nothing needs to change in
- _files_by_xcfilelikeelement, and the caller should have avoided adding
- the PBXBuildFile if it is already present in the list of children.
- """
+ If path is specified, then it is the path that is being added to the
+ phase, and pbxbuildfile must contain either a PBXFileReference directly
+ referencing that path, or it must contain a PBXVariantGroup that itself
+ contains a PBXFileReference referencing the path.
+
+ If path is not specified, either the PBXFileReference's path or the paths
+ of all children of the PBXVariantGroup are taken as being added to the
+ phase.
+
+ If the path is already present in the phase, raises an exception.
+
+ If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
+ are already present in the phase, referenced by a different PBXBuildFile
+ object, raises an exception. This does not raise an exception when
+ a PBXFileReference or PBXVariantGroup reappear and are referenced by the
+ same PBXBuildFile that has already introduced them, because in the case
+ of PBXVariantGroup objects, they may correspond to multiple paths that are
+ not all added simultaneously. When this situation occurs, the path needs
+ to be added to _files_by_path, but nothing needs to change in
+ _files_by_xcfilelikeelement, and the caller should have avoided adding
+ the PBXBuildFile if it is already present in the list of children.
+ """
xcfilelikeelement = pbxbuildfile._properties["fileRef"]
@@ -2015,7 +2010,7 @@ def Name(self):
return "Frameworks"
def FileGroup(self, path):
- (root, ext) = posixpath.splitext(path)
+ (_root, ext) = posixpath.splitext(path)
if ext != "":
ext = ext[1:].lower()
if ext == "o":
@@ -2105,12 +2100,11 @@ def FileGroup(self, path):
def SetDestination(self, path):
"""Set the dstSubfolderSpec and dstPath properties from path.
- path may be specified in the same notation used for XCHierarchicalElements,
- specifically, "$(DIR)/path".
- """
+ path may be specified in the same notation used for XCHierarchicalElements,
+ specifically, "$(DIR)/path".
+ """
- path_tree_match = self.path_tree_re.search(path)
- if path_tree_match:
+ if path_tree_match := self.path_tree_re.search(path):
path_tree = path_tree_match.group(1)
if path_tree in self.path_tree_first_to_subfolder:
subfolder = self.path_tree_first_to_subfolder[path_tree]
@@ -2182,9 +2176,7 @@ def SetDestination(self, path):
subfolder = 0
relative_path = path[1:]
else:
- raise ValueError(
- f"Can't use path {path} in a {self.__class__.__name__}"
- )
+ raise ValueError(f"Can't use path {path} in a {self.__class__.__name__}")
self._properties["dstPath"] = relative_path
self._properties["dstSubfolderSpec"] = subfolder
@@ -2534,9 +2526,9 @@ def __init__(
# loadable modules, but there's precedent: Python loadable modules on
# Mac OS X use an .so extension.
if self._properties["productType"] == "com.googlecode.gyp.xcode.bundle":
- self._properties[
- "productType"
- ] = "com.apple.product-type.library.dynamic"
+ self._properties["productType"] = (
+ "com.apple.product-type.library.dynamic"
+ )
self.SetBuildSetting("MACH_O_TYPE", "mh_bundle")
self.SetBuildSetting("DYLIB_CURRENT_VERSION", "")
self.SetBuildSetting("DYLIB_COMPATIBILITY_VERSION", "")
@@ -2544,9 +2536,10 @@ def __init__(
force_extension = suffix[1:]
if (
- self._properties["productType"] in {
+ self._properties["productType"]
+ in {
"com.apple.product-type-bundle.unit.test",
- "com.apple.product-type-bundle.ui-testing"
+ "com.apple.product-type-bundle.ui-testing",
}
) and force_extension is None:
force_extension = suffix[1:]
@@ -2698,10 +2691,8 @@ def AddDependency(self, other):
other._properties["productType"] == static_library_type
or (
(
- other._properties["productType"] in {
- shared_library_type,
- framework_type
- }
+ other._properties["productType"]
+ in {shared_library_type, framework_type}
)
and (
(not other.HasBuildSetting("MACH_O_TYPE"))
@@ -2710,7 +2701,6 @@ def AddDependency(self, other):
)
)
):
-
file_ref = other.GetProperty("productReference")
pbxproject = self.PBXProjectAncestor()
@@ -2736,13 +2726,13 @@ class PBXProject(XCContainerPortal):
# PBXContainerItemProxy.
"""
- Attributes:
- path: "sample.xcodeproj". TODO(mark) Document me!
- _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
- value is a reference to the dict in the
- projectReferences list associated with the keyed
- PBXProject.
- """
+ Attributes:
+ path: "sample.xcodeproj". TODO(mark) Document me!
+ _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
+ value is a reference to the dict in the
+ projectReferences list associated with the keyed
+ PBXProject.
+ """
_schema = XCContainerPortal._schema.copy()
_schema.update(
@@ -2837,17 +2827,17 @@ def ProjectsGroup(self):
def RootGroupForPath(self, path):
"""Returns a PBXGroup child of this object to which path should be added.
- This method is intended to choose between SourceGroup and
- IntermediatesGroup on the basis of whether path is present in a source
- directory or an intermediates directory. For the purposes of this
- determination, any path located within a derived file directory such as
- PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
- directory.
+ This method is intended to choose between SourceGroup and
+ IntermediatesGroup on the basis of whether path is present in a source
+ directory or an intermediates directory. For the purposes of this
+ determination, any path located within a derived file directory such as
+ PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
+ directory.
- The returned value is a two-element tuple. The first element is the
- PBXGroup, and the second element specifies whether that group should be
- organized hierarchically (True) or as a single flat list (False).
- """
+ The returned value is a two-element tuple. The first element is the
+ PBXGroup, and the second element specifies whether that group should be
+ organized hierarchically (True) or as a single flat list (False).
+ """
# TODO(mark): make this a class variable and bind to self on call?
# Also, this list is nowhere near exhaustive.
@@ -2873,11 +2863,11 @@ def RootGroupForPath(self, path):
def AddOrGetFileInRootGroup(self, path):
"""Returns a PBXFileReference corresponding to path in the correct group
- according to RootGroupForPath's heuristics.
+ according to RootGroupForPath's heuristics.
- If an existing PBXFileReference for path exists, it will be returned.
- Otherwise, one will be created and returned.
- """
+ If an existing PBXFileReference for path exists, it will be returned.
+ Otherwise, one will be created and returned.
+ """
(group, hierarchical) = self.RootGroupForPath(path)
return group.AddOrGetFileByPath(path, hierarchical)
@@ -2927,17 +2917,17 @@ def SortGroups(self):
def AddOrGetProjectReference(self, other_pbxproject):
"""Add a reference to another project file (via PBXProject object) to this
- one.
+ one.
- Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
- this project file that contains a PBXReferenceProxy object for each
- product of each PBXNativeTarget in the other project file. ProjectRef is
- a PBXFileReference to the other project file.
+ Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
+ this project file that contains a PBXReferenceProxy object for each
+ product of each PBXNativeTarget in the other project file. ProjectRef is
+ a PBXFileReference to the other project file.
- If this project file already references the other project file, the
- existing ProductGroup and ProjectRef are returned. The ProductGroup will
- still be updated if necessary.
- """
+ If this project file already references the other project file, the
+ existing ProductGroup and ProjectRef are returned. The ProductGroup will
+ still be updated if necessary.
+ """
if "projectReferences" not in self._properties:
self._properties["projectReferences"] = []
@@ -2989,7 +2979,7 @@ def AddOrGetProjectReference(self, other_pbxproject):
# Xcode seems to sort this list case-insensitively
self._properties["projectReferences"] = sorted(
self._properties["projectReferences"],
- key=lambda x: x["ProjectRef"].Name().lower()
+ key=lambda x: x["ProjectRef"].Name().lower(),
)
else:
# The link already exists. Pull out the relevant data.
@@ -3014,11 +3004,8 @@ def _AllSymrootsUnique(self, target, inherit_unique_symroot):
# define an explicit value for 'SYMROOT'.
symroots = self._DefinedSymroots(target)
for s in self._DefinedSymroots(target):
- if (
- (s is not None
- and not self._IsUniqueSymrootForTarget(s))
- or (s is None
- and not inherit_unique_symroot)
+ if (s is not None and not self._IsUniqueSymrootForTarget(s)) or (
+ s is None and not inherit_unique_symroot
):
return False
return True if symroots else inherit_unique_symroot
@@ -3122,7 +3109,8 @@ def CompareProducts(x, y, remote_products):
product_group._properties["children"] = sorted(
product_group._properties["children"],
key=cmp_to_key(
- lambda x, y, rp=remote_products: CompareProducts(x, y, rp)),
+ lambda x, y, rp=remote_products: CompareProducts(x, y, rp)
+ ),
)
@@ -3156,9 +3144,7 @@ def Print(self, file=sys.stdout):
self._XCPrint(file, 0, "{ ")
else:
self._XCPrint(file, 0, "{\n")
- for property, value in sorted(
- self._properties.items()
- ):
+ for property, value in sorted(self._properties.items()):
if property == "objects":
self._PrintObjects(file)
else:
@@ -3184,9 +3170,7 @@ def _PrintObjects(self, file):
for class_name in sorted(objects_by_class):
self._XCPrint(file, 0, "\n")
self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n")
- for object in sorted(
- objects_by_class[class_name], key=attrgetter("id")
- ):
+ for object in sorted(objects_by_class[class_name], key=attrgetter("id")):
object.Print(file)
self._XCPrint(file, 0, "/* End " + class_name + " section */\n")
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
index 530196366946d8..d7e3b5a95604f7 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
@@ -9,7 +9,6 @@
TODO(bradnelson): Consider dropping this when we drop XP support.
"""
-
import xml.dom.minidom
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
index 6fb19b30bb53c1..cb33e10556ba1b 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
@@ -48,8 +48,7 @@ def __init__(self, f: IO[bytes]) -> None:
ident = self._read("16B")
except struct.error:
raise ELFInvalid("unable to parse identification")
- magic = bytes(ident[:4])
- if magic != b"\x7fELF":
+ if (magic := bytes(ident[:4])) != b"\x7fELF":
raise ELFInvalid(f"invalid magic: {magic!r}")
self.capacity = ident[4] # Format for program header (bitness).
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/markers.py b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/markers.py
index 8b98fca7233be6..7e4d150208eec4 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/markers.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/markers.py
@@ -166,8 +166,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
def format_full_version(info: "sys._version_info") -> str:
version = "{0.major}.{0.minor}.{0.micro}".format(info)
- kind = info.releaselevel
- if kind != "final":
+ if (kind := info.releaselevel) != "final":
version += kind[0] + str(info.serial)
return version
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
index 23bb564f3d5ff8..43f5c5b30df979 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
@@ -591,8 +591,7 @@ def _process_description_content_type(self, value: str) -> str:
f"{{field}} must be one of {list(content_types)}, not {value!r}"
)
- charset = parameters.get("charset", "UTF-8")
- if charset != "UTF-8":
+ if (charset := parameters.get("charset", "UTF-8")) != "UTF-8":
raise self._invalid_metadata(
f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
)
diff --git a/deps/npm/node_modules/node-gyp/gyp/pyproject.toml b/deps/npm/node_modules/node-gyp/gyp/pyproject.toml
index 537308731fe542..adc82c3350151f 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pyproject.toml
+++ b/deps/npm/node_modules/node-gyp/gyp/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "gyp-next"
-version = "0.20.0"
+version = "0.20.5"
authors = [
{ name="Node.js contributors", email="ryzokuken@disroot.org" },
]
@@ -39,7 +39,6 @@ gyp = "gyp:script_main"
[tool.ruff]
extend-exclude = ["pylib/packaging"]
line-length = 88
-target-version = "py37"
[tool.ruff.lint]
select = [
diff --git a/deps/npm/node_modules/node-gyp/gyp/test_gyp.py b/deps/npm/node_modules/node-gyp/gyp/test_gyp.py
index b7bb956b8ed585..70c81ae8ca3bf9 100755
--- a/deps/npm/node_modules/node-gyp/gyp/test_gyp.py
+++ b/deps/npm/node_modules/node-gyp/gyp/test_gyp.py
@@ -5,7 +5,6 @@
"""gyptest.py -- test runner for GYP tests."""
-
import argparse
import os
import platform
@@ -148,13 +147,13 @@ def print_configuration_info():
print("Test configuration:")
if sys.platform == "darwin":
sys.path.append(os.path.abspath("test/lib"))
- import TestMac
+ import TestMac # noqa: PLC0415
print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
print(f" Xcode {TestMac.Xcode.Version()}")
elif sys.platform == "win32":
sys.path.append(os.path.abspath("pylib"))
- import gyp.MSVSVersion
+ import gyp.MSVSVersion # noqa: PLC0415
print(" Win %s %s\n" % platform.win32_ver()[0:2])
print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
diff --git a/deps/npm/node_modules/node-gyp/lib/install.js b/deps/npm/node_modules/node-gyp/lib/install.js
index 90be86c822c8fb..ee4adb1e67fcd5 100644
--- a/deps/npm/node_modules/node-gyp/lib/install.js
+++ b/deps/npm/node_modules/node-gyp/lib/install.js
@@ -200,10 +200,10 @@ async function install (gyp, argv) {
// download the tarball and extract!
// Ommited on Windows if only new node.lib is required
- // on Windows there can be file errors from tar if parallel installs
+ // there can be file errors from tar if parallel installs
// are happening (not uncommon with multiple native modules) so
// extract the tarball to a temp directory first and then copy over
- const tarExtractDir = win ? await fs.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-')) : devDir
+ const tarExtractDir = await fs.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-'))
try {
if (shouldDownloadTarball) {
@@ -277,17 +277,13 @@ async function install (gyp, argv) {
}
// copy over the files from the temp tarball extract directory to devDir
- if (tarExtractDir !== devDir) {
- await copyDirectory(tarExtractDir, devDir)
- }
+ await copyDirectory(tarExtractDir, devDir)
} finally {
- if (tarExtractDir !== devDir) {
- try {
- // try to cleanup temp dir
- await fs.rm(tarExtractDir, { recursive: true, maxRetries: 3 })
- } catch {
- log.warn('failed to clean up temp tarball extract directory')
- }
+ try {
+ // try to cleanup temp dir
+ await fs.rm(tarExtractDir, { recursive: true, maxRetries: 3 })
+ } catch {
+ log.warn('failed to clean up temp tarball extract directory')
}
}
diff --git a/deps/npm/node_modules/node-gyp/lib/node-gyp.js b/deps/npm/node_modules/node-gyp/lib/node-gyp.js
index 5e25bf996f8b22..dafce99d49e352 100644
--- a/deps/npm/node_modules/node-gyp/lib/node-gyp.js
+++ b/deps/npm/node_modules/node-gyp/lib/node-gyp.js
@@ -122,31 +122,42 @@ class Gyp extends EventEmitter {
}
// support for inheriting config env variables from npm
- const npmConfigPrefix = 'npm_config_'
- Object.keys(process.env).forEach((name) => {
- if (name.indexOf(npmConfigPrefix) !== 0) {
- return
- }
- const val = process.env[name]
- if (name === npmConfigPrefix + 'loglevel') {
- log.logger.level = val
- } else {
+ // npm will set environment variables in the following forms:
+ // - `npm_config_` for values from npm's own config. Setting arbitrary
+ // options on npm's config was deprecated in npm v11 but node-gyp still
+ // supports it for backwards compatibility.
+ // See https://github.com/nodejs/node-gyp/issues/3156
+ // - `npm_package_config_node_gyp_` for values from the `config` object
+ // in package.json. This is the preferred way to set options for node-gyp
+ // since npm v11. The `node_gyp_` prefix is used to avoid conflicts with
+ // other tools.
+ // The `npm_package_config_node_gyp_` prefix will take precedence over
+ // `npm_config_` keys.
+ const npmConfigPrefix = /^npm_config_/i
+ const npmPackageConfigPrefix = /^npm_package_config_node_gyp_/i
+
+ const configEnvKeys = Object.keys(process.env)
+ .filter((k) => npmConfigPrefix.test(k) || npmPackageConfigPrefix.test(k))
+ // sort so that npm_package_config_node_gyp_ keys come last and will override
+ .sort((a) => npmConfigPrefix.test(a) ? -1 : 1)
+
+ for (const key of configEnvKeys) {
// add the user-defined options to the config
- name = name.substring(npmConfigPrefix.length)
- // gyp@741b7f1 enters an infinite loop when it encounters
- // zero-length options so ensure those don't get through.
- if (name) {
+ const name = npmConfigPrefix.test(key)
+ ? key.replace(npmConfigPrefix, '')
+ : key.replace(npmPackageConfigPrefix, '')
+ // gyp@741b7f1 enters an infinite loop when it encounters
+ // zero-length options so ensure those don't get through.
+ if (name) {
// convert names like force_process_config to force-process-config
- if (name.includes('_')) {
- name = name.replace(/_/g, '-')
- }
- this.opts[name] = val
- }
+ // and convert to lowercase
+ this.opts[name.replaceAll('_', '-').toLowerCase()] = process.env[key]
}
- })
+ }
if (this.opts.loglevel) {
log.logger.level = this.opts.loglevel
+ delete this.opts.loglevel
}
log.resume()
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/commonjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/commonjs/index.js
deleted file mode 100644
index 6a7b68d5eac26e..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/commonjs/index.js
+++ /dev/null
@@ -1,93 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.chownrSync = exports.chownr = void 0;
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const lchownSync = (path, uid, gid) => {
- try {
- return node_fs_1.default.lchownSync(path, uid, gid);
- }
- catch (er) {
- if (er?.code !== 'ENOENT')
- throw er;
- }
-};
-const chown = (cpath, uid, gid, cb) => {
- node_fs_1.default.lchown(cpath, uid, gid, er => {
- // Skip ENOENT error
- cb(er && er?.code !== 'ENOENT' ? er : null);
- });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
- if (child.isDirectory()) {
- (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => {
- if (er)
- return cb(er);
- const cpath = node_path_1.default.resolve(p, child.name);
- chown(cpath, uid, gid, cb);
- });
- }
- else {
- const cpath = node_path_1.default.resolve(p, child.name);
- chown(cpath, uid, gid, cb);
- }
-};
-const chownr = (p, uid, gid, cb) => {
- node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => {
- // any error other than ENOTDIR or ENOTSUP means it's not readable,
- // or doesn't exist. give up.
- if (er) {
- if (er.code === 'ENOENT')
- return cb();
- else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
- return cb(er);
- }
- if (er || !children.length)
- return chown(p, uid, gid, cb);
- let len = children.length;
- let errState = null;
- const then = (er) => {
- /* c8 ignore start */
- if (errState)
- return;
- /* c8 ignore stop */
- if (er)
- return cb((errState = er));
- if (--len === 0)
- return chown(p, uid, gid, cb);
- };
- for (const child of children) {
- chownrKid(p, child, uid, gid, then);
- }
- });
-};
-exports.chownr = chownr;
-const chownrKidSync = (p, child, uid, gid) => {
- if (child.isDirectory())
- (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid);
- lchownSync(node_path_1.default.resolve(p, child.name), uid, gid);
-};
-const chownrSync = (p, uid, gid) => {
- let children;
- try {
- children = node_fs_1.default.readdirSync(p, { withFileTypes: true });
- }
- catch (er) {
- const e = er;
- if (e?.code === 'ENOENT')
- return;
- else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
- return lchownSync(p, uid, gid);
- else
- throw e;
- }
- for (const child of children) {
- chownrKidSync(p, child, uid, gid);
- }
- return lchownSync(p, uid, gid);
-};
-exports.chownrSync = chownrSync;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/esm/index.js b/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/esm/index.js
deleted file mode 100644
index 5c2815297a67cb..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/esm/index.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import fs from 'node:fs';
-import path from 'node:path';
-const lchownSync = (path, uid, gid) => {
- try {
- return fs.lchownSync(path, uid, gid);
- }
- catch (er) {
- if (er?.code !== 'ENOENT')
- throw er;
- }
-};
-const chown = (cpath, uid, gid, cb) => {
- fs.lchown(cpath, uid, gid, er => {
- // Skip ENOENT error
- cb(er && er?.code !== 'ENOENT' ? er : null);
- });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
- if (child.isDirectory()) {
- chownr(path.resolve(p, child.name), uid, gid, (er) => {
- if (er)
- return cb(er);
- const cpath = path.resolve(p, child.name);
- chown(cpath, uid, gid, cb);
- });
- }
- else {
- const cpath = path.resolve(p, child.name);
- chown(cpath, uid, gid, cb);
- }
-};
-export const chownr = (p, uid, gid, cb) => {
- fs.readdir(p, { withFileTypes: true }, (er, children) => {
- // any error other than ENOTDIR or ENOTSUP means it's not readable,
- // or doesn't exist. give up.
- if (er) {
- if (er.code === 'ENOENT')
- return cb();
- else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
- return cb(er);
- }
- if (er || !children.length)
- return chown(p, uid, gid, cb);
- let len = children.length;
- let errState = null;
- const then = (er) => {
- /* c8 ignore start */
- if (errState)
- return;
- /* c8 ignore stop */
- if (er)
- return cb((errState = er));
- if (--len === 0)
- return chown(p, uid, gid, cb);
- };
- for (const child of children) {
- chownrKid(p, child, uid, gid, then);
- }
- });
-};
-const chownrKidSync = (p, child, uid, gid) => {
- if (child.isDirectory())
- chownrSync(path.resolve(p, child.name), uid, gid);
- lchownSync(path.resolve(p, child.name), uid, gid);
-};
-export const chownrSync = (p, uid, gid) => {
- let children;
- try {
- children = fs.readdirSync(p, { withFileTypes: true });
- }
- catch (er) {
- const e = er;
- if (e?.code === 'ENOENT')
- return;
- else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
- return lchownSync(p, uid, gid);
- else
- throw e;
- }
- for (const child of children) {
- chownrKidSync(p, child, uid, gid);
- }
- return lchownSync(p, uid, gid);
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/esm/package.json b/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/chownr/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/chownr/package.json b/deps/npm/node_modules/node-gyp/node_modules/chownr/package.json
deleted file mode 100644
index 09aa6b2e2e576d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/chownr/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "name": "chownr",
- "description": "like `chown -R`",
- "version": "3.0.0",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/chownr.git"
- },
- "files": [
- "dist"
- ],
- "devDependencies": {
- "@types/node": "^20.12.5",
- "mkdirp": "^3.0.1",
- "prettier": "^3.2.5",
- "rimraf": "^5.0.5",
- "tap": "^18.7.2",
- "tshy": "^1.13.1",
- "typedoc": "^0.25.12"
- },
- "scripts": {
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "test": "tap",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "format": "prettier --write . --loglevel warn",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "license": "BlueOak-1.0.0",
- "engines": {
- "node": ">=18"
- },
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "main": "./dist/commonjs/index.js",
- "types": "./dist/commonjs/index.d.ts",
- "type": "module",
- "prettier": {
- "semi": false,
- "printWidth": 75,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 0a034db7a73b5d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/package.json
deleted file mode 100644
index 9d04a66e16cd93..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
- "name": "mkdirp",
- "description": "Recursively mkdir, like `mkdir -p`",
- "version": "3.0.1",
- "keywords": [
- "mkdir",
- "directory",
- "make dir",
- "make",
- "dir",
- "recursive",
- "native"
- ],
- "bin": "./dist/cjs/src/bin.js",
- "main": "./dist/cjs/src/index.js",
- "module": "./dist/mjs/index.js",
- "types": "./dist/mjs/index.d.ts",
- "exports": {
- ".": {
- "import": {
- "types": "./dist/mjs/index.d.ts",
- "default": "./dist/mjs/index.js"
- },
- "require": {
- "types": "./dist/cjs/src/index.d.ts",
- "default": "./dist/cjs/src/index.js"
- }
- }
- },
- "files": [
- "dist"
- ],
- "scripts": {
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "preprepare": "rm -rf dist",
- "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
- "postprepare": "bash fixup.sh",
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "test": "c8 tap",
- "snap": "c8 tap",
- "format": "prettier --write . --loglevel warn",
- "benchmark": "node benchmark/index.js",
- "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
- },
- "prettier": {
- "semi": false,
- "printWidth": 80,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- },
- "devDependencies": {
- "@types/brace-expansion": "^1.1.0",
- "@types/node": "^18.11.9",
- "@types/tap": "^15.0.7",
- "c8": "^7.12.0",
- "eslint-config-prettier": "^8.6.0",
- "prettier": "^2.8.2",
- "tap": "^16.3.3",
- "ts-node": "^10.9.1",
- "typedoc": "^0.23.21",
- "typescript": "^4.9.3"
- },
- "tap": {
- "coverage": false,
- "node-arg": [
- "--no-warnings",
- "--loader",
- "ts-node/esm"
- ],
- "ts": false
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/isaacs/node-mkdirp.git"
- },
- "license": "MIT",
- "engines": {
- "node": ">=10"
- }
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.d.ts
deleted file mode 100644
index 34e005228653c8..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env node
-export {};
-//# sourceMappingURL=bin.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map
deleted file mode 100644
index c10c656ec75109..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"bin.d.ts","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js
deleted file mode 100755
index 757aae1fd96cb2..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env node
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const package_json_1 = require("../package.json");
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
- Create each supplied directory including any necessary parent directories
- that don't yet exist.
-
- If the directory already exists, do nothing.
-
-OPTIONS are:
-
- -m If a directory needs to be created, set the mode as an octal
- --mode= permission string.
-
- -v --version Print the mkdirp version number
-
- -h --help Print this helpful banner
-
- -p --print Print the first directories created for each path provided
-
- --manual Use manual implementation, even if native is available
-`;
-const dirs = [];
-const opts = {};
-let doPrint = false;
-let dashdash = false;
-let manual = false;
-for (const arg of process.argv.slice(2)) {
- if (dashdash)
- dirs.push(arg);
- else if (arg === '--')
- dashdash = true;
- else if (arg === '--manual')
- manual = true;
- else if (/^-h/.test(arg) || /^--help/.test(arg)) {
- console.log(usage());
- process.exit(0);
- }
- else if (arg === '-v' || arg === '--version') {
- console.log(package_json_1.version);
- process.exit(0);
- }
- else if (arg === '-p' || arg === '--print') {
- doPrint = true;
- }
- else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
- // these don't get covered in CI, but work locally
- // weird because the tests below show as passing in the output.
- /* c8 ignore start */
- const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
- if (isNaN(mode)) {
- console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
- process.exit(1);
- }
- /* c8 ignore stop */
- opts.mode = mode;
- }
- else
- dirs.push(arg);
-}
-const index_js_1 = require("./index.js");
-const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
-if (dirs.length === 0) {
- console.error(usage());
-}
-// these don't get covered in CI, but work locally
-/* c8 ignore start */
-Promise.all(dirs.map(dir => impl(dir, opts)))
- .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
- .catch(er => {
- console.error(er.message);
- if (er.code)
- console.error(' code: ' + er.code);
- process.exit(1);
-});
-/* c8 ignore stop */
-//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js.map
deleted file mode 100644
index d99295301b5fa7..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AAEA,kDAAyC;AAGzC,MAAM,KAAK,GAAG,GAAG,EAAE,CAAC;;;;;;;;;;;;;;;;;;;;CAoBnB,CAAA;AAED,MAAM,IAAI,GAAa,EAAE,CAAA;AACzB,MAAM,IAAI,GAAkB,EAAE,CAAA;AAC9B,IAAI,OAAO,GAAY,KAAK,CAAA;AAC5B,IAAI,QAAQ,GAAG,KAAK,CAAA;AACpB,IAAI,MAAM,GAAG,KAAK,CAAA;AAClB,KAAK,MAAM,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACvC,IAAI,QAAQ;QAAE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACvB,IAAI,GAAG,KAAK,IAAI;QAAE,QAAQ,GAAG,IAAI,CAAA;SACjC,IAAI,GAAG,KAAK,UAAU;QAAE,MAAM,GAAG,IAAI,CAAA;SACrC,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAC/C,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;SAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,WAAW,EAAE;QAC9C,OAAO,CAAC,GAAG,CAAC,sBAAO,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;SAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,SAAS,EAAE;QAC5C,OAAO,GAAG,IAAI,CAAA;KACf;SAAM,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAClD,kDAAkD;QAClD,+DAA+D;QAC/D,qBAAqB;QACrB,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QAC1D,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE;YACf,OAAO,CAAC,KAAK,CAAC,0BAA0B,GAAG,4BAA4B,CAAC,CAAA;YACxE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SAChB;QACD,oBAAoB;QACpB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;KACjB;;QAAM,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;CACtB;AAED,yCAAmC;AACnC,MAAM,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,iBAAM,CAAC,MAAM,CAAC,CAAC,CAAC,iBAAM,CAAA;AAC5C,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;IACrB,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAA;CACvB;AAED,kDAAkD;AAClD,qBAAqB;AACrB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;KAC1C,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;KACvE,KAAK,CAAC,EAAE,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,CAAA;IACzB,IAAI,EA
AE,CAAC,IAAI;QAAE,OAAO,CAAC,KAAK,CAAC,UAAU,GAAG,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC,CAAC,CAAA;AACJ,oBAAoB","sourcesContent":["#!/usr/bin/env node\n\nimport { version } from '../package.json'\nimport { MkdirpOptions } from './opts-arg.js'\n\nconst usage = () => `\nusage: mkdirp [DIR1,DIR2..] {OPTIONS}\n\n Create each supplied directory including any necessary parent directories\n that don't yet exist.\n\n If the directory already exists, do nothing.\n\nOPTIONS are:\n\n -m If a directory needs to be created, set the mode as an octal\n --mode= permission string.\n\n -v --version Print the mkdirp version number\n\n -h --help Print this helpful banner\n\n -p --print Print the first directories created for each path provided\n\n --manual Use manual implementation, even if native is available\n`\n\nconst dirs: string[] = []\nconst opts: MkdirpOptions = {}\nlet doPrint: boolean = false\nlet dashdash = false\nlet manual = false\nfor (const arg of process.argv.slice(2)) {\n if (dashdash) dirs.push(arg)\n else if (arg === '--') dashdash = true\n else if (arg === '--manual') manual = true\n else if (/^-h/.test(arg) || /^--help/.test(arg)) {\n console.log(usage())\n process.exit(0)\n } else if (arg === '-v' || arg === '--version') {\n console.log(version)\n process.exit(0)\n } else if (arg === '-p' || arg === '--print') {\n doPrint = true\n } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {\n // these don't get covered in CI, but work locally\n // weird because the tests below show as passing in the output.\n /* c8 ignore start */\n const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)\n if (isNaN(mode)) {\n console.error(`invalid mode argument: ${arg}\\nMust be an octal number.`)\n process.exit(1)\n }\n /* c8 ignore stop */\n opts.mode = mode\n } else dirs.push(arg)\n}\n\nimport { mkdirp } from './index.js'\nconst impl = manual ? 
mkdirp.manual : mkdirp\nif (dirs.length === 0) {\n console.error(usage())\n}\n\n// these don't get covered in CI, but work locally\n/* c8 ignore start */\nPromise.all(dirs.map(dir => impl(dir, opts)))\n .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))\n .catch(er => {\n console.error(er.message)\n if (er.code) console.error(' code: ' + er.code)\n process.exit(1)\n })\n/* c8 ignore stop */\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.d.ts
deleted file mode 100644
index e47794b3bb72a3..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { MkdirpOptionsResolved } from './opts-arg.js';
-export declare const findMade: (opts: MkdirpOptionsResolved, parent: string, path?: string) => Promise;
-export declare const findMadeSync: (opts: MkdirpOptionsResolved, parent: string, path?: string) => undefined | string;
-//# sourceMappingURL=find-made.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map
deleted file mode 100644
index 00d5d1a4dbefdf..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"find-made.d.ts","sourceRoot":"","sources":["../../../src/find-made.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAA;AAErD,eAAO,MAAM,QAAQ,SACb,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,QAAQ,SAAS,GAAG,MAAM,CAe5B,CAAA;AAED,eAAO,MAAM,YAAY,SACjB,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,SAAS,GAAG,MAad,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js
deleted file mode 100644
index e831ef27cadc1d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.findMadeSync = exports.findMade = void 0;
-const path_1 = require("path");
-const findMade = async (opts, parent, path) => {
- // we never want the 'made' return value to be a root directory
- if (path === parent) {
- return;
- }
- return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
- // will fail later
- er => {
- const fer = er;
- return fer && fer.code === 'ENOENT'
- ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
- : undefined;
- });
-};
-exports.findMade = findMade;
-const findMadeSync = (opts, parent, path) => {
- if (path === parent) {
- return undefined;
- }
- try {
- return opts.statSync(parent).isDirectory() ? path : undefined;
- }
- catch (er) {
- const fer = er;
- return fer && fer.code === 'ENOENT'
- ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
- : undefined;
- }
-};
-exports.findMadeSync = findMadeSync;
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js.map
deleted file mode 100644
index 30a0d66398878d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"find-made.js","sourceRoot":"","sources":["../../../src/find-made.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAGvB,MAAM,QAAQ,GAAG,KAAK,EAC3B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACgB,EAAE;IAC/B,+DAA+D;IAC/D,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAM;KACP;IAED,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAChC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,kBAAkB;IAC/D,AAD6C,kBAAkB;IAC/D,EAAE,CAAC,EAAE;QACH,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,IAAA,gBAAQ,EAAC,IAAI,EAAE,IAAA,cAAO,EAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YACzC,CAAC,CAAC,SAAS,CAAA;IACf,CAAC,CACF,CAAA;AACH,CAAC,CAAA;AAnBY,QAAA,QAAQ,YAmBpB;AAEM,MAAM,YAAY,GAAG,CAC1B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACO,EAAE;IACtB,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAO,SAAS,CAAA;KACjB;IAED,IAAI;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAA;KAC9D;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,IAAA,oBAAY,EAAC,IAAI,EAAE,IAAA,cAAO,EAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YAC7C,CAAC,CAAC,SAAS,CAAA;KACd;AACH,CAAC,CAAA;AAjBY,QAAA,YAAY,gBAiBxB","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptionsResolved } from './opts-arg.js'\n\nexport const findMade = async (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): Promise => {\n // we never want the 'made' return value to be a root directory\n if (path === parent) {\n return\n }\n\n return opts.statAsync(parent).then(\n st => (st.isDirectory() ? path : undefined), // will fail later\n er => {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? 
findMade(opts, dirname(parent), parent)\n : undefined\n }\n )\n}\n\nexport const findMadeSync = (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): undefined | string => {\n if (path === parent) {\n return undefined\n }\n\n try {\n return opts.statSync(parent).isDirectory() ? path : undefined\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? findMadeSync(opts, dirname(parent), parent)\n : undefined\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.d.ts
deleted file mode 100644
index fc9e43b3a45de1..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.d.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-export declare const mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void;
-export declare const sync: (path: string, opts?: MkdirpOptions) => string | void;
-export declare const manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
-};
-export declare const manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
-export declare const native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
-};
-export declare const nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
-export declare const mkdirp: ((path: string, opts?: MkdirpOptions) => Promise) & {
- mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void;
- mkdirpNative: ((path: string, options?: MkdirpOptions | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- };
- mkdirpNativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- mkdirpManual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- };
- mkdirpManualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- sync: (path: string, opts?: MkdirpOptions) => string | void;
- native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- };
- nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- };
- manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- useNative: ((opts?: MkdirpOptions | undefined) => boolean) & {
- sync: (opts?: MkdirpOptions | undefined) => boolean;
- };
- useNativeSync: (opts?: MkdirpOptions | undefined) => boolean;
-};
-//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.d.ts.map
deleted file mode 100644
index 0e915bbc9a0c7a..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAItD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAG1D,eAAO,MAAM,UAAU,SAAU,MAAM,SAAS,aAAa,kBAM5D,CAAA;AAED,eAAO,MAAM,IAAI,SARgB,MAAM,SAAS,aAAa,kBAQ/B,CAAA;AAC9B,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,oHAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,kFAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM,UACJ,MAAM,SAAS,aAAa;uBAdV,MAAM,SAAS,aAAa;;;;;;;;;iBAA5B,MAAM,SAAS,aAAa;;;;;;;;;;;;;CAoC5D,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js
deleted file mode 100644
index ab9dc62cddda36..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const mkdirp_native_js_1 = require("./mkdirp-native.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const path_arg_js_1 = require("./path-arg.js");
-const use_native_js_1 = require("./use-native.js");
-/* c8 ignore start */
-var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
-Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
-Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
-var mkdirp_native_js_2 = require("./mkdirp-native.js");
-Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
-Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
-var use_native_js_2 = require("./use-native.js");
-Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
-Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
-/* c8 ignore stop */
-const mkdirpSync = (path, opts) => {
- path = (0, path_arg_js_1.pathArg)(path);
- const resolved = (0, opts_arg_js_1.optsArg)(opts);
- return (0, use_native_js_1.useNativeSync)(resolved)
- ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
- : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
-};
-exports.mkdirpSync = mkdirpSync;
-exports.sync = exports.mkdirpSync;
-exports.manual = mkdirp_manual_js_1.mkdirpManual;
-exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
-exports.native = mkdirp_native_js_1.mkdirpNative;
-exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
-exports.mkdirp = Object.assign(async (path, opts) => {
- path = (0, path_arg_js_1.pathArg)(path);
- const resolved = (0, opts_arg_js_1.optsArg)(opts);
- return (0, use_native_js_1.useNative)(resolved)
- ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
- : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
-}, {
- mkdirpSync: exports.mkdirpSync,
- mkdirpNative: mkdirp_native_js_1.mkdirpNative,
- mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
- mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
- mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
- sync: exports.mkdirpSync,
- native: mkdirp_native_js_1.mkdirpNative,
- nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
- manual: mkdirp_manual_js_1.mkdirpManual,
- manualSync: mkdirp_manual_js_1.mkdirpManualSync,
- useNative: use_native_js_1.useNative,
- useNativeSync: use_native_js_1.useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js.map
deleted file mode 100644
index fdb572677a98ef..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":";;;AAAA,yDAAmE;AACnE,yDAAmE;AACnE,+CAAsD;AACtD,+CAAuC;AACvC,mDAA0D;AAC1D,qBAAqB;AACrB,uDAAmE;AAA1D,gHAAA,YAAY,OAAA;AAAE,oHAAA,gBAAgB,OAAA;AACvC,uDAAmE;AAA1D,gHAAA,YAAY,OAAA;AAAE,oHAAA,gBAAgB,OAAA;AACvC,iDAA0D;AAAjD,0GAAA,SAAS,OAAA;AAAE,8GAAA,aAAa,OAAA;AACjC,oBAAoB;AAEb,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC/D,IAAI,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,IAAA,6BAAa,EAAC,QAAQ,CAAC;QAC5B,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,QAAQ,CAAC;QAClC,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AACtC,CAAC,CAAA;AANY,QAAA,UAAU,cAMtB;AAEY,QAAA,IAAI,GAAG,kBAAU,CAAA;AACjB,QAAA,MAAM,GAAG,+BAAY,CAAA;AACrB,QAAA,UAAU,GAAG,mCAAgB,CAAA;AAC7B,QAAA,MAAM,GAAG,+BAAY,CAAA;AACrB,QAAA,UAAU,GAAG,mCAAgB,CAAA;AAC7B,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CACjC,KAAK,EAAE,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC3C,IAAI,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,IAAA,yBAAS,EAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,QAAQ,CAAC;QAC9B,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC,EACD;IACE,UAAU,EAAV,kBAAU;IACV,YAAY,EAAZ,+BAAY;IACZ,gBAAgB,EAAhB,mCAAgB;IAChB,YAAY,EAAZ,+BAAY;IACZ,gBAAgB,EAAhB,mCAAgB;IAEhB,IAAI,EAAE,kBAAU;IAChB,MAAM,EAAE,+BAAY;IACpB,UAAU,EAAE,mCAAgB;IAC5B,MAAM,EAAE,+BAAY;IACpB,UAAU,EAAE,mCAAgB;IAC5B,SAAS,EAAT,yBAAS;IACT,aAAa,EAAb,6BAAa;CACd,CACF,CAAA","sourcesContent":["import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\nimport { pathArg } from './path-arg.js'\nimport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore start */\nexport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nexport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nexport { useNative, 
useNativeSync } from './use-native.js'\n/* c8 ignore stop */\n\nexport const mkdirpSync = (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNativeSync(resolved)\n ? mkdirpNativeSync(path, resolved)\n : mkdirpManualSync(path, resolved)\n}\n\nexport const sync = mkdirpSync\nexport const manual = mkdirpManual\nexport const manualSync = mkdirpManualSync\nexport const native = mkdirpNative\nexport const nativeSync = mkdirpNativeSync\nexport const mkdirp = Object.assign(\n async (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNative(resolved)\n ? mkdirpNative(path, resolved)\n : mkdirpManual(path, resolved)\n },\n {\n mkdirpSync,\n mkdirpNative,\n mkdirpNativeSync,\n mkdirpManual,\n mkdirpManualSync,\n\n sync: mkdirpSync,\n native: mkdirpNative,\n nativeSync: mkdirpNativeSync,\n manual: mkdirpManual,\n manualSync: mkdirpManualSync,\n useNative,\n useNativeSync,\n }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts
deleted file mode 100644
index e49cdf9f1bd122..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export declare const mkdirpManualSync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void;
-export declare const mkdirpManual: ((path: string, options?: MkdirpOptions, made?: string | undefined | void) => Promise) & {
- sync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void;
-};
-//# sourceMappingURL=mkdirp-manual.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map
deleted file mode 100644
index 9301bab1ffb35b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-manual.d.ts","sourceRoot":"","sources":["../../../src/mkdirp-manual.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAmCvB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,QAAQ,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;iBA7C/B,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAAI;CAqF3B,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
deleted file mode 100644
index d9bd1d8bb5a49b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
+++ /dev/null
@@ -1,79 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpManual = exports.mkdirpManualSync = void 0;
-const path_1 = require("path");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpManualSync = (path, options, made) => {
- const parent = (0, path_1.dirname)(path);
- const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
- if (parent === path) {
- try {
- return opts.mkdirSync(path, opts);
- }
- catch (er) {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- const fer = er;
- if (fer && fer.code !== 'EISDIR') {
- throw er;
- }
- return;
- }
- }
- try {
- opts.mkdirSync(path, opts);
- return made || path;
- }
- catch (er) {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
- }
- if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
- throw er;
- }
- try {
- if (!opts.statSync(path).isDirectory())
- throw er;
- }
- catch (_) {
- throw er;
- }
- }
-};
-exports.mkdirpManualSync = mkdirpManualSync;
-exports.mkdirpManual = Object.assign(async (path, options, made) => {
- const opts = (0, opts_arg_js_1.optsArg)(options);
- opts.recursive = false;
- const parent = (0, path_1.dirname)(path);
- if (parent === path) {
- return opts.mkdirAsync(path, opts).catch(er => {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- const fer = er;
- if (fer && fer.code !== 'EISDIR') {
- throw er;
- }
- });
- }
- return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
- }
- if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
- throw er;
- }
- return opts.statAsync(path).then(st => {
- if (st.isDirectory()) {
- return made;
- }
- else {
- throw er;
- }
- }, () => {
- throw er;
- });
- });
-}, { sync: exports.mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map
deleted file mode 100644
index ff7ba24dca32ad..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-manual.js","sourceRoot":"","sources":["../../../src/mkdirp-manual.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAC9B,+CAAsD;AAE/C,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACL,EAAE;IAC7B,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,IAAI,GAAG,EAAE,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,CAAA;IAEtD,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,IAAI;YACF,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAClC;QAAC,OAAO,EAAE,EAAE;YACX,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;YACD,OAAM;SACP;KACF;IAED,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,IAAI,IAAI,CAAA;KACpB;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,wBAAgB,EAAC,IAAI,EAAE,IAAI,EAAE,IAAA,wBAAgB,EAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;SAC1E;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YAC/D,MAAM,EAAE,CAAA;SACT;QACD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;gBAAE,MAAM,EAAE,CAAA;SACjD;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAvCY,QAAA,gBAAgB,oBAuC5B;AAEY,QAAA,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACI,EAAE;IACtC,MAAM,IAAI,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,KAAK,CAAA;IACtB,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;YAC5C,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAC,CAAA;KACH;IAED,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CACrC,GAAG,EAAE,CAAC,IAAI,IAAI,IAAI,EAClB,KAAK,EAAC,EAAE,EAAC,EAAE;QACT,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,oBAAY,EAAC,MAAM,EAAE,IAAI,
CAAC,CAAC,IAAI,CACpC,CAAC,IAAgC,EAAE,EAAE,CAAC,IAAA,oBAAY,EAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CACrE,CAAA;SACF;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YACxD,MAAM,EAAE,CAAA;SACT;QACD,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,EAAE,CAAC,EAAE;YACH,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE;gBACpB,OAAO,IAAI,CAAA;aACZ;iBAAM;gBACL,MAAM,EAAE,CAAA;aACT;QACH,CAAC,EACD,GAAG,EAAE;YACH,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC,CACF,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,wBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpManualSync = (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n): string | undefined | void => {\n const parent = dirname(path)\n const opts = { ...optsArg(options), recursive: false }\n\n if (parent === path) {\n try {\n return opts.mkdirSync(path, opts)\n } catch (er) {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n return\n }\n }\n\n try {\n opts.mkdirSync(path, opts)\n return made || path\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))\n }\n if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {\n throw er\n }\n try {\n if (!opts.statSync(path).isDirectory()) throw er\n } catch (_) {\n throw er\n }\n }\n}\n\nexport const mkdirpManual = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n ): Promise => {\n const opts = optsArg(options)\n opts.recursive = false\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirAsync(path, opts).catch(er => {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = 
er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n })\n }\n\n return opts.mkdirAsync(path, opts).then(\n () => made || path,\n async er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(parent, opts).then(\n (made?: string | undefined | void) => mkdirpManual(path, opts, made)\n )\n }\n if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {\n throw er\n }\n return opts.statAsync(path).then(\n st => {\n if (st.isDirectory()) {\n return made\n } else {\n throw er\n }\n },\n () => {\n throw er\n }\n )\n }\n )\n },\n { sync: mkdirpManualSync }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts
deleted file mode 100644
index 28b64814b2545a..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export declare const mkdirpNativeSync: (path: string, options?: MkdirpOptions) => string | void | undefined;
-export declare const mkdirpNative: ((path: string, options?: MkdirpOptions) => Promise) & {
- sync: (path: string, options?: MkdirpOptions) => string | void | undefined;
-};
-//# sourceMappingURL=mkdirp-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map
deleted file mode 100644
index 379c0f6591c686..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-native.d.ts","sourceRoot":"","sources":["../../../src/mkdirp-native.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAoBlB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,KACtB,QAAQ,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;iBA5B/B,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAAS;CAgD3B,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
deleted file mode 100644
index 9f00567d7cc200..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
-const path_1 = require("path");
-const find_made_js_1 = require("./find-made.js");
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpNativeSync = (path, options) => {
- const opts = (0, opts_arg_js_1.optsArg)(options);
- opts.recursive = true;
- const parent = (0, path_1.dirname)(path);
- if (parent === path) {
- return opts.mkdirSync(path, opts);
- }
- const made = (0, find_made_js_1.findMadeSync)(opts, path);
- try {
- opts.mkdirSync(path, opts);
- return made;
- }
- catch (er) {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
- }
- else {
- throw er;
- }
- }
-};
-exports.mkdirpNativeSync = mkdirpNativeSync;
-exports.mkdirpNative = Object.assign(async (path, options) => {
- const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
- const parent = (0, path_1.dirname)(path);
- if (parent === path) {
- return await opts.mkdirAsync(path, opts);
- }
- return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
- .mkdirAsync(path, opts)
- .then(m => made || m)
- .catch(er => {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
- }
- else {
- throw er;
- }
- }));
-}, { sync: exports.mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map
deleted file mode 100644
index 1f889ee98876cc..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-native.js","sourceRoot":"","sources":["../../../src/mkdirp-native.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAC9B,iDAAuD;AACvD,yDAAmE;AACnE,+CAAsD;AAE/C,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACI,EAAE;IAC7B,MAAM,IAAI,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACrB,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAClC;IAED,MAAM,IAAI,GAAG,IAAA,2BAAY,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;IACrC,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,CAAA;KACZ;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,mCAAgB,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SACpC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAvBY,QAAA,gBAAgB,oBAuB5B;AAEY,QAAA,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACa,EAAE;IACtC,MAAM,IAAI,GAAG,EAAE,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAA;IACrD,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KACzC;IAED,OAAO,IAAA,uBAAQ,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,IAAyB,EAAE,EAAE,CAC7D,IAAI;SACD,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC;SACtB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC;SACpB,KAAK,CAAC,EAAE,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,+BAAY,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAChC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;IACH,CAAC,CAAC,CACL,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,wBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { findMade, findMadeSync } from './find-made.js'\nimport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpNativeSync = (\n path: string,\n options?: MkdirpOptions\n): string | void | undefined => {\n const opts = 
optsArg(options)\n opts.recursive = true\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirSync(path, opts)\n }\n\n const made = findMadeSync(opts, path)\n try {\n opts.mkdirSync(path, opts)\n return made\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts)\n } else {\n throw er\n }\n }\n}\n\nexport const mkdirpNative = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions\n ): Promise => {\n const opts = { ...optsArg(options), recursive: true }\n const parent = dirname(path)\n if (parent === path) {\n return await opts.mkdirAsync(path, opts)\n }\n\n return findMade(opts, path).then((made?: string | undefined) =>\n opts\n .mkdirAsync(path, opts)\n .then(m => made || m)\n .catch(er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(path, opts)\n } else {\n throw er\n }\n })\n )\n },\n { sync: mkdirpNativeSync }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts
deleted file mode 100644
index 73d076b3b6923c..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-///
-///
-import { MakeDirectoryOptions, Stats } from 'fs';
-export interface FsProvider {
- stat?: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any;
- mkdir?: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any;
- statSync?: (path: string) => Stats;
- mkdirSync?: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => string | undefined;
-}
-interface Options extends FsProvider {
- mode?: number | string;
- fs?: FsProvider;
- mkdirAsync?: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => Promise;
- statAsync?: (path: string) => Promise;
-}
-export type MkdirpOptions = Options | number | string;
-export interface MkdirpOptionsResolved {
- mode: number;
- fs: FsProvider;
- mkdirAsync: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => Promise;
- statAsync: (path: string) => Promise;
- stat: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any;
- mkdir: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any;
- statSync: (path: string) => Stats;
- mkdirSync: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => string | undefined;
- recursive?: boolean;
-}
-export declare const optsArg: (opts?: MkdirpOptions) => MkdirpOptionsResolved;
-export {};
-//# sourceMappingURL=opts-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map
deleted file mode 100644
index e575161714f651..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opts-arg.d.ts","sourceRoot":"","sources":["../../../src/opts-arg.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,oBAAoB,EAIpB,KAAK,EAEN,MAAM,IAAI,CAAA;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,CAAC,EAAE,CACN,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IAClC,SAAS,CAAC,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;CACxB;AAED,UAAU,OAAQ,SAAQ,UAAU;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,UAAU,CAAA;IACf,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;CAC7C;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,CAAA;AAErD,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAA;IACZ,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;IAC3C,IAAI,EAAE,CACJ,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IACjC,SAAS,EAAE,CACT,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB;AAED,eAAO,MAAM,OAAO,UAAW,aAAa,KAAG,qBA2C9C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js
deleted file mode 100644
index e8f486c0905957..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.optsArg = void 0;
-const fs_1 = require("fs");
-const optsArg = (opts) => {
- if (!opts) {
- opts = { mode: 0o777 };
- }
- else if (typeof opts === 'object') {
- opts = { mode: 0o777, ...opts };
- }
- else if (typeof opts === 'number') {
- opts = { mode: opts };
- }
- else if (typeof opts === 'string') {
- opts = { mode: parseInt(opts, 8) };
- }
- else {
- throw new TypeError('invalid options argument');
- }
- const resolved = opts;
- const optsFs = opts.fs || {};
- opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
- opts.mkdirAsync = opts.mkdirAsync
- ? opts.mkdirAsync
- : async (path, options) => {
- return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
- };
- opts.stat = opts.stat || optsFs.stat || fs_1.stat;
- opts.statAsync = opts.statAsync
- ? opts.statAsync
- : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
- opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
- opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
- return resolved;
-};
-exports.optsArg = optsArg;
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map
deleted file mode 100644
index fd5590f40f54cd..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opts-arg.js","sourceRoot":"","sources":["../../../src/opts-arg.ts"],"names":[],"mappings":";;;AAAA,2BAOW;AAwDJ,MAAM,OAAO,GAAG,CAAC,IAAoB,EAAyB,EAAE;IACrE,IAAI,CAAC,IAAI,EAAE;QACT,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,CAAA;KACvB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,CAAA;KAChC;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,CAAA;KACtB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAA;KACnC;SAAM;QACL,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAA;KAChD;IAED,MAAM,QAAQ,GAAG,IAA6B,CAAA;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;IAE5B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,UAAK,CAAA;IAEhD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU;QAC/B,CAAC,CAAC,IAAI,CAAC,UAAU;QACjB,CAAC,CAAC,KAAK,EACH,IAAY,EACZ,OAAuD,EAC1B,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAqB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAClD,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACzC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;QACH,CAAC,CAAA;IAEL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,IAAI,SAAI,CAAA;IAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS;QAC7B,CAAC,CAAC,IAAI,CAAC,SAAS;QAChB,CAAC,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE,CACrB,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,CAAA;IAEP,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,QAAQ,IAAI,aAAQ,CAAA;IAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,SAAS,IAAI,cAAS,CAAA;IAEhE,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AA3CY,QAAA,OAAO,WA2CnB","sourcesContent":["import {\n MakeDirectoryOptions,\n mkdir,\n mkdirSync,\n stat,\n Stats,\n statSync,\n} from 'fs'\n\nexport interface FsProvider {\n stat?: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n 
) => any\n mkdir?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync?: (path: string) => Stats\n mkdirSync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n}\n\ninterface Options extends FsProvider {\n mode?: number | string\n fs?: FsProvider\n mkdirAsync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync?: (path: string) => Promise\n}\n\nexport type MkdirpOptions = Options | number | string\n\nexport interface MkdirpOptionsResolved {\n mode: number\n fs: FsProvider\n mkdirAsync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync: (path: string) => Promise\n stat: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync: (path: string) => Stats\n mkdirSync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n recursive?: boolean\n}\n\nexport const optsArg = (opts?: MkdirpOptions): MkdirpOptionsResolved => {\n if (!opts) {\n opts = { mode: 0o777 }\n } else if (typeof opts === 'object') {\n opts = { mode: 0o777, ...opts }\n } else if (typeof opts === 'number') {\n opts = { mode: opts }\n } else if (typeof opts === 'string') {\n opts = { mode: parseInt(opts, 8) }\n } else {\n throw new TypeError('invalid options argument')\n }\n\n const resolved = opts as MkdirpOptionsResolved\n const optsFs = opts.fs || {}\n\n opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir\n\n opts.mkdirAsync = opts.mkdirAsync\n ? 
opts.mkdirAsync\n : async (\n path: string,\n options: MakeDirectoryOptions & { recursive?: boolean }\n ): Promise => {\n return new Promise((res, rej) =>\n resolved.mkdir(path, options, (er, made) =>\n er ? rej(er) : res(made)\n )\n )\n }\n\n opts.stat = opts.stat || optsFs.stat || stat\n opts.statAsync = opts.statAsync\n ? opts.statAsync\n : async (path: string) =>\n new Promise((res, rej) =>\n resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats)))\n )\n\n opts.statSync = opts.statSync || optsFs.statSync || statSync\n opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync\n\n return resolved\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts
deleted file mode 100644
index ad0ccfc482a485..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export declare const pathArg: (path: string) => string;
-//# sourceMappingURL=path-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map
deleted file mode 100644
index 3b52b077c6c05c..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../../src/path-arg.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,SAAU,MAAM,WAyBnC,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js
deleted file mode 100644
index a6b457f6e23d58..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.pathArg = void 0;
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-const path_1 = require("path");
-const pathArg = (path) => {
- if (/\0/.test(path)) {
- // simulate same failure that node raises
- throw Object.assign(new TypeError('path must be a string without null bytes'), {
- path,
- code: 'ERR_INVALID_ARG_VALUE',
- });
- }
- path = (0, path_1.resolve)(path);
- if (platform === 'win32') {
- const badWinChars = /[*|"<>?:]/;
- const { root } = (0, path_1.parse)(path);
- if (badWinChars.test(path.substring(root.length))) {
- throw Object.assign(new Error('Illegal characters in path.'), {
- path,
- code: 'EINVAL',
- });
- }
- }
- return path;
-};
-exports.pathArg = pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js.map
deleted file mode 100644
index ad3b5d38cad3cd..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../../src/path-arg.ts"],"names":[],"mappings":";;;AAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA;AAC5E,+BAAqC;AAC9B,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnB,yCAAyC;QACzC,MAAM,MAAM,CAAC,MAAM,CACjB,IAAI,SAAS,CAAC,0CAA0C,CAAC,EACzD;YACE,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CACF,CAAA;KACF;IAED,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IACpB,IAAI,QAAQ,KAAK,OAAO,EAAE;QACxB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;YACjD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;SACH;KACF;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAzBY,QAAA,OAAO,WAyBnB","sourcesContent":["const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform\nimport { parse, resolve } from 'path'\nexport const pathArg = (path: string) => {\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n throw Object.assign(\n new TypeError('path must be a string without null bytes'),\n {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n }\n )\n }\n\n path = resolve(path)\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.d.ts
deleted file mode 100644
index 1c6cb619e30405..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export declare const useNativeSync: (opts?: MkdirpOptions) => boolean;
-export declare const useNative: ((opts?: MkdirpOptions) => boolean) & {
- sync: (opts?: MkdirpOptions) => boolean;
-};
-//# sourceMappingURL=use-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map
deleted file mode 100644
index 7dc275e322ea3b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAMtD,eAAO,MAAM,aAAa,UAEd,aAAa,YAA0C,CAAA;AAEnE,eAAO,MAAM,SAAS,WAGR,aAAa;kBALf,aAAa;CASxB,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js
deleted file mode 100644
index 550b3452688ee5..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.useNative = exports.useNativeSync = void 0;
-const fs_1 = require("fs");
-const opts_arg_js_1 = require("./opts-arg.js");
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-exports.useNativeSync = !hasNative
- ? () => false
- : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
-exports.useNative = Object.assign(!hasNative
- ? () => false
- : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
- sync: exports.useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js.map
deleted file mode 100644
index 9a15efebb9ec28..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../../src/use-native.ts"],"names":[],"mappings":";;;AAAA,2BAAqC;AACrC,+CAAsD;AAEtD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AACpD,MAAM,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;AAElE,QAAA,aAAa,GAAG,CAAC,SAAS;IACrC,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAC,SAAS,KAAK,cAAS,CAAA;AAEtD,QAAA,SAAS,GAAG,MAAM,CAAC,MAAM,CACpC,CAAC,SAAS;IACR,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAC,KAAK,KAAK,UAAK,EAC3D;IACE,IAAI,EAAE,qBAAa;CACpB,CACF,CAAA","sourcesContent":["import { mkdir, mkdirSync } from 'fs'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nconst version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\nconst hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12)\n\nexport const useNativeSync = !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdirSync === mkdirSync\n\nexport const useNative = Object.assign(\n !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdir === mkdir,\n {\n sync: useNativeSync,\n }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.d.ts
deleted file mode 100644
index e47794b3bb72a3..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.d.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import { MkdirpOptionsResolved } from './opts-arg.js';
-export declare const findMade: (opts: MkdirpOptionsResolved, parent: string, path?: string) => Promise;
-export declare const findMadeSync: (opts: MkdirpOptionsResolved, parent: string, path?: string) => undefined | string;
-//# sourceMappingURL=find-made.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.d.ts.map
deleted file mode 100644
index 411aad1410eb7a..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"find-made.d.ts","sourceRoot":"","sources":["../../src/find-made.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAA;AAErD,eAAO,MAAM,QAAQ,SACb,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,QAAQ,SAAS,GAAG,MAAM,CAe5B,CAAA;AAED,eAAO,MAAM,YAAY,SACjB,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,SAAS,GAAG,MAad,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js
deleted file mode 100644
index 3e72fd59a2c1fb..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { dirname } from 'path';
-export const findMade = async (opts, parent, path) => {
- // we never want the 'made' return value to be a root directory
- if (path === parent) {
- return;
- }
- return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
- // will fail later
- er => {
- const fer = er;
- return fer && fer.code === 'ENOENT'
- ? findMade(opts, dirname(parent), parent)
- : undefined;
- });
-};
-export const findMadeSync = (opts, parent, path) => {
- if (path === parent) {
- return undefined;
- }
- try {
- return opts.statSync(parent).isDirectory() ? path : undefined;
- }
- catch (er) {
- const fer = er;
- return fer && fer.code === 'ENOENT'
- ? findMadeSync(opts, dirname(parent), parent)
- : undefined;
- }
-};
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js.map
deleted file mode 100644
index 7b58089c6266c1..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"find-made.js","sourceRoot":"","sources":["../../src/find-made.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAG9B,MAAM,CAAC,MAAM,QAAQ,GAAG,KAAK,EAC3B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACgB,EAAE;IAC/B,+DAA+D;IAC/D,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAM;KACP;IAED,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAChC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,kBAAkB;IAC/D,AAD6C,kBAAkB;IAC/D,EAAE,CAAC,EAAE;QACH,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YACzC,CAAC,CAAC,SAAS,CAAA;IACf,CAAC,CACF,CAAA;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,CAC1B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACO,EAAE;IACtB,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAO,SAAS,CAAA;KACjB;IAED,IAAI;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAA;KAC9D;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YAC7C,CAAC,CAAC,SAAS,CAAA;KACd;AACH,CAAC,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptionsResolved } from './opts-arg.js'\n\nexport const findMade = async (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): Promise => {\n // we never want the 'made' return value to be a root directory\n if (path === parent) {\n return\n }\n\n return opts.statAsync(parent).then(\n st => (st.isDirectory() ? path : undefined), // will fail later\n er => {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? findMade(opts, dirname(parent), parent)\n : undefined\n }\n )\n}\n\nexport const findMadeSync = (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): undefined | string => {\n if (path === parent) {\n return undefined\n }\n\n try {\n return opts.statSync(parent).isDirectory() ? 
path : undefined\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? findMadeSync(opts, dirname(parent), parent)\n : undefined\n }\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.d.ts
deleted file mode 100644
index fc9e43b3a45de1..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.d.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-export declare const mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void;
-export declare const sync: (path: string, opts?: MkdirpOptions) => string | void;
-export declare const manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
-};
-export declare const manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
-export declare const native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
-};
-export declare const nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
-export declare const mkdirp: ((path: string, opts?: MkdirpOptions) => Promise) & {
- mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void;
- mkdirpNative: ((path: string, options?: MkdirpOptions | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- };
- mkdirpNativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- mkdirpManual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- };
- mkdirpManualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- sync: (path: string, opts?: MkdirpOptions) => string | void;
- native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- };
- nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined;
- manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & {
- sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- };
- manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined;
- useNative: ((opts?: MkdirpOptions | undefined) => boolean) & {
- sync: (opts?: MkdirpOptions | undefined) => boolean;
- };
- useNativeSync: (opts?: MkdirpOptions | undefined) => boolean;
-};
-//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.d.ts.map
deleted file mode 100644
index cfcc78083857b1..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAItD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAG1D,eAAO,MAAM,UAAU,SAAU,MAAM,SAAS,aAAa,kBAM5D,CAAA;AAED,eAAO,MAAM,IAAI,SARgB,MAAM,SAAS,aAAa,kBAQ/B,CAAA;AAC9B,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,oHAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,kFAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM,UACJ,MAAM,SAAS,aAAa;uBAdV,MAAM,SAAS,aAAa;;;;;;;;;iBAA5B,MAAM,SAAS,aAAa;;;;;;;;;;;;;CAoC5D,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js
deleted file mode 100644
index 0217ecc8cdd83d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-import { optsArg } from './opts-arg.js';
-import { pathArg } from './path-arg.js';
-import { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore start */
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore stop */
-export const mkdirpSync = (path, opts) => {
- path = pathArg(path);
- const resolved = optsArg(opts);
- return useNativeSync(resolved)
- ? mkdirpNativeSync(path, resolved)
- : mkdirpManualSync(path, resolved);
-};
-export const sync = mkdirpSync;
-export const manual = mkdirpManual;
-export const manualSync = mkdirpManualSync;
-export const native = mkdirpNative;
-export const nativeSync = mkdirpNativeSync;
-export const mkdirp = Object.assign(async (path, opts) => {
- path = pathArg(path);
- const resolved = optsArg(opts);
- return useNative(resolved)
- ? mkdirpNative(path, resolved)
- : mkdirpManual(path, resolved);
-}, {
- mkdirpSync,
- mkdirpNative,
- mkdirpNativeSync,
- mkdirpManual,
- mkdirpManualSync,
- sync: mkdirpSync,
- native: mkdirpNative,
- nativeSync: mkdirpNativeSync,
- manual: mkdirpManual,
- manualSync: mkdirpManualSync,
- useNative,
- useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js.map
deleted file mode 100644
index 47a8133a070c8f..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AACtD,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AACvC,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,qBAAqB;AACrB,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,oBAAoB;AAEpB,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC/D,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,aAAa,CAAC,QAAQ,CAAC;QAC5B,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,QAAQ,CAAC;QAClC,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AACtC,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,IAAI,GAAG,UAAU,CAAA;AAC9B,MAAM,CAAC,MAAM,MAAM,GAAG,YAAY,CAAA;AAClC,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAA;AAC1C,MAAM,CAAC,MAAM,MAAM,GAAG,YAAY,CAAA;AAClC,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAA;AAC1C,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CACjC,KAAK,EAAE,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC3C,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,SAAS,CAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC;QAC9B,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC,EACD;IACE,UAAU;IACV,YAAY;IACZ,gBAAgB;IAChB,YAAY;IACZ,gBAAgB;IAEhB,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,gBAAgB;IAC5B,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,gBAAgB;IAC5B,SAAS;IACT,aAAa;CACd,CACF,CAAA","sourcesContent":["import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\nimport { pathArg } from './path-arg.js'\nimport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore start */\nexport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nexport { mkdirpNative, 
mkdirpNativeSync } from './mkdirp-native.js'\nexport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore stop */\n\nexport const mkdirpSync = (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNativeSync(resolved)\n ? mkdirpNativeSync(path, resolved)\n : mkdirpManualSync(path, resolved)\n}\n\nexport const sync = mkdirpSync\nexport const manual = mkdirpManual\nexport const manualSync = mkdirpManualSync\nexport const native = mkdirpNative\nexport const nativeSync = mkdirpNativeSync\nexport const mkdirp = Object.assign(\n async (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNative(resolved)\n ? mkdirpNative(path, resolved)\n : mkdirpManual(path, resolved)\n },\n {\n mkdirpSync,\n mkdirpNative,\n mkdirpNativeSync,\n mkdirpManual,\n mkdirpManualSync,\n\n sync: mkdirpSync,\n native: mkdirpNative,\n nativeSync: mkdirpNativeSync,\n manual: mkdirpManual,\n manualSync: mkdirpManualSync,\n useNative,\n useNativeSync,\n }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts
deleted file mode 100644
index e49cdf9f1bd122..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export declare const mkdirpManualSync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void;
-export declare const mkdirpManual: ((path: string, options?: MkdirpOptions, made?: string | undefined | void) => Promise) & {
- sync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void;
-};
-//# sourceMappingURL=mkdirp-manual.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map
deleted file mode 100644
index ae7f243d3ca78b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-manual.d.ts","sourceRoot":"","sources":["../../src/mkdirp-manual.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAmCvB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,QAAQ,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;iBA7C/B,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAAI;CAqF3B,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
deleted file mode 100644
index a4d044e02d3bfc..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { dirname } from 'path';
-import { optsArg } from './opts-arg.js';
-export const mkdirpManualSync = (path, options, made) => {
- const parent = dirname(path);
- const opts = { ...optsArg(options), recursive: false };
- if (parent === path) {
- try {
- return opts.mkdirSync(path, opts);
- }
- catch (er) {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- const fer = er;
- if (fer && fer.code !== 'EISDIR') {
- throw er;
- }
- return;
- }
- }
- try {
- opts.mkdirSync(path, opts);
- return made || path;
- }
- catch (er) {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
- }
- if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
- throw er;
- }
- try {
- if (!opts.statSync(path).isDirectory())
- throw er;
- }
- catch (_) {
- throw er;
- }
- }
-};
-export const mkdirpManual = Object.assign(async (path, options, made) => {
- const opts = optsArg(options);
- opts.recursive = false;
- const parent = dirname(path);
- if (parent === path) {
- return opts.mkdirAsync(path, opts).catch(er => {
- // swallowed by recursive implementation on posix systems
- // any other error is a failure
- const fer = er;
- if (fer && fer.code !== 'EISDIR') {
- throw er;
- }
- });
- }
- return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
- }
- if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
- throw er;
- }
- return opts.statAsync(path).then(st => {
- if (st.isDirectory()) {
- return made;
- }
- else {
- throw er;
- }
- }, () => {
- throw er;
- });
- });
-}, { sync: mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map
deleted file mode 100644
index 29eab250e126c8..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-manual.js","sourceRoot":"","sources":["../../src/mkdirp-manual.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC9B,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACL,EAAE;IAC7B,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,CAAA;IAEtD,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,IAAI;YACF,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAClC;QAAC,OAAO,EAAE,EAAE;YACX,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;YACD,OAAM;SACP;KACF;IAED,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,IAAI,IAAI,CAAA;KACpB;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;SAC1E;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YAC/D,MAAM,EAAE,CAAA;SACT;QACD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;gBAAE,MAAM,EAAE,CAAA;SACjD;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACI,EAAE;IACtC,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,KAAK,CAAA;IACtB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;YAC5C,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAC,CAAA;KACH;IAED,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CACrC,GAAG,EAAE,CAAC,IAAI,IAAI,IAAI,EAClB,KAAK,EAAC,EAAE,EAAC,EAAE;QACT,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,YAAY,CAAC,MAAM,EAA
E,IAAI,CAAC,CAAC,IAAI,CACpC,CAAC,IAAgC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CACrE,CAAA;SACF;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YACxD,MAAM,EAAE,CAAA;SACT;QACD,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,EAAE,CAAC,EAAE;YACH,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE;gBACpB,OAAO,IAAI,CAAA;aACZ;iBAAM;gBACL,MAAM,EAAE,CAAA;aACT;QACH,CAAC,EACD,GAAG,EAAE;YACH,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC,CACF,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpManualSync = (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n): string | undefined | void => {\n const parent = dirname(path)\n const opts = { ...optsArg(options), recursive: false }\n\n if (parent === path) {\n try {\n return opts.mkdirSync(path, opts)\n } catch (er) {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n return\n }\n }\n\n try {\n opts.mkdirSync(path, opts)\n return made || path\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))\n }\n if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {\n throw er\n }\n try {\n if (!opts.statSync(path).isDirectory()) throw er\n } catch (_) {\n throw er\n }\n }\n}\n\nexport const mkdirpManual = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n ): Promise => {\n const opts = optsArg(options)\n opts.recursive = false\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirAsync(path, opts).catch(er => {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer 
= er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n })\n }\n\n return opts.mkdirAsync(path, opts).then(\n () => made || path,\n async er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(parent, opts).then(\n (made?: string | undefined | void) => mkdirpManual(path, opts, made)\n )\n }\n if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {\n throw er\n }\n return opts.statAsync(path).then(\n st => {\n if (st.isDirectory()) {\n return made\n } else {\n throw er\n }\n },\n () => {\n throw er\n }\n )\n }\n )\n },\n { sync: mkdirpManualSync }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts
deleted file mode 100644
index 28b64814b2545a..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export declare const mkdirpNativeSync: (path: string, options?: MkdirpOptions) => string | void | undefined;
-export declare const mkdirpNative: ((path: string, options?: MkdirpOptions) => Promise) & {
- sync: (path: string, options?: MkdirpOptions) => string | void | undefined;
-};
-//# sourceMappingURL=mkdirp-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map
deleted file mode 100644
index 517dfabe7d1213..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-native.d.ts","sourceRoot":"","sources":["../../src/mkdirp-native.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAoBlB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,KACtB,QAAQ,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;iBA5B/B,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAAS;CAgD3B,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js
deleted file mode 100644
index 99d10a5425dade..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import { dirname } from 'path';
-import { findMade, findMadeSync } from './find-made.js';
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { optsArg } from './opts-arg.js';
-export const mkdirpNativeSync = (path, options) => {
- const opts = optsArg(options);
- opts.recursive = true;
- const parent = dirname(path);
- if (parent === path) {
- return opts.mkdirSync(path, opts);
- }
- const made = findMadeSync(opts, path);
- try {
- opts.mkdirSync(path, opts);
- return made;
- }
- catch (er) {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return mkdirpManualSync(path, opts);
- }
- else {
- throw er;
- }
- }
-};
-export const mkdirpNative = Object.assign(async (path, options) => {
- const opts = { ...optsArg(options), recursive: true };
- const parent = dirname(path);
- if (parent === path) {
- return await opts.mkdirAsync(path, opts);
- }
- return findMade(opts, path).then((made) => opts
- .mkdirAsync(path, opts)
- .then(m => made || m)
- .catch(er => {
- const fer = er;
- if (fer && fer.code === 'ENOENT') {
- return mkdirpManual(path, opts);
- }
- else {
- throw er;
- }
- }));
-}, { sync: mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map
deleted file mode 100644
index 27de32d9436d67..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"mkdirp-native.js","sourceRoot":"","sources":["../../src/mkdirp-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC9B,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACI,EAAE;IAC7B,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACrB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAClC;IAED,MAAM,IAAI,GAAG,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;IACrC,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,CAAA;KACZ;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SACpC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACa,EAAE;IACtC,MAAM,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAA;IACrD,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KACzC;IAED,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,IAAyB,EAAE,EAAE,CAC7D,IAAI;SACD,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC;SACtB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC;SACpB,KAAK,CAAC,EAAE,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAChC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;IACH,CAAC,CAAC,CACL,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { findMade, findMadeSync } from './find-made.js'\nimport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpNativeSync = (\n path: string,\n 
options?: MkdirpOptions\n): string | void | undefined => {\n const opts = optsArg(options)\n opts.recursive = true\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirSync(path, opts)\n }\n\n const made = findMadeSync(opts, path)\n try {\n opts.mkdirSync(path, opts)\n return made\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts)\n } else {\n throw er\n }\n }\n}\n\nexport const mkdirpNative = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions\n ): Promise => {\n const opts = { ...optsArg(options), recursive: true }\n const parent = dirname(path)\n if (parent === path) {\n return await opts.mkdirAsync(path, opts)\n }\n\n return findMade(opts, path).then((made?: string | undefined) =>\n opts\n .mkdirAsync(path, opts)\n .then(m => made || m)\n .catch(er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(path, opts)\n } else {\n throw er\n }\n })\n )\n },\n { sync: mkdirpNativeSync }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.d.ts
deleted file mode 100644
index 73d076b3b6923c..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.d.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-///
-///
-import { MakeDirectoryOptions, Stats } from 'fs';
-export interface FsProvider {
- stat?: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any;
- mkdir?: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any;
- statSync?: (path: string) => Stats;
- mkdirSync?: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => string | undefined;
-}
-interface Options extends FsProvider {
- mode?: number | string;
- fs?: FsProvider;
- mkdirAsync?: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => Promise;
- statAsync?: (path: string) => Promise;
-}
-export type MkdirpOptions = Options | number | string;
-export interface MkdirpOptionsResolved {
- mode: number;
- fs: FsProvider;
- mkdirAsync: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => Promise;
- statAsync: (path: string) => Promise;
- stat: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any;
- mkdir: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any;
- statSync: (path: string) => Stats;
- mkdirSync: (path: string, opts: MakeDirectoryOptions & {
- recursive?: boolean;
- }) => string | undefined;
- recursive?: boolean;
-}
-export declare const optsArg: (opts?: MkdirpOptions) => MkdirpOptionsResolved;
-export {};
-//# sourceMappingURL=opts-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map
deleted file mode 100644
index 717deb5f9cb0c6..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opts-arg.d.ts","sourceRoot":"","sources":["../../src/opts-arg.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,oBAAoB,EAIpB,KAAK,EAEN,MAAM,IAAI,CAAA;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,CAAC,EAAE,CACN,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IAClC,SAAS,CAAC,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;CACxB;AAED,UAAU,OAAQ,SAAQ,UAAU;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,UAAU,CAAA;IACf,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;CAC7C;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,CAAA;AAErD,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAA;IACZ,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;IAC3C,IAAI,EAAE,CACJ,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IACjC,SAAS,EAAE,CACT,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB;AAED,eAAO,MAAM,OAAO,UAAW,aAAa,KAAG,qBA2C9C,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js
deleted file mode 100644
index d47e2927fee4c0..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { mkdir, mkdirSync, stat, statSync, } from 'fs';
-export const optsArg = (opts) => {
- if (!opts) {
- opts = { mode: 0o777 };
- }
- else if (typeof opts === 'object') {
- opts = { mode: 0o777, ...opts };
- }
- else if (typeof opts === 'number') {
- opts = { mode: opts };
- }
- else if (typeof opts === 'string') {
- opts = { mode: parseInt(opts, 8) };
- }
- else {
- throw new TypeError('invalid options argument');
- }
- const resolved = opts;
- const optsFs = opts.fs || {};
- opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
- opts.mkdirAsync = opts.mkdirAsync
- ? opts.mkdirAsync
- : async (path, options) => {
- return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
- };
- opts.stat = opts.stat || optsFs.stat || stat;
- opts.statAsync = opts.statAsync
- ? opts.statAsync
- : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
- opts.statSync = opts.statSync || optsFs.statSync || statSync;
- opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
- return resolved;
-};
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js.map
deleted file mode 100644
index 663286dc7212ed..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"opts-arg.js","sourceRoot":"","sources":["../../src/opts-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,KAAK,EACL,SAAS,EACT,IAAI,EAEJ,QAAQ,GACT,MAAM,IAAI,CAAA;AAwDX,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,IAAoB,EAAyB,EAAE;IACrE,IAAI,CAAC,IAAI,EAAE;QACT,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,CAAA;KACvB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,CAAA;KAChC;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,CAAA;KACtB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAA;KACnC;SAAM;QACL,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAA;KAChD;IAED,MAAM,QAAQ,GAAG,IAA6B,CAAA;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;IAE5B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,KAAK,CAAA;IAEhD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU;QAC/B,CAAC,CAAC,IAAI,CAAC,UAAU;QACjB,CAAC,CAAC,KAAK,EACH,IAAY,EACZ,OAAuD,EAC1B,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAqB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAClD,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACzC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;QACH,CAAC,CAAA;IAEL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAA;IAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS;QAC7B,CAAC,CAAC,IAAI,CAAC,SAAS;QAChB,CAAC,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE,CACrB,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,CAAA;IAEP,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,QAAQ,IAAI,QAAQ,CAAA;IAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,SAAS,IAAI,SAAS,CAAA;IAEhE,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA","sourcesContent":["import {\n MakeDirectoryOptions,\n mkdir,\n mkdirSync,\n stat,\n Stats,\n statSync,\n} from 'fs'\n\nexport interface FsProvider {\n stat?: (\n path: string,\n callback: (err: 
NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync?: (path: string) => Stats\n mkdirSync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n}\n\ninterface Options extends FsProvider {\n mode?: number | string\n fs?: FsProvider\n mkdirAsync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync?: (path: string) => Promise\n}\n\nexport type MkdirpOptions = Options | number | string\n\nexport interface MkdirpOptionsResolved {\n mode: number\n fs: FsProvider\n mkdirAsync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync: (path: string) => Promise\n stat: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync: (path: string) => Stats\n mkdirSync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n recursive?: boolean\n}\n\nexport const optsArg = (opts?: MkdirpOptions): MkdirpOptionsResolved => {\n if (!opts) {\n opts = { mode: 0o777 }\n } else if (typeof opts === 'object') {\n opts = { mode: 0o777, ...opts }\n } else if (typeof opts === 'number') {\n opts = { mode: opts }\n } else if (typeof opts === 'string') {\n opts = { mode: parseInt(opts, 8) }\n } else {\n throw new TypeError('invalid options argument')\n }\n\n const resolved = opts as MkdirpOptionsResolved\n const optsFs = opts.fs || {}\n\n opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir\n\n opts.mkdirAsync = opts.mkdirAsync\n ? 
opts.mkdirAsync\n : async (\n path: string,\n options: MakeDirectoryOptions & { recursive?: boolean }\n ): Promise => {\n return new Promise((res, rej) =>\n resolved.mkdir(path, options, (er, made) =>\n er ? rej(er) : res(made)\n )\n )\n }\n\n opts.stat = opts.stat || optsFs.stat || stat\n opts.statAsync = opts.statAsync\n ? opts.statAsync\n : async (path: string) =>\n new Promise((res, rej) =>\n resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats)))\n )\n\n opts.statSync = opts.statSync || optsFs.statSync || statSync\n opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync\n\n return resolved\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.d.ts
deleted file mode 100644
index ad0ccfc482a485..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export declare const pathArg: (path: string) => string;
-//# sourceMappingURL=path-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map
deleted file mode 100644
index 801799e766fabc..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,SAAU,MAAM,WAyBnC,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js
deleted file mode 100644
index 03539cc5a94f98..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-import { parse, resolve } from 'path';
-export const pathArg = (path) => {
- if (/\0/.test(path)) {
- // simulate same failure that node raises
- throw Object.assign(new TypeError('path must be a string without null bytes'), {
- path,
- code: 'ERR_INVALID_ARG_VALUE',
- });
- }
- path = resolve(path);
- if (platform === 'win32') {
- const badWinChars = /[*|"<>?:]/;
- const { root } = parse(path);
- if (badWinChars.test(path.substring(root.length))) {
- throw Object.assign(new Error('Illegal characters in path.'), {
- path,
- code: 'EINVAL',
- });
- }
- }
- return path;
-};
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js.map
deleted file mode 100644
index 43efe1e3a9976f..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA;AAC5E,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnB,yCAAyC;QACzC,MAAM,MAAM,CAAC,MAAM,CACjB,IAAI,SAAS,CAAC,0CAA0C,CAAC,EACzD;YACE,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CACF,CAAA;KACF;IAED,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,IAAI,QAAQ,KAAK,OAAO,EAAE;QACxB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;YACjD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;SACH;KACF;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform\nimport { parse, resolve } from 'path'\nexport const pathArg = (path: string) => {\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n throw Object.assign(\n new TypeError('path must be a string without null bytes'),\n {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n }\n )\n }\n\n path = resolve(path)\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.d.ts b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.d.ts
deleted file mode 100644
index 1c6cb619e30405..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.d.ts
+++ /dev/null
@@ -1,6 +0,0 @@
-import { MkdirpOptions } from './opts-arg.js';
-export declare const useNativeSync: (opts?: MkdirpOptions) => boolean;
-export declare const useNative: ((opts?: MkdirpOptions) => boolean) & {
- sync: (opts?: MkdirpOptions) => boolean;
-};
-//# sourceMappingURL=use-native.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.d.ts.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.d.ts.map
deleted file mode 100644
index e2484228a04472..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.d.ts.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAMtD,eAAO,MAAM,aAAa,UAEd,aAAa,YAA0C,CAAA;AAEnE,eAAO,MAAM,SAAS,WAGR,aAAa;kBALf,aAAa;CASxB,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js
deleted file mode 100644
index ad2093867eb74e..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js
+++ /dev/null
@@ -1,14 +0,0 @@
-import { mkdir, mkdirSync } from 'fs';
-import { optsArg } from './opts-arg.js';
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-export const useNativeSync = !hasNative
- ? () => false
- : (opts) => optsArg(opts).mkdirSync === mkdirSync;
-export const useNative = Object.assign(!hasNative
- ? () => false
- : (opts) => optsArg(opts).mkdir === mkdir, {
- sync: useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js.map b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js.map
deleted file mode 100644
index 08c616d365510f..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAA;AACrC,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AACpD,MAAM,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;AAE/E,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,SAAS;IACrC,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,SAAS,CAAA;AAEnE,MAAM,CAAC,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,CACpC,CAAC,SAAS;IACR,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAC3D;IACE,IAAI,EAAE,aAAa;CACpB,CACF,CAAA","sourcesContent":["import { mkdir, mkdirSync } from 'fs'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nconst version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\nconst hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12)\n\nexport const useNativeSync = !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdirSync === mkdirSync\n\nexport const useNative = Object.assign(\n !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdir === mkdir,\n {\n sync: useNativeSync,\n }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/package.json b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/package.json
deleted file mode 100644
index f31ac3314d6f6a..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
- "name": "mkdirp",
- "description": "Recursively mkdir, like `mkdir -p`",
- "version": "3.0.1",
- "keywords": [
- "mkdir",
- "directory",
- "make dir",
- "make",
- "dir",
- "recursive",
- "native"
- ],
- "bin": "./dist/cjs/src/bin.js",
- "main": "./dist/cjs/src/index.js",
- "module": "./dist/mjs/index.js",
- "types": "./dist/mjs/index.d.ts",
- "exports": {
- ".": {
- "import": {
- "types": "./dist/mjs/index.d.ts",
- "default": "./dist/mjs/index.js"
- },
- "require": {
- "types": "./dist/cjs/src/index.d.ts",
- "default": "./dist/cjs/src/index.js"
- }
- }
- },
- "files": [
- "dist"
- ],
- "scripts": {
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "preprepare": "rm -rf dist",
- "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
- "postprepare": "bash fixup.sh",
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "test": "c8 tap",
- "snap": "c8 tap",
- "format": "prettier --write . --loglevel warn",
- "benchmark": "node benchmark/index.js",
- "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
- },
- "prettier": {
- "semi": false,
- "printWidth": 80,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- },
- "devDependencies": {
- "@types/brace-expansion": "^1.1.0",
- "@types/node": "^18.11.9",
- "@types/tap": "^15.0.7",
- "c8": "^7.12.0",
- "eslint-config-prettier": "^8.6.0",
- "prettier": "^2.8.2",
- "tap": "^16.3.3",
- "ts-node": "^10.9.1",
- "typedoc": "^0.23.21",
- "typescript": "^4.9.3"
- },
- "tap": {
- "coverage": false,
- "node-arg": [
- "--no-warnings",
- "--loader",
- "ts-node/esm"
- ],
- "ts": false
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/isaacs/node-mkdirp.git"
- },
- "license": "MIT",
- "engines": {
- "node": ">=10"
- }
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/readme.markdown b/deps/npm/node_modules/node-gyp/node_modules/mkdirp/readme.markdown
deleted file mode 100644
index df654b808755f5..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/mkdirp/readme.markdown
+++ /dev/null
@@ -1,281 +0,0 @@
-# mkdirp
-
-Like `mkdir -p`, but in Node.js!
-
-Now with a modern API and no\* bugs!
-
-\* may contain some bugs
-
-# example
-
-## pow.js
-
-```js
-// hybrid module, import or require() both work
-import { mkdirp } from 'mkdirp'
-// or:
-const { mkdirp } = require('mkdirp')
-
-// return value is a Promise resolving to the first directory created
-mkdirp('/tmp/foo/bar/baz').then(made =>
- console.log(`made directories, starting with ${made}`)
-)
-```
-
-Output (where `/tmp/foo` already exists)
-
-```
-made directories, starting with /tmp/foo/bar
-```
-
-Or, if you don't have time to wait around for promises:
-
-```js
-import { mkdirp } from 'mkdirp'
-
-// return value is the first directory created
-const made = mkdirp.sync('/tmp/foo/bar/baz')
-console.log(`made directories, starting with ${made}`)
-```
-
-And now /tmp/foo/bar/baz exists, huzzah!
-
-# methods
-
-```js
-import { mkdirp } from 'mkdirp'
-```
-
-## `mkdirp(dir: string, opts?: MkdirpOptions) => Promise`
-
-Create a new directory and any necessary subdirectories at `dir`
-with octal permission string `opts.mode`. If `opts` is a string
-or number, it will be treated as the `opts.mode`.
-
-If `opts.mode` isn't specified, it defaults to `0o777`.
-
-Promise resolves to first directory `made` that had to be
-created, or `undefined` if everything already exists. Promise
-rejects if any errors are encountered. Note that, in the case of
-promise rejection, some directories _may_ have been created, as
-recursive directory creation is not an atomic operation.
-
-You can optionally pass in an alternate `fs` implementation by
-passing in `opts.fs`. Your implementation should have
-`opts.fs.mkdir(path, opts, cb)` and `opts.fs.stat(path, cb)`.
-
-You can also override just one or the other of `mkdir` and `stat`
-by passing in `opts.stat` or `opts.mkdir`, or providing an `fs`
-option that only overrides one of these.
-
-## `mkdirp.sync(dir: string, opts: MkdirpOptions) => string|undefined`
-
-Synchronously create a new directory and any necessary
-subdirectories at `dir` with octal permission string `opts.mode`.
-If `opts` is a string or number, it will be treated as the
-`opts.mode`.
-
-If `opts.mode` isn't specified, it defaults to `0o777`.
-
-Returns the first directory that had to be created, or undefined
-if everything already exists.
-
-You can optionally pass in an alternate `fs` implementation by
-passing in `opts.fs`. Your implementation should have
-`opts.fs.mkdirSync(path, mode)` and `opts.fs.statSync(path)`.
-
-You can also override just one or the other of `mkdirSync` and
-`statSync` by passing in `opts.statSync` or `opts.mkdirSync`, or
-providing an `fs` option that only overrides one of these.
-
-## `mkdirp.manual`, `mkdirp.manualSync`
-
-Use the manual implementation (not the native one). This is the
-default when the native implementation is not available or the
-stat/mkdir implementation is overridden.
-
-## `mkdirp.native`, `mkdirp.nativeSync`
-
-Use the native implementation (not the manual one). This is the
-default when the native implementation is available and
-stat/mkdir are not overridden.
-
-# implementation
-
-On Node.js v10.12.0 and above, use the native `fs.mkdir(p,
-{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has
-been overridden by an option.
-
-## native implementation
-
-- If the path is a root directory, then pass it to the underlying
- implementation and return the result/error. (In this case,
- it'll either succeed or fail, but we aren't actually creating
- any dirs.)
-- Walk up the path statting each directory, to find the first
- path that will be created, `made`.
-- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`)
-- If error, raise it to the caller.
-- Return `made`.
-
-## manual implementation
-
-- Call underlying `fs.mkdir` implementation, with `recursive:
-false`
-- If error:
- - If path is a root directory, raise to the caller and do not
- handle it
- - If ENOENT, mkdirp parent dir, store result as `made`
- - stat(path)
- - If error, raise original `mkdir` error
- - If directory, return `made`
- - Else, raise original `mkdir` error
-- else
- - return `undefined` if a root dir, or `made` if set, or `path`
-
-## windows vs unix caveat
-
-On Windows file systems, attempts to create a root directory (ie,
-a drive letter or root UNC path) will fail. If the root
-directory exists, then it will fail with `EPERM`. If the root
-directory does not exist, then it will fail with `ENOENT`.
-
-On posix file systems, attempts to create a root directory (in
-recursive mode) will succeed silently, as it is treated like just
-another directory that already exists. (In non-recursive mode,
-of course, it fails with `EEXIST`.)
-
-In order to preserve this system-specific behavior (and because
-it's not as if we can create the parent of a root directory
-anyway), attempts to create a root directory are passed directly
-to the `fs` implementation, and any errors encountered are not
-handled.
-
-## native error caveat
-
-The native implementation (as of at least Node.js v13.4.0) does
-not provide appropriate errors in some cases (see
-[nodejs/node#31481](https://github.com/nodejs/node/issues/31481)
-and
-[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)).
-
-In order to work around this issue, the native implementation
-will fall back to the manual implementation if an `ENOENT` error
-is encountered.
-
-# choosing a recursive mkdir implementation
-
-There are a few to choose from! Use the one that suits your
-needs best :D
-
-## use `fs.mkdir(path, {recursive: true}, cb)` if:
-
-- You wish to optimize performance even at the expense of other
- factors.
-- You don't need to know the first dir created.
-- You are ok with getting `ENOENT` as the error when some other
- problem is the actual cause.
-- You can limit your platforms to Node.js v10.12 and above.
-- You're ok with using callbacks instead of promises.
-- You don't need/want a CLI.
-- You don't need to override the `fs` methods in use.
-
-## use this module (mkdirp 1.x or 2.x) if:
-
-- You need to know the first directory that was created.
-- You wish to use the native implementation if available, but
- fall back when it's not.
-- You prefer promise-returning APIs to callback-taking APIs.
-- You want more useful error messages than the native recursive
- mkdir provides (at least as of Node.js v13.4), and are ok with
- re-trying on `ENOENT` to achieve this.
-- You need (or at least, are ok with) a CLI.
-- You need to override the `fs` methods in use.
-
-## use [`make-dir`](http://npm.im/make-dir) if:
-
-- You do not need to know the first dir created (and wish to save
- a few `stat` calls when using the native implementation for
- this reason).
-- You wish to use the native implementation if available, but
- fall back when it's not.
-- You prefer promise-returning APIs to callback-taking APIs.
-- You are ok with occasionally getting `ENOENT` errors for
- failures that are actually related to something other than a
- missing file system entry.
-- You don't need/want a CLI.
-- You need to override the `fs` methods in use.
-
-## use mkdirp 0.x if:
-
-- You need to know the first directory that was created.
-- You need (or at least, are ok with) a CLI.
-- You need to override the `fs` methods in use.
-- You're ok with using callbacks instead of promises.
-- You are not running on Windows, where the root-level ENOENT
- errors can lead to infinite regress.
-- You think vinyl just sounds warmer and richer for some weird
- reason.
-- You are supporting truly ancient Node.js versions, before even
- the advent of a `Promise` language primitive. (Please don't.
- You deserve better.)
-
-# cli
-
-This package also ships with a `mkdirp` command.
-
-```
-$ mkdirp -h
-
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
- Create each supplied directory including any necessary parent directories
- that don't yet exist.
-
- If the directory already exists, do nothing.
-
-OPTIONS are:
-
- -m If a directory needs to be created, set the mode as an octal
- --mode= permission string.
-
- -v --version Print the mkdirp version number
-
- -h --help Print this helpful banner
-
- -p --print Print the first directories created for each path provided
-
- --manual Use manual implementation, even if native is available
-```
-
-# install
-
-With [npm](http://npmjs.org) do:
-
-```
-npm install mkdirp
-```
-
-to get the library locally, or
-
-```
-npm install -g mkdirp
-```
-
-to get the command everywhere, or
-
-```
-npx mkdirp ...
-```
-
-to run the command without installing it globally.
-
-# platform support
-
-This module works on node v8, but only v10 and above are officially
-supported, as Node v8 reached its LTS end of life 2020-01-01, which is in
-the past, as of this writing.
-
-# license
-
-MIT
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/create.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/create.js
deleted file mode 100644
index 3190afc48318f9..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/create.js
+++ /dev/null
@@ -1,83 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.create = void 0;
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_path_1 = __importDefault(require("node:path"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const pack_js_1 = require("./pack.js");
-const createFileSync = (opt, files) => {
- const p = new pack_js_1.PackSync(opt);
- const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
- mode: opt.mode || 0o666,
- });
- p.pipe(stream);
- addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
- const p = new pack_js_1.Pack(opt);
- const stream = new fs_minipass_1.WriteStream(opt.file, {
- mode: opt.mode || 0o666,
- });
- p.pipe(stream);
- const promise = new Promise((res, rej) => {
- stream.on('error', rej);
- stream.on('close', res);
- p.on('error', rej);
- });
- addFilesAsync(p, files);
- return promise;
-};
-const addFilesSync = (p, files) => {
- files.forEach(file => {
- if (file.charAt(0) === '@') {
- (0, list_js_1.list)({
- file: node_path_1.default.resolve(p.cwd, file.slice(1)),
- sync: true,
- noResume: true,
- onReadEntry: entry => p.add(entry),
- });
- }
- else {
- p.add(file);
- }
- });
- p.end();
-};
-const addFilesAsync = async (p, files) => {
- for (let i = 0; i < files.length; i++) {
- const file = String(files[i]);
- if (file.charAt(0) === '@') {
- await (0, list_js_1.list)({
- file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
- noResume: true,
- onReadEntry: entry => {
- p.add(entry);
- },
- });
- }
- else {
- p.add(file);
- }
- }
- p.end();
-};
-const createSync = (opt, files) => {
- const p = new pack_js_1.PackSync(opt);
- addFilesSync(p, files);
- return p;
-};
-const createAsync = (opt, files) => {
- const p = new pack_js_1.Pack(opt);
- addFilesAsync(p, files);
- return p;
-};
-exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
- if (!files?.length) {
- throw new TypeError('no paths specified to add to archive');
- }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/cwd-error.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/cwd-error.js
deleted file mode 100644
index d703a7772be3a5..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/cwd-error.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CwdError = void 0;
-class CwdError extends Error {
- path;
- code;
- syscall = 'chdir';
- constructor(path, code) {
- super(`${code}: Cannot cd into '${path}'`);
- this.path = path;
- this.code = code;
- }
- get name() {
- return 'CwdError';
- }
-}
-exports.CwdError = CwdError;
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/extract.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/extract.js
deleted file mode 100644
index f848cbcbf779e8..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/extract.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extract = void 0;
-// tar -x
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const unpack_js_1 = require("./unpack.js");
-const extractFileSync = (opt) => {
- const u = new unpack_js_1.UnpackSync(opt);
- const file = opt.file;
- const stat = node_fs_1.default.statSync(file);
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- const readSize = opt.maxReadSize || 16 * 1024 * 1024;
- const stream = new fsm.ReadStreamSync(file, {
- readSize: readSize,
- size: stat.size,
- });
- stream.pipe(u);
-};
-const extractFile = (opt, _) => {
- const u = new unpack_js_1.Unpack(opt);
- const readSize = opt.maxReadSize || 16 * 1024 * 1024;
- const file = opt.file;
- const p = new Promise((resolve, reject) => {
- u.on('error', reject);
- u.on('close', resolve);
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- node_fs_1.default.stat(file, (er, stat) => {
- if (er) {
- reject(er);
- }
- else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size,
- });
- stream.on('error', reject);
- stream.pipe(u);
- }
- });
- });
- return p;
-};
-exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
- if (files?.length)
- (0, list_js_1.filesFilter)(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/get-write-flag.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/get-write-flag.js
deleted file mode 100644
index 94add8f6b2231c..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/get-write-flag.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb. This is a fairly low limit, but avoids making
-// things slower in some cases. Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getWriteFlag = void 0;
-const fs_1 = __importDefault(require("fs"));
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
- fs_1.default.constants.UV_FS_O_FILEMAP ||
- 0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-exports.getWriteFlag = !fMapEnabled ?
- () => 'w'
- : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/large-numbers.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/large-numbers.js
deleted file mode 100644
index 5b07aa7f71b48d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/large-numbers.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parse = exports.encode = void 0;
-const encode = (num, buf) => {
- if (!Number.isSafeInteger(num)) {
- // The number is so large that javascript cannot represent it with integer
- // precision.
- throw Error('cannot encode number outside of javascript safe integer range');
- }
- else if (num < 0) {
- encodeNegative(num, buf);
- }
- else {
- encodePositive(num, buf);
- }
- return buf;
-};
-exports.encode = encode;
-const encodePositive = (num, buf) => {
- buf[0] = 0x80;
- for (var i = buf.length; i > 1; i--) {
- buf[i - 1] = num & 0xff;
- num = Math.floor(num / 0x100);
- }
-};
-const encodeNegative = (num, buf) => {
- buf[0] = 0xff;
- var flipped = false;
- num = num * -1;
- for (var i = buf.length; i > 1; i--) {
- var byte = num & 0xff;
- num = Math.floor(num / 0x100);
- if (flipped) {
- buf[i - 1] = onesComp(byte);
- }
- else if (byte === 0) {
- buf[i - 1] = 0;
- }
- else {
- flipped = true;
- buf[i - 1] = twosComp(byte);
- }
- }
-};
-const parse = (buf) => {
- const pre = buf[0];
- const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
- : pre === 0xff ? twos(buf)
- : null;
- if (value === null) {
- throw Error('invalid base256 encoding');
- }
- if (!Number.isSafeInteger(value)) {
- // The number is so large that javascript cannot represent it with integer
- // precision.
- throw Error('parsed number outside of javascript safe integer range');
- }
- return value;
-};
-exports.parse = parse;
-const twos = (buf) => {
- var len = buf.length;
- var sum = 0;
- var flipped = false;
- for (var i = len - 1; i > -1; i--) {
- var byte = Number(buf[i]);
- var f;
- if (flipped) {
- f = onesComp(byte);
- }
- else if (byte === 0) {
- f = byte;
- }
- else {
- flipped = true;
- f = twosComp(byte);
- }
- if (f !== 0) {
- sum -= f * Math.pow(256, len - i - 1);
- }
- }
- return sum;
-};
-const pos = (buf) => {
- var len = buf.length;
- var sum = 0;
- for (var i = len - 1; i > -1; i--) {
- var byte = Number(buf[i]);
- if (byte !== 0) {
- sum += byte * Math.pow(256, len - i - 1);
- }
- }
- return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/make-command.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/make-command.js
deleted file mode 100644
index 1814319e78bc62..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/make-command.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeCommand = void 0;
-const options_js_1 = require("./options.js");
-const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
- return Object.assign((opt_ = [], entries, cb) => {
- if (Array.isArray(opt_)) {
- entries = opt_;
- opt_ = {};
- }
- if (typeof entries === 'function') {
- cb = entries;
- entries = undefined;
- }
- if (!entries) {
- entries = [];
- }
- else {
- entries = Array.from(entries);
- }
- const opt = (0, options_js_1.dealias)(opt_);
- validate?.(opt, entries);
- if ((0, options_js_1.isSyncFile)(opt)) {
- if (typeof cb === 'function') {
- throw new TypeError('callback not supported for sync tar functions');
- }
- return syncFile(opt, entries);
- }
- else if ((0, options_js_1.isAsyncFile)(opt)) {
- const p = asyncFile(opt, entries);
- // weirdness to make TS happy
- const c = cb ? cb : undefined;
- return c ? p.then(() => c(), c) : p;
- }
- else if ((0, options_js_1.isSyncNoFile)(opt)) {
- if (typeof cb === 'function') {
- throw new TypeError('callback not supported for sync tar functions');
- }
- return syncNoFile(opt, entries);
- }
- else if ((0, options_js_1.isAsyncNoFile)(opt)) {
- if (typeof cb === 'function') {
- throw new TypeError('callback only supported with file option');
- }
- return asyncNoFile(opt, entries);
- /* c8 ignore start */
- }
- else {
- throw new Error('impossible options??');
- }
- /* c8 ignore stop */
- }, {
- syncFile,
- asyncFile,
- syncNoFile,
- asyncNoFile,
- validate,
- });
-};
-exports.makeCommand = makeCommand;
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js
deleted file mode 100644
index 2b13ecbab6723e..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js
+++ /dev/null
@@ -1,209 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirSync = exports.mkdir = void 0;
-const chownr_1 = require("chownr");
-const fs_1 = __importDefault(require("fs"));
-const mkdirp_1 = require("mkdirp");
-const node_path_1 = __importDefault(require("node:path"));
-const cwd_error_js_1 = require("./cwd-error.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const symlink_error_js_1 = require("./symlink-error.js");
-const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
-const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
-const checkCwd = (dir, cb) => {
- fs_1.default.stat(dir, (er, st) => {
- if (er || !st.isDirectory()) {
- er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
- }
- cb(er);
- });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-const mkdir = (dir, opt, cb) => {
- dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
- // if there's any overlap between mask and mode,
- // then we'll need an explicit chmod
- /* c8 ignore next */
- const umask = opt.umask ?? 0o22;
- const mode = opt.mode | 0o0700;
- const needChmod = (mode & umask) !== 0;
- const uid = opt.uid;
- const gid = opt.gid;
- const doChown = typeof uid === 'number' &&
- typeof gid === 'number' &&
- (uid !== opt.processUid || gid !== opt.processGid);
- const preserve = opt.preserve;
- const unlink = opt.unlink;
- const cache = opt.cache;
- const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
- const done = (er, created) => {
- if (er) {
- cb(er);
- }
- else {
- cSet(cache, dir, true);
- if (created && doChown) {
- (0, chownr_1.chownr)(created, uid, gid, er => done(er));
- }
- else if (needChmod) {
- fs_1.default.chmod(dir, mode, cb);
- }
- else {
- cb();
- }
- }
- };
- if (cache && cGet(cache, dir) === true) {
- return done();
- }
- if (dir === cwd) {
- return checkCwd(dir, done);
- }
- if (preserve) {
- return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
- done);
- }
- const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
- const parts = sub.split('/');
- mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-exports.mkdir = mkdir;
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
- if (!parts.length) {
- return cb(null, created);
- }
- const p = parts.shift();
- const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
- if (cGet(cache, part)) {
- return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
- }
- fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
- if (er) {
- fs_1.default.lstat(part, (statEr, st) => {
- if (statEr) {
- statEr.path =
- statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
- cb(statEr);
- }
- else if (st.isDirectory()) {
- mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
- }
- else if (unlink) {
- fs_1.default.unlink(part, er => {
- if (er) {
- return cb(er);
- }
- fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
- });
- }
- else if (st.isSymbolicLink()) {
- return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
- }
- else {
- cb(er);
- }
- });
- }
- else {
- created = created || part;
- mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
- }
-};
-const checkCwdSync = (dir) => {
- let ok = false;
- let code = undefined;
- try {
- ok = fs_1.default.statSync(dir).isDirectory();
- }
- catch (er) {
- code = er?.code;
- }
- finally {
- if (!ok) {
- throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
- }
- }
-};
-const mkdirSync = (dir, opt) => {
- dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
- // if there's any overlap between mask and mode,
- // then we'll need an explicit chmod
- /* c8 ignore next */
- const umask = opt.umask ?? 0o22;
- const mode = opt.mode | 0o700;
- const needChmod = (mode & umask) !== 0;
- const uid = opt.uid;
- const gid = opt.gid;
- const doChown = typeof uid === 'number' &&
- typeof gid === 'number' &&
- (uid !== opt.processUid || gid !== opt.processGid);
- const preserve = opt.preserve;
- const unlink = opt.unlink;
- const cache = opt.cache;
- const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
- const done = (created) => {
- cSet(cache, dir, true);
- if (created && doChown) {
- (0, chownr_1.chownrSync)(created, uid, gid);
- }
- if (needChmod) {
- fs_1.default.chmodSync(dir, mode);
- }
- };
- if (cache && cGet(cache, dir) === true) {
- return done();
- }
- if (dir === cwd) {
- checkCwdSync(cwd);
- return done();
- }
- if (preserve) {
- return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
- }
- const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
- const parts = sub.split('/');
- let created = undefined;
- for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
- part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
- if (cGet(cache, part)) {
- continue;
- }
- try {
- fs_1.default.mkdirSync(part, mode);
- created = created || part;
- cSet(cache, part, true);
- }
- catch (er) {
- const st = fs_1.default.lstatSync(part);
- if (st.isDirectory()) {
- cSet(cache, part, true);
- continue;
- }
- else if (unlink) {
- fs_1.default.unlinkSync(part);
- fs_1.default.mkdirSync(part, mode);
- created = created || part;
- cSet(cache, part, true);
- continue;
- }
- else if (st.isSymbolicLink()) {
- return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
- }
- }
- }
- return done(created);
-};
-exports.mkdirSync = mkdirSync;
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/mode-fix.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/mode-fix.js
deleted file mode 100644
index 49dd727961d290..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/mode-fix.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.modeFix = void 0;
-const modeFix = (mode, isDir, portable) => {
- mode &= 0o7777;
- // in portable mode, use the minimum reasonable umask
- // if this system creates files with 0o664 by default
- // (as some linux distros do), then we'll write the
- // archive with 0o644 instead. Also, don't ever create
- // a file that is not readable/writable by the owner.
- if (portable) {
- mode = (mode | 0o600) & ~0o22;
- }
- // if dirs are readable, then they should be listable
- if (isDir) {
- if (mode & 0o400) {
- mode |= 0o100;
- }
- if (mode & 0o40) {
- mode |= 0o10;
- }
- if (mode & 0o4) {
- mode |= 0o1;
- }
- }
- return mode;
-};
-exports.modeFix = modeFix;
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js
deleted file mode 100644
index 2f08ce46d98c4c..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeUnicode = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-const normalizeUnicode = (s) => {
- if (!hasOwnProperty.call(normalizeCache, s)) {
- normalizeCache[s] = s.normalize('NFD');
- }
- return normalizeCache[s];
-};
-exports.normalizeUnicode = normalizeUnicode;
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-windows-path.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-windows-path.js
deleted file mode 100644
index b0c7aaa9f2d175..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-windows-path.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeWindowsPath = void 0;
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-exports.normalizeWindowsPath = platform !== 'win32' ?
- (p) => p
- : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/options.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/options.js
deleted file mode 100644
index 4cd06505bc72b2..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/options.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
-const argmap = new Map([
- ['C', 'cwd'],
- ['f', 'file'],
- ['z', 'gzip'],
- ['P', 'preservePaths'],
- ['U', 'unlink'],
- ['strip-components', 'strip'],
- ['stripComponents', 'strip'],
- ['keep-newer', 'newer'],
- ['keepNewer', 'newer'],
- ['keep-newer-files', 'newer'],
- ['keepNewerFiles', 'newer'],
- ['k', 'keep'],
- ['keep-existing', 'keep'],
- ['keepExisting', 'keep'],
- ['m', 'noMtime'],
- ['no-mtime', 'noMtime'],
- ['p', 'preserveOwner'],
- ['L', 'follow'],
- ['h', 'follow'],
- ['onentry', 'onReadEntry'],
-]);
-const isSyncFile = (o) => !!o.sync && !!o.file;
-exports.isSyncFile = isSyncFile;
-const isAsyncFile = (o) => !o.sync && !!o.file;
-exports.isAsyncFile = isAsyncFile;
-const isSyncNoFile = (o) => !!o.sync && !o.file;
-exports.isSyncNoFile = isSyncNoFile;
-const isAsyncNoFile = (o) => !o.sync && !o.file;
-exports.isAsyncNoFile = isAsyncNoFile;
-const isSync = (o) => !!o.sync;
-exports.isSync = isSync;
-const isAsync = (o) => !o.sync;
-exports.isAsync = isAsync;
-const isFile = (o) => !!o.file;
-exports.isFile = isFile;
-const isNoFile = (o) => !o.file;
-exports.isNoFile = isNoFile;
-const dealiasKey = (k) => {
- const d = argmap.get(k);
- if (d)
- return d;
- return k;
-};
-const dealias = (opt = {}) => {
- if (!opt)
- return {};
- const result = {};
- for (const [key, v] of Object.entries(opt)) {
- // TS doesn't know that aliases are going to always be the same type
- const k = dealiasKey(key);
- result[k] = v;
- }
- // affordance for deprecated noChmod -> chmod
- if (result.chmod === undefined && result.noChmod === false) {
- result.chmod = true;
- }
- delete result.noChmod;
- return result;
-};
-exports.dealias = dealias;
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/pack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/pack.js
deleted file mode 100644
index 303e93063c2db4..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/pack.js
+++ /dev/null
@@ -1,477 +0,0 @@
-"use strict";
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PackSync = exports.Pack = exports.PackJob = void 0;
-const fs_1 = __importDefault(require("fs"));
-const write_entry_js_1 = require("./write-entry.js");
-class PackJob {
- path;
- absolute;
- entry;
- stat;
- readdir;
- pending = false;
- ignore = false;
- piped = false;
- constructor(path, absolute) {
- this.path = path || './';
- this.absolute = absolute;
- }
-}
-exports.PackJob = PackJob;
-const minipass_1 = require("minipass");
-const zlib = __importStar(require("minizlib"));
-const yallist_1 = require("yallist");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-const path_1 = __importDefault(require("path"));
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class Pack extends minipass_1.Minipass {
- opt;
- cwd;
- maxReadSize;
- preservePaths;
- strict;
- noPax;
- prefix;
- linkCache;
- statCache;
- file;
- portable;
- zip;
- readdirCache;
- noDirRecurse;
- follow;
- noMtime;
- mtime;
- filter;
- jobs;
- [WRITEENTRYCLASS];
- onWriteEntry;
- [QUEUE];
- [JOBS] = 0;
- [PROCESSING] = false;
- [ENDED] = false;
- constructor(opt = {}) {
- //@ts-ignore
- super();
- this.opt = opt;
- this.file = opt.file || '';
- this.cwd = opt.cwd || process.cwd();
- this.maxReadSize = opt.maxReadSize;
- this.preservePaths = !!opt.preservePaths;
- this.strict = !!opt.strict;
- this.noPax = !!opt.noPax;
- this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
- this.linkCache = opt.linkCache || new Map();
- this.statCache = opt.statCache || new Map();
- this.readdirCache = opt.readdirCache || new Map();
- this.onWriteEntry = opt.onWriteEntry;
- this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
- if (typeof opt.onwarn === 'function') {
- this.on('warn', opt.onwarn);
- }
- this.portable = !!opt.portable;
- if (opt.gzip || opt.brotli) {
- if (opt.gzip && opt.brotli) {
- throw new TypeError('gzip and brotli are mutually exclusive');
- }
- if (opt.gzip) {
- if (typeof opt.gzip !== 'object') {
- opt.gzip = {};
- }
- if (this.portable) {
- opt.gzip.portable = true;
- }
- this.zip = new zlib.Gzip(opt.gzip);
- }
- if (opt.brotli) {
- if (typeof opt.brotli !== 'object') {
- opt.brotli = {};
- }
- this.zip = new zlib.BrotliCompress(opt.brotli);
- }
- /* c8 ignore next */
- if (!this.zip)
- throw new Error('impossible');
- const zip = this.zip;
- zip.on('data', chunk => super.write(chunk));
- zip.on('end', () => super.end());
- zip.on('drain', () => this[ONDRAIN]());
- this.on('resume', () => zip.resume());
- }
- else {
- this.on('drain', this[ONDRAIN]);
- }
- this.noDirRecurse = !!opt.noDirRecurse;
- this.follow = !!opt.follow;
- this.noMtime = !!opt.noMtime;
- if (opt.mtime)
- this.mtime = opt.mtime;
- this.filter =
- typeof opt.filter === 'function' ? opt.filter : () => true;
- this[QUEUE] = new yallist_1.Yallist();
- this[JOBS] = 0;
- this.jobs = Number(opt.jobs) || 4;
- this[PROCESSING] = false;
- this[ENDED] = false;
- }
- [WRITE](chunk) {
- return super.write(chunk);
- }
- add(path) {
- this.write(path);
- return this;
- }
- end(path, encoding, cb) {
- /* c8 ignore start */
- if (typeof path === 'function') {
- cb = path;
- path = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- /* c8 ignore stop */
- if (path) {
- this.add(path);
- }
- this[ENDED] = true;
- this[PROCESS]();
- /* c8 ignore next */
- if (cb)
- cb();
- return this;
- }
- write(path) {
- if (this[ENDED]) {
- throw new Error('write after end');
- }
- if (path instanceof read_entry_js_1.ReadEntry) {
- this[ADDTARENTRY](path);
- }
- else {
- this[ADDFSENTRY](path);
- }
- return this.flowing;
- }
- [ADDTARENTRY](p) {
- const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
- // in this case, we don't have to wait for the stat
- if (!this.filter(p.path, p)) {
- p.resume();
- }
- else {
- const job = new PackJob(p.path, absolute);
- job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
- job.entry.on('end', () => this[JOBDONE](job));
- this[JOBS] += 1;
- this[QUEUE].push(job);
- }
- this[PROCESS]();
- }
- [ADDFSENTRY](p) {
- const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
- this[QUEUE].push(new PackJob(p, absolute));
- this[PROCESS]();
- }
- [STAT](job) {
- job.pending = true;
- this[JOBS] += 1;
- const stat = this.follow ? 'stat' : 'lstat';
- fs_1.default[stat](job.absolute, (er, stat) => {
- job.pending = false;
- this[JOBS] -= 1;
- if (er) {
- this.emit('error', er);
- }
- else {
- this[ONSTAT](job, stat);
- }
- });
- }
- [ONSTAT](job, stat) {
- this.statCache.set(job.absolute, stat);
- job.stat = stat;
- // now we have the stat, we can filter it.
- if (!this.filter(job.path, stat)) {
- job.ignore = true;
- }
- this[PROCESS]();
- }
- [READDIR](job) {
- job.pending = true;
- this[JOBS] += 1;
- fs_1.default.readdir(job.absolute, (er, entries) => {
- job.pending = false;
- this[JOBS] -= 1;
- if (er) {
- return this.emit('error', er);
- }
- this[ONREADDIR](job, entries);
- });
- }
- [ONREADDIR](job, entries) {
- this.readdirCache.set(job.absolute, entries);
- job.readdir = entries;
- this[PROCESS]();
- }
- [PROCESS]() {
- if (this[PROCESSING]) {
- return;
- }
- this[PROCESSING] = true;
- for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
- this[PROCESSJOB](w.value);
- if (w.value.ignore) {
- const p = w.next;
- this[QUEUE].removeNode(w);
- w.next = p;
- }
- }
- this[PROCESSING] = false;
- if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
- if (this.zip) {
- this.zip.end(EOF);
- }
- else {
- super.write(EOF);
- super.end();
- }
- }
- }
- get [CURRENT]() {
- return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
- }
- [JOBDONE](_job) {
- this[QUEUE].shift();
- this[JOBS] -= 1;
- this[PROCESS]();
- }
- [PROCESSJOB](job) {
- if (job.pending) {
- return;
- }
- if (job.entry) {
- if (job === this[CURRENT] && !job.piped) {
- this[PIPE](job);
- }
- return;
- }
- if (!job.stat) {
- const sc = this.statCache.get(job.absolute);
- if (sc) {
- this[ONSTAT](job, sc);
- }
- else {
- this[STAT](job);
- }
- }
- if (!job.stat) {
- return;
- }
- // filtered out!
- if (job.ignore) {
- return;
- }
- if (!this.noDirRecurse &&
- job.stat.isDirectory() &&
- !job.readdir) {
- const rc = this.readdirCache.get(job.absolute);
- if (rc) {
- this[ONREADDIR](job, rc);
- }
- else {
- this[READDIR](job);
- }
- if (!job.readdir) {
- return;
- }
- }
- // we know it doesn't have an entry, because that got checked above
- job.entry = this[ENTRY](job);
- if (!job.entry) {
- job.ignore = true;
- return;
- }
- if (job === this[CURRENT] && !job.piped) {
- this[PIPE](job);
- }
- }
- [ENTRYOPT](job) {
- return {
- onwarn: (code, msg, data) => this.warn(code, msg, data),
- noPax: this.noPax,
- cwd: this.cwd,
- absolute: job.absolute,
- preservePaths: this.preservePaths,
- maxReadSize: this.maxReadSize,
- strict: this.strict,
- portable: this.portable,
- linkCache: this.linkCache,
- statCache: this.statCache,
- noMtime: this.noMtime,
- mtime: this.mtime,
- prefix: this.prefix,
- onWriteEntry: this.onWriteEntry,
- };
- }
- [ENTRY](job) {
- this[JOBS] += 1;
- try {
- const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
- return e
- .on('end', () => this[JOBDONE](job))
- .on('error', er => this.emit('error', er));
- }
- catch (er) {
- this.emit('error', er);
- }
- }
- [ONDRAIN]() {
- if (this[CURRENT] && this[CURRENT].entry) {
- this[CURRENT].entry.resume();
- }
- }
- // like .pipe() but using super, because our write() is special
- [PIPE](job) {
- job.piped = true;
- if (job.readdir) {
- job.readdir.forEach(entry => {
- const p = job.path;
- const base = p === './' ? '' : p.replace(/\/*$/, '/');
- this[ADDFSENTRY](base + entry);
- });
- }
- const source = job.entry;
- const zip = this.zip;
- /* c8 ignore start */
- if (!source)
- throw new Error('cannot pipe without source');
- /* c8 ignore stop */
- if (zip) {
- source.on('data', chunk => {
- if (!zip.write(chunk)) {
- source.pause();
- }
- });
- }
- else {
- source.on('data', chunk => {
- if (!super.write(chunk)) {
- source.pause();
- }
- });
- }
- }
- pause() {
- if (this.zip) {
- this.zip.pause();
- }
- return super.pause();
- }
- warn(code, message, data = {}) {
- (0, warn_method_js_1.warnMethod)(this, code, message, data);
- }
-}
-exports.Pack = Pack;
-class PackSync extends Pack {
- sync = true;
- constructor(opt) {
- super(opt);
- this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
- }
- // pause/resume are no-ops in sync streams.
- pause() { }
- resume() { }
- [STAT](job) {
- const stat = this.follow ? 'statSync' : 'lstatSync';
- this[ONSTAT](job, fs_1.default[stat](job.absolute));
- }
- [READDIR](job) {
- this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
- }
- // gotta get it all in this tick
- [PIPE](job) {
- const source = job.entry;
- const zip = this.zip;
- if (job.readdir) {
- job.readdir.forEach(entry => {
- const p = job.path;
- const base = p === './' ? '' : p.replace(/\/*$/, '/');
- this[ADDFSENTRY](base + entry);
- });
- }
- /* c8 ignore start */
- if (!source)
- throw new Error('Cannot pipe without source');
- /* c8 ignore stop */
- if (zip) {
- source.on('data', chunk => {
- zip.write(chunk);
- });
- }
- else {
- source.on('data', chunk => {
- super[WRITE](chunk);
- });
- }
- }
-}
-exports.PackSync = PackSync;
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee39..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "commonjs"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/pax.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/pax.js
deleted file mode 100644
index d30c0f3efbe9ea..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/pax.js
+++ /dev/null
@@ -1,158 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Pax = void 0;
-const node_path_1 = require("node:path");
-const header_js_1 = require("./header.js");
-class Pax {
- atime;
- mtime;
- ctime;
- charset;
- comment;
- gid;
- uid;
- gname;
- uname;
- linkpath;
- dev;
- ino;
- nlink;
- path;
- size;
- mode;
- global;
- constructor(obj, global = false) {
- this.atime = obj.atime;
- this.charset = obj.charset;
- this.comment = obj.comment;
- this.ctime = obj.ctime;
- this.dev = obj.dev;
- this.gid = obj.gid;
- this.global = global;
- this.gname = obj.gname;
- this.ino = obj.ino;
- this.linkpath = obj.linkpath;
- this.mtime = obj.mtime;
- this.nlink = obj.nlink;
- this.path = obj.path;
- this.size = obj.size;
- this.uid = obj.uid;
- this.uname = obj.uname;
- }
- encode() {
- const body = this.encodeBody();
- if (body === '') {
- return Buffer.allocUnsafe(0);
- }
- const bodyLen = Buffer.byteLength(body);
- // round up to 512 bytes
- // add 512 for header
- const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
- const buf = Buffer.allocUnsafe(bufLen);
- // 0-fill the header section, it might not hit every field
- for (let i = 0; i < 512; i++) {
- buf[i] = 0;
- }
- new header_js_1.Header({
- // XXX split the path
- // then the path should be PaxHeader + basename, but less than 99,
- // prepend with the dirname
- /* c8 ignore start */
- path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
- /* c8 ignore stop */
- mode: this.mode || 0o644,
- uid: this.uid,
- gid: this.gid,
- size: bodyLen,
- mtime: this.mtime,
- type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
- linkpath: '',
- uname: this.uname || '',
- gname: this.gname || '',
- devmaj: 0,
- devmin: 0,
- atime: this.atime,
- ctime: this.ctime,
- }).encode(buf);
- buf.write(body, 512, bodyLen, 'utf8');
- // null pad after the body
- for (let i = bodyLen + 512; i < buf.length; i++) {
- buf[i] = 0;
- }
- return buf;
- }
- encodeBody() {
- return (this.encodeField('path') +
- this.encodeField('ctime') +
- this.encodeField('atime') +
- this.encodeField('dev') +
- this.encodeField('ino') +
- this.encodeField('nlink') +
- this.encodeField('charset') +
- this.encodeField('comment') +
- this.encodeField('gid') +
- this.encodeField('gname') +
- this.encodeField('linkpath') +
- this.encodeField('mtime') +
- this.encodeField('size') +
- this.encodeField('uid') +
- this.encodeField('uname'));
- }
- encodeField(field) {
- if (this[field] === undefined) {
- return '';
- }
- const r = this[field];
- const v = r instanceof Date ? r.getTime() / 1000 : r;
- const s = ' ' +
- (field === 'dev' || field === 'ino' || field === 'nlink' ?
- 'SCHILY.'
- : '') +
- field +
- '=' +
- v +
- '\n';
- const byteLen = Buffer.byteLength(s);
- // the digits includes the length of the digits in ascii base-10
- // so if it's 9 characters, then adding 1 for the 9 makes it 10
- // which makes it 11 chars.
- let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
- if (byteLen + digits >= Math.pow(10, digits)) {
- digits += 1;
- }
- const len = digits + byteLen;
- return len + s;
- }
- static parse(str, ex, g = false) {
- return new Pax(merge(parseKV(str), ex), g);
- }
-}
-exports.Pax = Pax;
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
- .replace(/\n$/, '')
- .split('\n')
- .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
- const n = parseInt(line, 10);
- // XXX Values with \n in them will fail this.
- // Refactor to not be a naive line-by-line parse.
- if (n !== Buffer.byteLength(line) + 1) {
- return set;
- }
- line = line.slice((n + ' ').length);
- const kv = line.split('=');
- const r = kv.shift();
- if (!r) {
- return set;
- }
- const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
- const v = kv.join('=');
- set[k] =
- /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
- new Date(Number(v) * 1000)
- : /^[0-9]+$/.test(v) ? +v
- : v;
- return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/read-entry.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/read-entry.js
deleted file mode 100644
index 15e2d55c938a43..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/read-entry.js
+++ /dev/null
@@ -1,140 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ReadEntry = void 0;
-const minipass_1 = require("minipass");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class ReadEntry extends minipass_1.Minipass {
- extended;
- globalExtended;
- header;
- startBlockSize;
- blockRemain;
- remain;
- type;
- meta = false;
- ignore = false;
- path;
- mode;
- uid;
- gid;
- uname;
- gname;
- size = 0;
- mtime;
- atime;
- ctime;
- linkpath;
- dev;
- ino;
- nlink;
- invalid = false;
- absolute;
- unsupported = false;
- constructor(header, ex, gex) {
- super({});
- // read entries always start life paused. this is to avoid the
- // situation where Minipass's auto-ending empty streams results
- // in an entry ending before we're ready for it.
- this.pause();
- this.extended = ex;
- this.globalExtended = gex;
- this.header = header;
- /* c8 ignore start */
- this.remain = header.size ?? 0;
- /* c8 ignore stop */
- this.startBlockSize = 512 * Math.ceil(this.remain / 512);
- this.blockRemain = this.startBlockSize;
- this.type = header.type;
- switch (this.type) {
- case 'File':
- case 'OldFile':
- case 'Link':
- case 'SymbolicLink':
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'Directory':
- case 'FIFO':
- case 'ContiguousFile':
- case 'GNUDumpDir':
- break;
- case 'NextFileHasLongLinkpath':
- case 'NextFileHasLongPath':
- case 'OldGnuLongPath':
- case 'GlobalExtendedHeader':
- case 'ExtendedHeader':
- case 'OldExtendedHeader':
- this.meta = true;
- break;
- // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
- // it may be worth doing the same, but with a warning.
- default:
- this.ignore = true;
- }
- /* c8 ignore start */
- if (!header.path) {
- throw new Error('no path provided for tar.ReadEntry');
- }
- /* c8 ignore stop */
- this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
- this.mode = header.mode;
- if (this.mode) {
- this.mode = this.mode & 0o7777;
- }
- this.uid = header.uid;
- this.gid = header.gid;
- this.uname = header.uname;
- this.gname = header.gname;
- this.size = this.remain;
- this.mtime = header.mtime;
- this.atime = header.atime;
- this.ctime = header.ctime;
- /* c8 ignore start */
- this.linkpath =
- header.linkpath ?
- (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
- : undefined;
- /* c8 ignore stop */
- this.uname = header.uname;
- this.gname = header.gname;
- if (ex) {
- this.#slurp(ex);
- }
- if (gex) {
- this.#slurp(gex, true);
- }
- }
- write(data) {
- const writeLen = data.length;
- if (writeLen > this.blockRemain) {
- throw new Error('writing more to entry than is appropriate');
- }
- const r = this.remain;
- const br = this.blockRemain;
- this.remain = Math.max(0, r - writeLen);
- this.blockRemain = Math.max(0, br - writeLen);
- if (this.ignore) {
- return true;
- }
- if (r >= writeLen) {
- return super.write(data);
- }
- // r < writeLen
- return super.write(data.subarray(0, r));
- }
- #slurp(ex, gex = false) {
- if (ex.path)
- ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
- if (ex.linkpath)
- ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
- Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
- // we slurp in everything except for the path attribute in
- // a global extended header, because that's weird. Also, any
- // null/undefined values are ignored.
- return !(v === null ||
- v === undefined ||
- (k === 'path' && gex));
- })));
- }
-}
-exports.ReadEntry = ReadEntry;
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js
deleted file mode 100644
index 22eff246d4d75f..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js
+++ /dev/null
@@ -1,231 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.replace = void 0;
-// tar -r
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const header_js_1 = require("./header.js");
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const options_js_1 = require("./options.js");
-const pack_js_1 = require("./pack.js");
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
- const p = new pack_js_1.PackSync(opt);
- let threw = true;
- let fd;
- let position;
- try {
- try {
- fd = node_fs_1.default.openSync(opt.file, 'r+');
- }
- catch (er) {
- if (er?.code === 'ENOENT') {
- fd = node_fs_1.default.openSync(opt.file, 'w+');
- }
- else {
- throw er;
- }
- }
- const st = node_fs_1.default.fstatSync(fd);
- const headBuf = Buffer.alloc(512);
- POSITION: for (position = 0; position < st.size; position += 512) {
- for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
- bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
- if (position === 0 &&
- headBuf[0] === 0x1f &&
- headBuf[1] === 0x8b) {
- throw new Error('cannot append to compressed archives');
- }
- if (!bytes) {
- break POSITION;
- }
- }
- const h = new header_js_1.Header(headBuf);
- if (!h.cksumValid) {
- break;
- }
- const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
- if (position + entryBlockSize + 512 > st.size) {
- break;
- }
- // the 512 for the header we just parsed will be added as well
- // also jump ahead all the blocks for the body
- position += entryBlockSize;
- if (opt.mtimeCache && h.mtime) {
- opt.mtimeCache.set(String(h.path), h.mtime);
- }
- }
- threw = false;
- streamSync(opt, p, position, fd, files);
- }
- finally {
- if (threw) {
- try {
- node_fs_1.default.closeSync(fd);
- }
- catch (er) { }
- }
- }
-};
-const streamSync = (opt, p, position, fd, files) => {
- const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
- fd: fd,
- start: position,
- });
- p.pipe(stream);
- addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
- files = Array.from(files);
- const p = new pack_js_1.Pack(opt);
- const getPos = (fd, size, cb_) => {
- const cb = (er, pos) => {
- if (er) {
- node_fs_1.default.close(fd, _ => cb_(er));
- }
- else {
- cb_(null, pos);
- }
- };
- let position = 0;
- if (size === 0) {
- return cb(null, 0);
- }
- let bufPos = 0;
- const headBuf = Buffer.alloc(512);
- const onread = (er, bytes) => {
- if (er || typeof bytes === 'undefined') {
- return cb(er);
- }
- bufPos += bytes;
- if (bufPos < 512 && bytes) {
- return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
- }
- if (position === 0 &&
- headBuf[0] === 0x1f &&
- headBuf[1] === 0x8b) {
- return cb(new Error('cannot append to compressed archives'));
- }
- // truncated header
- if (bufPos < 512) {
- return cb(null, position);
- }
- const h = new header_js_1.Header(headBuf);
- if (!h.cksumValid) {
- return cb(null, position);
- }
- /* c8 ignore next */
- const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
- if (position + entryBlockSize + 512 > size) {
- return cb(null, position);
- }
- position += entryBlockSize + 512;
- if (position >= size) {
- return cb(null, position);
- }
- if (opt.mtimeCache && h.mtime) {
- opt.mtimeCache.set(String(h.path), h.mtime);
- }
- bufPos = 0;
- node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
- };
- node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
- };
- const promise = new Promise((resolve, reject) => {
- p.on('error', reject);
- let flag = 'r+';
- const onopen = (er, fd) => {
- if (er && er.code === 'ENOENT' && flag === 'r+') {
- flag = 'w+';
- return node_fs_1.default.open(opt.file, flag, onopen);
- }
- if (er || !fd) {
- return reject(er);
- }
- node_fs_1.default.fstat(fd, (er, st) => {
- if (er) {
- return node_fs_1.default.close(fd, () => reject(er));
- }
- getPos(fd, st.size, (er, position) => {
- if (er) {
- return reject(er);
- }
- const stream = new fs_minipass_1.WriteStream(opt.file, {
- fd: fd,
- start: position,
- });
- p.pipe(stream);
- stream.on('error', reject);
- stream.on('close', resolve);
- addFilesAsync(p, files);
- });
- });
- };
- node_fs_1.default.open(opt.file, flag, onopen);
- });
- return promise;
-};
-const addFilesSync = (p, files) => {
- files.forEach(file => {
- if (file.charAt(0) === '@') {
- (0, list_js_1.list)({
- file: node_path_1.default.resolve(p.cwd, file.slice(1)),
- sync: true,
- noResume: true,
- onReadEntry: entry => p.add(entry),
- });
- }
- else {
- p.add(file);
- }
- });
- p.end();
-};
-const addFilesAsync = async (p, files) => {
- for (let i = 0; i < files.length; i++) {
- const file = String(files[i]);
- if (file.charAt(0) === '@') {
- await (0, list_js_1.list)({
- file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
- noResume: true,
- onReadEntry: entry => p.add(entry),
- });
- }
- else {
- p.add(file);
- }
- }
- p.end();
-};
-exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync,
-/* c8 ignore start */
-() => {
- throw new TypeError('file is required');
-}, () => {
- throw new TypeError('file is required');
-},
-/* c8 ignore stop */
-(opt, entries) => {
- if (!(0, options_js_1.isFile)(opt)) {
- throw new TypeError('file is required');
- }
- if (opt.gzip ||
- opt.brotli ||
- opt.file.endsWith('.br') ||
- opt.file.endsWith('.tbr')) {
- throw new TypeError('cannot append to compressed archives');
- }
- if (!entries?.length) {
- throw new TypeError('no paths specified to add/replace');
- }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-absolute-path.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-absolute-path.js
deleted file mode 100644
index bb7639c35a1104..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-absolute-path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripAbsolutePath = void 0;
-// unix absolute paths are also absolute on win32, so we use this for both
-const node_path_1 = require("node:path");
-const { isAbsolute, parse } = node_path_1.win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-const stripAbsolutePath = (path) => {
- let r = '';
- let parsed = parse(path);
- while (isAbsolute(path) || parsed.root) {
- // windows will think that //x/y/z has a "root" of //x/y/
- // but strip the //?/C:/ off of //?/C:/path
- const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
- '/'
- : parsed.root;
- path = path.slice(root.length);
- r += root;
- parsed = parse(path);
- }
- return [r, path];
-};
-exports.stripAbsolutePath = stripAbsolutePath;
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
deleted file mode 100644
index 6fa74ad6a4ac93..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripTrailingSlashes = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const stripTrailingSlashes = (str) => {
- let i = str.length - 1;
- let slashesStart = -1;
- while (i > -1 && str.charAt(i) === '/') {
- slashesStart = i;
- i--;
- }
- return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-exports.stripTrailingSlashes = stripTrailingSlashes;
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/symlink-error.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/symlink-error.js
deleted file mode 100644
index cc19ac1a2e3c6b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/symlink-error.js
+++ /dev/null
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SymlinkError = void 0;
-class SymlinkError extends Error {
- path;
- symlink;
- syscall = 'symlink';
- code = 'TAR_SYMLINK_ERROR';
- constructor(symlink, path) {
- super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
- this.symlink = symlink;
- this.path = path;
- }
- get name() {
- return 'SymlinkError';
- }
-}
-exports.SymlinkError = SymlinkError;
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/types.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/types.js
deleted file mode 100644
index cb9b684e843b72..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/types.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.code = exports.name = exports.isName = exports.isCode = void 0;
-const isCode = (c) => exports.name.has(c);
-exports.isCode = isCode;
-const isName = (c) => exports.code.has(c);
-exports.isName = isName;
-// map types from key to human-friendly name
-exports.name = new Map([
- ['0', 'File'],
- // same as File
- ['', 'OldFile'],
- ['1', 'Link'],
- ['2', 'SymbolicLink'],
- // Devices and FIFOs aren't fully supported
- // they are parsed, but skipped when unpacking
- ['3', 'CharacterDevice'],
- ['4', 'BlockDevice'],
- ['5', 'Directory'],
- ['6', 'FIFO'],
- // same as File
- ['7', 'ContiguousFile'],
- // pax headers
- ['g', 'GlobalExtendedHeader'],
- ['x', 'ExtendedHeader'],
- // vendor-specific stuff
- // skip
- ['A', 'SolarisACL'],
- // like 5, but with data, which should be skipped
- ['D', 'GNUDumpDir'],
- // metadata only, skip
- ['I', 'Inode'],
- // data = link path of next file
- ['K', 'NextFileHasLongLinkpath'],
- // data = path of next file
- ['L', 'NextFileHasLongPath'],
- // skip
- ['M', 'ContinuationFile'],
- // like L
- ['N', 'OldGnuLongPath'],
- // skip
- ['S', 'SparseFile'],
- // skip
- ['V', 'TapeVolumeHeader'],
- // like x
- ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js
deleted file mode 100644
index edf8acbb18c408..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js
+++ /dev/null
@@ -1,919 +0,0 @@
-"use strict";
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UnpackSync = exports.Unpack = void 0;
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_assert_1 = __importDefault(require("node:assert"));
-const node_crypto_1 = require("node:crypto");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const get_write_flag_js_1 = require("./get-write-flag.js");
-const mkdir_js_1 = require("./mkdir.js");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const parse_js_1 = require("./parse.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const wc = __importStar(require("./winchars.js"));
-const path_reservations_js_1 = require("./path-reservations.js");
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file. It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
- if (!isWindows) {
- return node_fs_1.default.unlink(path, cb);
- }
- const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
- node_fs_1.default.rename(path, name, er => {
- if (er) {
- return cb(er);
- }
- node_fs_1.default.unlink(name, cb);
- });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
- if (!isWindows) {
- return node_fs_1.default.unlinkSync(path);
- }
- const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
- node_fs_1.default.renameSync(path, name);
- node_fs_1.default.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
- : b !== undefined && b === b >>> 0 ? b
- : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation. Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
- abs = cacheKeyNormalize(abs);
- for (const path of cache.keys()) {
- const pnorm = cacheKeyNormalize(path);
- if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
- cache.delete(path);
- }
- }
-};
-const dropCache = (cache) => {
- for (const key of cache.keys()) {
- cache.delete(key);
- }
-};
-class Unpack extends parse_js_1.Parser {
- [ENDED] = false;
- [CHECKED_CWD] = false;
- [PENDING] = 0;
- reservations = new path_reservations_js_1.PathReservations();
- transform;
- writable = true;
- readable = false;
- dirCache;
- uid;
- gid;
- setOwner;
- preserveOwner;
- processGid;
- processUid;
- maxDepth;
- forceChown;
- win32;
- newer;
- keep;
- noMtime;
- preservePaths;
- unlink;
- cwd;
- strip;
- processUmask;
- umask;
- dmode;
- fmode;
- chmod;
- constructor(opt = {}) {
- opt.ondone = () => {
- this[ENDED] = true;
- this[MAYBECLOSE]();
- };
- super(opt);
- this.transform = opt.transform;
- this.dirCache = opt.dirCache || new Map();
- this.chmod = !!opt.chmod;
- if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
- // need both or neither
- if (typeof opt.uid !== 'number' ||
- typeof opt.gid !== 'number') {
- throw new TypeError('cannot set owner without number uid and gid');
- }
- if (opt.preserveOwner) {
- throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
- }
- this.uid = opt.uid;
- this.gid = opt.gid;
- this.setOwner = true;
- }
- else {
- this.uid = undefined;
- this.gid = undefined;
- this.setOwner = false;
- }
- // default true for root
- if (opt.preserveOwner === undefined &&
- typeof opt.uid !== 'number') {
- this.preserveOwner = !!(process.getuid && process.getuid() === 0);
- }
- else {
- this.preserveOwner = !!opt.preserveOwner;
- }
- this.processUid =
- (this.preserveOwner || this.setOwner) && process.getuid ?
- process.getuid()
- : undefined;
- this.processGid =
- (this.preserveOwner || this.setOwner) && process.getgid ?
- process.getgid()
- : undefined;
- // prevent excessively deep nesting of subfolders
- // set to `Infinity` to remove this restriction
- this.maxDepth =
- typeof opt.maxDepth === 'number' ?
- opt.maxDepth
- : DEFAULT_MAX_DEPTH;
- // mostly just for testing, but useful in some cases.
- // Forcibly trigger a chown on every entry, no matter what
- this.forceChown = opt.forceChown === true;
- // turn >| in filenames into 0xf000-higher encoded forms
- this.win32 = !!opt.win32 || isWindows;
- // do not unpack over files that are newer than what's in the archive
- this.newer = !!opt.newer;
- // do not unpack over ANY files
- this.keep = !!opt.keep;
- // do not set mtime/atime of extracted entries
- this.noMtime = !!opt.noMtime;
- // allow .., absolute path entries, and unpacking through symlinks
- // without this, warn and skip .., relativize absolutes, and error
- // on symlinks in extraction path
- this.preservePaths = !!opt.preservePaths;
- // unlink files and links before writing. This breaks existing hard
- // links, and removes symlink directories rather than erroring
- this.unlink = !!opt.unlink;
- this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(opt.cwd || process.cwd()));
- this.strip = Number(opt.strip) || 0;
- // if we're not chmodding, then we don't need the process umask
- this.processUmask =
- !this.chmod ? 0
- : typeof opt.processUmask === 'number' ? opt.processUmask
- : process.umask();
- this.umask =
- typeof opt.umask === 'number' ? opt.umask : this.processUmask;
- // default mode for dirs created as parents
- this.dmode = opt.dmode || 0o0777 & ~this.umask;
- this.fmode = opt.fmode || 0o0666 & ~this.umask;
- this.on('entry', entry => this[ONENTRY](entry));
- }
- // a bad or damaged archive is a warning for Parser, but an error
- // when extracting. Mark those errors as unrecoverable, because
- // the Unpack contract cannot be met.
- warn(code, msg, data = {}) {
- if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
- data.recoverable = false;
- }
- return super.warn(code, msg, data);
- }
- [MAYBECLOSE]() {
- if (this[ENDED] && this[PENDING] === 0) {
- this.emit('prefinish');
- this.emit('finish');
- this.emit('end');
- }
- }
- [CHECKPATH](entry) {
- const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
- const parts = p.split('/');
- if (this.strip) {
- if (parts.length < this.strip) {
- return false;
- }
- if (entry.type === 'Link') {
- const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
- if (linkparts.length >= this.strip) {
- entry.linkpath = linkparts.slice(this.strip).join('/');
- }
- else {
- return false;
- }
- }
- parts.splice(0, this.strip);
- entry.path = parts.join('/');
- }
- if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
- this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
- entry,
- path: p,
- depth: parts.length,
- maxDepth: this.maxDepth,
- });
- return false;
- }
- if (!this.preservePaths) {
- if (parts.includes('..') ||
- /* c8 ignore next */
- (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
- this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
- entry,
- path: p,
- });
- return false;
- }
- // strip off the root
- const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
- if (root) {
- entry.path = String(stripped);
- this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
- entry,
- path: p,
- });
- }
- }
- if (node_path_1.default.isAbsolute(entry.path)) {
- entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
- }
- else {
- entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
- }
- // if we somehow ended up with a path that escapes the cwd, and we are
- // not in preservePaths mode, then something is fishy! This should have
- // been prevented above, so ignore this for coverage.
- /* c8 ignore start - defense in depth */
- if (!this.preservePaths &&
- typeof entry.absolute === 'string' &&
- entry.absolute.indexOf(this.cwd + '/') !== 0 &&
- entry.absolute !== this.cwd) {
- this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
- entry,
- path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
- resolvedPath: entry.absolute,
- cwd: this.cwd,
- });
- return false;
- }
- /* c8 ignore stop */
- // an archive can set properties on the extraction directory, but it
- // may not replace the cwd with a different kind of thing entirely.
- if (entry.absolute === this.cwd &&
- entry.type !== 'Directory' &&
- entry.type !== 'GNUDumpDir') {
- return false;
- }
- // only encode : chars that aren't drive letter indicators
- if (this.win32) {
- const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
- entry.absolute =
- aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
- const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
- entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
- }
- return true;
- }
- [ONENTRY](entry) {
- if (!this[CHECKPATH](entry)) {
- return entry.resume();
- }
- node_assert_1.default.equal(typeof entry.absolute, 'string');
- switch (entry.type) {
- case 'Directory':
- case 'GNUDumpDir':
- if (entry.mode) {
- entry.mode = entry.mode | 0o700;
- }
- // eslint-disable-next-line no-fallthrough
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- case 'Link':
- case 'SymbolicLink':
- return this[CHECKFS](entry);
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'FIFO':
- default:
- return this[UNSUPPORTED](entry);
- }
- }
- [ONERROR](er, entry) {
- // Cwd has to exist, or else nothing works. That's serious.
- // Other errors are warnings, which raise the error in strict
- // mode, but otherwise continue on.
- if (er.name === 'CwdError') {
- this.emit('error', er);
- }
- else {
- this.warn('TAR_ENTRY_ERROR', er, { entry });
- this[UNPEND]();
- entry.resume();
- }
- }
- [MKDIR](dir, mode, cb) {
- (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode,
- }, cb);
- }
- [DOCHOWN](entry) {
- // in preserve owner mode, chown if the entry doesn't match process
- // in set owner mode, chown if setting doesn't match process
- return (this.forceChown ||
- (this.preserveOwner &&
- ((typeof entry.uid === 'number' &&
- entry.uid !== this.processUid) ||
- (typeof entry.gid === 'number' &&
- entry.gid !== this.processGid))) ||
- (typeof this.uid === 'number' &&
- this.uid !== this.processUid) ||
- (typeof this.gid === 'number' && this.gid !== this.processGid));
- }
- [UID](entry) {
- return uint32(this.uid, entry.uid, this.processUid);
- }
- [GID](entry) {
- return uint32(this.gid, entry.gid, this.processGid);
- }
- [FILE](entry, fullyDone) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.fmode;
- const stream = new fsm.WriteStream(String(entry.absolute), {
- // slight lie, but it can be numeric flags
- flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
- mode: mode,
- autoClose: false,
- });
- stream.on('error', (er) => {
- if (stream.fd) {
- node_fs_1.default.close(stream.fd, () => { });
- }
- // flush all the data out so that we aren't left hanging
- // if the error wasn't actually fatal. otherwise the parse
- // is blocked, and we never proceed.
- stream.write = () => true;
- this[ONERROR](er, entry);
- fullyDone();
- });
- let actions = 1;
- const done = (er) => {
- if (er) {
- /* c8 ignore start - we should always have a fd by now */
- if (stream.fd) {
- node_fs_1.default.close(stream.fd, () => { });
- }
- /* c8 ignore stop */
- this[ONERROR](er, entry);
- fullyDone();
- return;
- }
- if (--actions === 0) {
- if (stream.fd !== undefined) {
- node_fs_1.default.close(stream.fd, er => {
- if (er) {
- this[ONERROR](er, entry);
- }
- else {
- this[UNPEND]();
- }
- fullyDone();
- });
- }
- }
- };
- stream.on('finish', () => {
- // if futimes fails, try utimes
- // if utimes fails, fail with the original error
- // same for fchown/chown
- const abs = String(entry.absolute);
- const fd = stream.fd;
- if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
- actions++;
- const atime = entry.atime || new Date();
- const mtime = entry.mtime;
- node_fs_1.default.futimes(fd, atime, mtime, er => er ?
- node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
- : done());
- }
- if (typeof fd === 'number' && this[DOCHOWN](entry)) {
- actions++;
- const uid = this[UID](entry);
- const gid = this[GID](entry);
- if (typeof uid === 'number' && typeof gid === 'number') {
- node_fs_1.default.fchown(fd, uid, gid, er => er ?
- node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
- : done());
- }
- }
- done();
- });
- const tx = this.transform ? this.transform(entry) || entry : entry;
- if (tx !== entry) {
- tx.on('error', (er) => {
- this[ONERROR](er, entry);
- fullyDone();
- });
- entry.pipe(tx);
- }
- tx.pipe(stream);
- }
- [DIRECTORY](entry, fullyDone) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.dmode;
- this[MKDIR](String(entry.absolute), mode, er => {
- if (er) {
- this[ONERROR](er, entry);
- fullyDone();
- return;
- }
- let actions = 1;
- const done = () => {
- if (--actions === 0) {
- fullyDone();
- this[UNPEND]();
- entry.resume();
- }
- };
- if (entry.mtime && !this.noMtime) {
- actions++;
- node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
- }
- if (this[DOCHOWN](entry)) {
- actions++;
- node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
- }
- done();
- });
- }
- [UNSUPPORTED](entry) {
- entry.unsupported = true;
- this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
- entry.resume();
- }
- [SYMLINK](entry, done) {
- this[LINK](entry, String(entry.linkpath), 'symlink', done);
- }
- [HARDLINK](entry, done) {
- const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
- this[LINK](entry, linkpath, 'link', done);
- }
- [PEND]() {
- this[PENDING]++;
- }
- [UNPEND]() {
- this[PENDING]--;
- this[MAYBECLOSE]();
- }
- [SKIP](entry) {
- this[UNPEND]();
- entry.resume();
- }
- // Check if we can reuse an existing filesystem entry safely and
- // overwrite it, rather than unlinking and recreating
- // Windows doesn't report a useful nlink, so we just never reuse entries
- [ISREUSABLE](entry, st) {
- return (entry.type === 'File' &&
- !this.unlink &&
- st.isFile() &&
- st.nlink <= 1 &&
- !isWindows);
- }
- // check if a thing is there, and if so, try to clobber it
- [CHECKFS](entry) {
- this[PEND]();
- const paths = [entry.path];
- if (entry.linkpath) {
- paths.push(entry.linkpath);
- }
- this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
- }
- [PRUNECACHE](entry) {
- // if we are not creating a directory, and the path is in the dirCache,
- // then that means we are about to delete the directory we created
- // previously, and it is no longer going to be a directory, and neither
- // is any of its children.
- // If a symbolic link is encountered, all bets are off. There is no
- // reasonable way to sanitize the cache in such a way we will be able to
- // avoid having filesystem collisions. If this happens with a non-symlink
- // entry, it'll just fail to unpack, but a symlink to a directory, using an
- // 8.3 shortname or certain unicode attacks, can evade detection and lead
- // to arbitrary writes to anywhere on the system.
- if (entry.type === 'SymbolicLink') {
- dropCache(this.dirCache);
- }
- else if (entry.type !== 'Directory') {
- pruneCache(this.dirCache, String(entry.absolute));
- }
- }
- [CHECKFS2](entry, fullyDone) {
- this[PRUNECACHE](entry);
- const done = (er) => {
- this[PRUNECACHE](entry);
- fullyDone(er);
- };
- const checkCwd = () => {
- this[MKDIR](this.cwd, this.dmode, er => {
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- this[CHECKED_CWD] = true;
- start();
- });
- };
- const start = () => {
- if (entry.absolute !== this.cwd) {
- const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
- if (parent !== this.cwd) {
- return this[MKDIR](parent, this.dmode, er => {
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- afterMakeParent();
- });
- }
- }
- afterMakeParent();
- };
- const afterMakeParent = () => {
- node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
- if (st &&
- (this.keep ||
- /* c8 ignore next */
- (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
- this[SKIP](entry);
- done();
- return;
- }
- if (lstatEr || this[ISREUSABLE](entry, st)) {
- return this[MAKEFS](null, entry, done);
- }
- if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- const needChmod = this.chmod &&
- entry.mode &&
- (st.mode & 0o7777) !== entry.mode;
- const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
- if (!needChmod) {
- return afterChmod();
- }
- return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
- }
- // Not a dir entry, have to remove it.
- // NB: the only way to end up with an entry that is the cwd
- // itself, in such a way that == does not detect, is a
- // tricky windows absolute path with UNC or 8.3 parts (and
- // preservePaths:true, or else it will have been stripped).
- // In that case, the user has opted out of path protections
- // explicitly, so if they blow away the cwd, c'est la vie.
- if (entry.absolute !== this.cwd) {
- return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
- }
- }
- // not a dir, and not reusable
- // don't remove if the cwd, we want that error
- if (entry.absolute === this.cwd) {
- return this[MAKEFS](null, entry, done);
- }
- unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
- });
- };
- if (this[CHECKED_CWD]) {
- start();
- }
- else {
- checkCwd();
- }
- }
- [MAKEFS](er, entry, done) {
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- switch (entry.type) {
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- return this[FILE](entry, done);
- case 'Link':
- return this[HARDLINK](entry, done);
- case 'SymbolicLink':
- return this[SYMLINK](entry, done);
- case 'Directory':
- case 'GNUDumpDir':
- return this[DIRECTORY](entry, done);
- }
- }
- [LINK](entry, linkpath, link, done) {
- // XXX: get the type ('symlink' or 'junction') for windows
- node_fs_1.default[link](linkpath, String(entry.absolute), er => {
- if (er) {
- this[ONERROR](er, entry);
- }
- else {
- this[UNPEND]();
- entry.resume();
- }
- done();
- });
- }
-}
-exports.Unpack = Unpack;
-const callSync = (fn) => {
- try {
- return [null, fn()];
- }
- catch (er) {
- return [er, null];
- }
-};
-class UnpackSync extends Unpack {
- sync = true;
- [MAKEFS](er, entry) {
- return super[MAKEFS](er, entry, () => { });
- }
- [CHECKFS](entry) {
- this[PRUNECACHE](entry);
- if (!this[CHECKED_CWD]) {
- const er = this[MKDIR](this.cwd, this.dmode);
- if (er) {
- return this[ONERROR](er, entry);
- }
- this[CHECKED_CWD] = true;
- }
- // don't bother to make the parent if the current entry is the cwd,
- // we've already checked it.
- if (entry.absolute !== this.cwd) {
- const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
- if (parent !== this.cwd) {
- const mkParent = this[MKDIR](parent, this.dmode);
- if (mkParent) {
- return this[ONERROR](mkParent, entry);
- }
- }
- }
- const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
- if (st &&
- (this.keep ||
- /* c8 ignore next */
- (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
- return this[SKIP](entry);
- }
- if (lstatEr || this[ISREUSABLE](entry, st)) {
- return this[MAKEFS](null, entry);
- }
- if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- const needChmod = this.chmod &&
- entry.mode &&
- (st.mode & 0o7777) !== entry.mode;
- const [er] = needChmod ?
- callSync(() => {
- node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
- })
- : [];
- return this[MAKEFS](er, entry);
- }
- // not a dir entry, have to remove it
- const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
- this[MAKEFS](er, entry);
- }
- // not a dir, and not reusable.
- // don't remove if it's the cwd, since we want that error.
- const [er] = entry.absolute === this.cwd ?
- []
- : callSync(() => unlinkFileSync(String(entry.absolute)));
- this[MAKEFS](er, entry);
- }
- [FILE](entry, done) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.fmode;
- const oner = (er) => {
- let closeError;
- try {
- node_fs_1.default.closeSync(fd);
- }
- catch (e) {
- closeError = e;
- }
- if (er || closeError) {
- this[ONERROR](er || closeError, entry);
- }
- done();
- };
- let fd;
- try {
- fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
- }
- catch (er) {
- return oner(er);
- }
- const tx = this.transform ? this.transform(entry) || entry : entry;
- if (tx !== entry) {
- tx.on('error', (er) => this[ONERROR](er, entry));
- entry.pipe(tx);
- }
- tx.on('data', (chunk) => {
- try {
- node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
- }
- catch (er) {
- oner(er);
- }
- });
- tx.on('end', () => {
- let er = null;
- // try both, falling futimes back to utimes
- // if either fails, handle the first error
- if (entry.mtime && !this.noMtime) {
- const atime = entry.atime || new Date();
- const mtime = entry.mtime;
- try {
- node_fs_1.default.futimesSync(fd, atime, mtime);
- }
- catch (futimeser) {
- try {
- node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
- }
- catch (utimeser) {
- er = futimeser;
- }
- }
- }
- if (this[DOCHOWN](entry)) {
- const uid = this[UID](entry);
- const gid = this[GID](entry);
- try {
- node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
- }
- catch (fchowner) {
- try {
- node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
- }
- catch (chowner) {
- er = er || fchowner;
- }
- }
- }
- oner(er);
- });
- }
- [DIRECTORY](entry, done) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.dmode;
- const er = this[MKDIR](String(entry.absolute), mode);
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- if (entry.mtime && !this.noMtime) {
- try {
- node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
- /* c8 ignore next */
- }
- catch (er) { }
- }
- if (this[DOCHOWN](entry)) {
- try {
- node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
- }
- catch (er) { }
- }
- done();
- entry.resume();
- }
- [MKDIR](dir, mode) {
- try {
- return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode,
- });
- }
- catch (er) {
- return er;
- }
- }
- [LINK](entry, linkpath, link, done) {
- const ls = `${link}Sync`;
- try {
- node_fs_1.default[ls](linkpath, String(entry.absolute));
- done();
- entry.resume();
- }
- catch (er) {
- return this[ONERROR](er, entry);
- }
- }
-}
-exports.UnpackSync = UnpackSync;
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/update.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/update.js
deleted file mode 100644
index 7687896f4bfeeb..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/update.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-// tar -u
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.update = void 0;
-const make_command_js_1 = require("./make-command.js");
-const replace_js_1 = require("./replace.js");
-// just call tar.r with the filter and mtimeCache
-exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => {
- replace_js_1.replace.validate?.(opt, entries);
- mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
- const filter = opt.filter;
- if (!opt.mtimeCache) {
- opt.mtimeCache = new Map();
- }
- opt.filter =
- filter ?
- (path, stat) => filter(path, stat) &&
- !(
- /* c8 ignore start */
- ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
- (stat.mtime ?? 0))
- /* c8 ignore stop */
- )
- : (path, stat) => !(
- /* c8 ignore start */
- ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
- (stat.mtime ?? 0))
- /* c8 ignore stop */
- );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/warn-method.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/warn-method.js
deleted file mode 100644
index f25502776e36a3..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/warn-method.js
+++ /dev/null
@@ -1,31 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.warnMethod = void 0;
-const warnMethod = (self, code, message, data = {}) => {
- if (self.file) {
- data.file = self.file;
- }
- if (self.cwd) {
- data.cwd = self.cwd;
- }
- data.code =
- (message instanceof Error &&
- message.code) ||
- code;
- data.tarCode = code;
- if (!self.strict && data.recoverable !== false) {
- if (message instanceof Error) {
- data = Object.assign(message, data);
- message = message.message;
- }
- self.emit('warn', code, message, data);
- }
- else if (message instanceof Error) {
- self.emit('error', Object.assign(message, data));
- }
- else {
- self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
- }
-};
-exports.warnMethod = warnMethod;
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/winchars.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/winchars.js
deleted file mode 100644
index c0a4405812929e..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/winchars.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.decode = exports.encode = void 0;
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-exports.encode = encode;
-const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-exports.decode = decode;
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/create.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/create.js
deleted file mode 100644
index 512a9911d70d5b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/create.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import path from 'node:path';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Pack, PackSync } from './pack.js';
-const createFileSync = (opt, files) => {
- const p = new PackSync(opt);
- const stream = new WriteStreamSync(opt.file, {
- mode: opt.mode || 0o666,
- });
- p.pipe(stream);
- addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
- const p = new Pack(opt);
- const stream = new WriteStream(opt.file, {
- mode: opt.mode || 0o666,
- });
- p.pipe(stream);
- const promise = new Promise((res, rej) => {
- stream.on('error', rej);
- stream.on('close', res);
- p.on('error', rej);
- });
- addFilesAsync(p, files);
- return promise;
-};
-const addFilesSync = (p, files) => {
- files.forEach(file => {
- if (file.charAt(0) === '@') {
- list({
- file: path.resolve(p.cwd, file.slice(1)),
- sync: true,
- noResume: true,
- onReadEntry: entry => p.add(entry),
- });
- }
- else {
- p.add(file);
- }
- });
- p.end();
-};
-const addFilesAsync = async (p, files) => {
- for (let i = 0; i < files.length; i++) {
- const file = String(files[i]);
- if (file.charAt(0) === '@') {
- await list({
- file: path.resolve(String(p.cwd), file.slice(1)),
- noResume: true,
- onReadEntry: entry => {
- p.add(entry);
- },
- });
- }
- else {
- p.add(file);
- }
- }
- p.end();
-};
-const createSync = (opt, files) => {
- const p = new PackSync(opt);
- addFilesSync(p, files);
- return p;
-};
-const createAsync = (opt, files) => {
- const p = new Pack(opt);
- addFilesAsync(p, files);
- return p;
-};
-export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
- if (!files?.length) {
- throw new TypeError('no paths specified to add to archive');
- }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/cwd-error.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/cwd-error.js
deleted file mode 100644
index 289a066b8e0317..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/cwd-error.js
+++ /dev/null
@@ -1,14 +0,0 @@
-export class CwdError extends Error {
- path;
- code;
- syscall = 'chdir';
- constructor(path, code) {
- super(`${code}: Cannot cd into '${path}'`);
- this.path = path;
- this.code = code;
- }
- get name() {
- return 'CwdError';
- }
-}
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/extract.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/extract.js
deleted file mode 100644
index 2274feef26e78f..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/extract.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// tar -x
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { filesFilter } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Unpack, UnpackSync } from './unpack.js';
-const extractFileSync = (opt) => {
- const u = new UnpackSync(opt);
- const file = opt.file;
- const stat = fs.statSync(file);
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- const readSize = opt.maxReadSize || 16 * 1024 * 1024;
- const stream = new fsm.ReadStreamSync(file, {
- readSize: readSize,
- size: stat.size,
- });
- stream.pipe(u);
-};
-const extractFile = (opt, _) => {
- const u = new Unpack(opt);
- const readSize = opt.maxReadSize || 16 * 1024 * 1024;
- const file = opt.file;
- const p = new Promise((resolve, reject) => {
- u.on('error', reject);
- u.on('close', resolve);
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- fs.stat(file, (er, stat) => {
- if (er) {
- reject(er);
- }
- else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size,
- });
- stream.on('error', reject);
- stream.pipe(u);
- }
- });
- });
- return p;
-};
-export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => {
- if (files?.length)
- filesFilter(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/get-write-flag.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/get-write-flag.js
deleted file mode 100644
index 2c7f3e8b28fdaf..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/get-write-flag.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb. This is a fairly low limit, but avoids making
-// things slower in some cases. Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-import fs from 'fs';
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
- fs.constants.UV_FS_O_FILEMAP ||
- 0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-export const getWriteFlag = !fMapEnabled ?
- () => 'w'
- : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/header.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/header.js
deleted file mode 100644
index e15192b14b16e1..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/header.js
+++ /dev/null
@@ -1,279 +0,0 @@
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-import { posix as pathModule } from 'node:path';
-import * as large from './large-numbers.js';
-import * as types from './types.js';
-export class Header {
- cksumValid = false;
- needPax = false;
- nullBlock = false;
- block;
- path;
- mode;
- uid;
- gid;
- size;
- cksum;
- #type = 'Unsupported';
- linkpath;
- uname;
- gname;
- devmaj = 0;
- devmin = 0;
- atime;
- ctime;
- mtime;
- charset;
- comment;
- constructor(data, off = 0, ex, gex) {
- if (Buffer.isBuffer(data)) {
- this.decode(data, off || 0, ex, gex);
- }
- else if (data) {
- this.#slurp(data);
- }
- }
- decode(buf, off, ex, gex) {
- if (!off) {
- off = 0;
- }
- if (!buf || !(buf.length >= off + 512)) {
- throw new Error('need 512 bytes for header');
- }
- this.path = decString(buf, off, 100);
- this.mode = decNumber(buf, off + 100, 8);
- this.uid = decNumber(buf, off + 108, 8);
- this.gid = decNumber(buf, off + 116, 8);
- this.size = decNumber(buf, off + 124, 12);
- this.mtime = decDate(buf, off + 136, 12);
- this.cksum = decNumber(buf, off + 148, 12);
- // if we have extended or global extended headers, apply them now
- // See https://github.com/npm/node-tar/pull/187
- // Apply global before local, so it overrides
- if (gex)
- this.#slurp(gex, true);
- if (ex)
- this.#slurp(ex);
- // old tar versions marked dirs as a file with a trailing /
- const t = decString(buf, off + 156, 1);
- if (types.isCode(t)) {
- this.#type = t || '0';
- }
- if (this.#type === '0' && this.path.slice(-1) === '/') {
- this.#type = '5';
- }
- // tar implementations sometimes incorrectly put the stat(dir).size
- // as the size in the tarball, even though Directory entries are
- // not able to have any body at all. In the very rare chance that
- // it actually DOES have a body, we weren't going to do anything with
- // it anyway, and it'll just be a warning about an invalid header.
- if (this.#type === '5') {
- this.size = 0;
- }
- this.linkpath = decString(buf, off + 157, 100);
- if (buf.subarray(off + 257, off + 265).toString() ===
- 'ustar\u000000') {
- this.uname = decString(buf, off + 265, 32);
- this.gname = decString(buf, off + 297, 32);
- /* c8 ignore start */
- this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
- this.devmin = decNumber(buf, off + 337, 8) ?? 0;
- /* c8 ignore stop */
- if (buf[off + 475] !== 0) {
- // definitely a prefix, definitely >130 chars.
- const prefix = decString(buf, off + 345, 155);
- this.path = prefix + '/' + this.path;
- }
- else {
- const prefix = decString(buf, off + 345, 130);
- if (prefix) {
- this.path = prefix + '/' + this.path;
- }
- this.atime = decDate(buf, off + 476, 12);
- this.ctime = decDate(buf, off + 488, 12);
- }
- }
- let sum = 8 * 0x20;
- for (let i = off; i < off + 148; i++) {
- sum += buf[i];
- }
- for (let i = off + 156; i < off + 512; i++) {
- sum += buf[i];
- }
- this.cksumValid = sum === this.cksum;
- if (this.cksum === undefined && sum === 8 * 0x20) {
- this.nullBlock = true;
- }
- }
- #slurp(ex, gex = false) {
- Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
- // we slurp in everything except for the path attribute in
- // a global extended header, because that's weird. Also, any
- // null/undefined values are ignored.
- return !(v === null ||
- v === undefined ||
- (k === 'path' && gex) ||
- (k === 'linkpath' && gex) ||
- k === 'global');
- })));
- }
- encode(buf, off = 0) {
- if (!buf) {
- buf = this.block = Buffer.alloc(512);
- }
- if (this.#type === 'Unsupported') {
- this.#type = '0';
- }
- if (!(buf.length >= off + 512)) {
- throw new Error('need 512 bytes for header');
- }
- const prefixSize = this.ctime || this.atime ? 130 : 155;
- const split = splitPrefix(this.path || '', prefixSize);
- const path = split[0];
- const prefix = split[1];
- this.needPax = !!split[2];
- this.needPax = encString(buf, off, 100, path) || this.needPax;
- this.needPax =
- encNumber(buf, off + 100, 8, this.mode) || this.needPax;
- this.needPax =
- encNumber(buf, off + 108, 8, this.uid) || this.needPax;
- this.needPax =
- encNumber(buf, off + 116, 8, this.gid) || this.needPax;
- this.needPax =
- encNumber(buf, off + 124, 12, this.size) || this.needPax;
- this.needPax =
- encDate(buf, off + 136, 12, this.mtime) || this.needPax;
- buf[off + 156] = this.#type.charCodeAt(0);
- this.needPax =
- encString(buf, off + 157, 100, this.linkpath) || this.needPax;
- buf.write('ustar\u000000', off + 257, 8);
- this.needPax =
- encString(buf, off + 265, 32, this.uname) || this.needPax;
- this.needPax =
- encString(buf, off + 297, 32, this.gname) || this.needPax;
- this.needPax =
- encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
- this.needPax =
- encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
- this.needPax =
- encString(buf, off + 345, prefixSize, prefix) || this.needPax;
- if (buf[off + 475] !== 0) {
- this.needPax =
- encString(buf, off + 345, 155, prefix) || this.needPax;
- }
- else {
- this.needPax =
- encString(buf, off + 345, 130, prefix) || this.needPax;
- this.needPax =
- encDate(buf, off + 476, 12, this.atime) || this.needPax;
- this.needPax =
- encDate(buf, off + 488, 12, this.ctime) || this.needPax;
- }
- let sum = 8 * 0x20;
- for (let i = off; i < off + 148; i++) {
- sum += buf[i];
- }
- for (let i = off + 156; i < off + 512; i++) {
- sum += buf[i];
- }
- this.cksum = sum;
- encNumber(buf, off + 148, 8, this.cksum);
- this.cksumValid = true;
- return this.needPax;
- }
- get type() {
- return (this.#type === 'Unsupported' ?
- this.#type
- : types.name.get(this.#type));
- }
- get typeKey() {
- return this.#type;
- }
- set type(type) {
- const c = String(types.code.get(type));
- if (types.isCode(c) || c === 'Unsupported') {
- this.#type = c;
- }
- else if (types.isCode(type)) {
- this.#type = type;
- }
- else {
- throw new TypeError('invalid entry type: ' + type);
- }
- }
-}
-const splitPrefix = (p, prefixSize) => {
- const pathSize = 100;
- let pp = p;
- let prefix = '';
- let ret = undefined;
- const root = pathModule.parse(p).root || '.';
- if (Buffer.byteLength(pp) < pathSize) {
- ret = [pp, prefix, false];
- }
- else {
- // first set prefix to the dir, and path to the base
- prefix = pathModule.dirname(pp);
- pp = pathModule.basename(pp);
- do {
- if (Buffer.byteLength(pp) <= pathSize &&
- Buffer.byteLength(prefix) <= prefixSize) {
- // both fit!
- ret = [pp, prefix, false];
- }
- else if (Buffer.byteLength(pp) > pathSize &&
- Buffer.byteLength(prefix) <= prefixSize) {
- // prefix fits in prefix, but path doesn't fit in path
- ret = [pp.slice(0, pathSize - 1), prefix, true];
- }
- else {
- // make path take a bit from prefix
- pp = pathModule.join(pathModule.basename(prefix), pp);
- prefix = pathModule.dirname(prefix);
- }
- } while (prefix !== root && ret === undefined);
- // at this point, found no resolution, just truncate
- if (!ret) {
- ret = [p.slice(0, pathSize - 1), '', true];
- }
- }
- return ret;
-};
-const decString = (buf, off, size) => buf
- .subarray(off, off + size)
- .toString('utf8')
- .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
- large.parse(buf.subarray(off, off + size))
- : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
- .subarray(off, off + size)
- .toString('utf8')
- .replace(/\0.*$/, '')
- .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
- 12: 0o77777777777,
- 8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
- : num > MAXNUM[size] || num < 0 ?
- (large.encode(num, buf.subarray(off, off + size)), true)
- : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
- str
- : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
- str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/index.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/index.js
deleted file mode 100644
index 1bac6415c8d732..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-export * from './create.js';
-export { create as c } from './create.js';
-export * from './extract.js';
-export { extract as x } from './extract.js';
-export * from './header.js';
-export * from './list.js';
-export { list as t } from './list.js';
-// classes
-export * from './pack.js';
-export * from './parse.js';
-export * from './pax.js';
-export * from './read-entry.js';
-export * from './replace.js';
-export { replace as r } from './replace.js';
-export * as types from './types.js';
-export * from './unpack.js';
-export * from './update.js';
-export { update as u } from './update.js';
-export * from './write-entry.js';
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/large-numbers.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/large-numbers.js
deleted file mode 100644
index 4f2f7e5f14fc1b..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/large-numbers.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-export const encode = (num, buf) => {
- if (!Number.isSafeInteger(num)) {
- // The number is so large that javascript cannot represent it with integer
- // precision.
- throw Error('cannot encode number outside of javascript safe integer range');
- }
- else if (num < 0) {
- encodeNegative(num, buf);
- }
- else {
- encodePositive(num, buf);
- }
- return buf;
-};
-const encodePositive = (num, buf) => {
- buf[0] = 0x80;
- for (var i = buf.length; i > 1; i--) {
- buf[i - 1] = num & 0xff;
- num = Math.floor(num / 0x100);
- }
-};
-const encodeNegative = (num, buf) => {
- buf[0] = 0xff;
- var flipped = false;
- num = num * -1;
- for (var i = buf.length; i > 1; i--) {
- var byte = num & 0xff;
- num = Math.floor(num / 0x100);
- if (flipped) {
- buf[i - 1] = onesComp(byte);
- }
- else if (byte === 0) {
- buf[i - 1] = 0;
- }
- else {
- flipped = true;
- buf[i - 1] = twosComp(byte);
- }
- }
-};
-export const parse = (buf) => {
- const pre = buf[0];
- const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
- : pre === 0xff ? twos(buf)
- : null;
- if (value === null) {
- throw Error('invalid base256 encoding');
- }
- if (!Number.isSafeInteger(value)) {
- // The number is so large that javascript cannot represent it with integer
- // precision.
- throw Error('parsed number outside of javascript safe integer range');
- }
- return value;
-};
-const twos = (buf) => {
- var len = buf.length;
- var sum = 0;
- var flipped = false;
- for (var i = len - 1; i > -1; i--) {
- var byte = Number(buf[i]);
- var f;
- if (flipped) {
- f = onesComp(byte);
- }
- else if (byte === 0) {
- f = byte;
- }
- else {
- flipped = true;
- f = twosComp(byte);
- }
- if (f !== 0) {
- sum -= f * Math.pow(256, len - i - 1);
- }
- }
- return sum;
-};
-const pos = (buf) => {
- var len = buf.length;
- var sum = 0;
- for (var i = len - 1; i > -1; i--) {
- var byte = Number(buf[i]);
- if (byte !== 0) {
- sum += byte * Math.pow(256, len - i - 1);
- }
- }
- return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/list.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/list.js
deleted file mode 100644
index f49068400b6c92..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/list.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// tar -t
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { dirname, parse } from 'path';
-import { makeCommand } from './make-command.js';
-import { Parser } from './parse.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const onReadEntryFunction = (opt) => {
- const onReadEntry = opt.onReadEntry;
- opt.onReadEntry =
- onReadEntry ?
- e => {
- onReadEntry(e);
- e.resume();
- }
- : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-export const filesFilter = (opt, files) => {
- const map = new Map(files.map(f => [stripTrailingSlashes(f), true]));
- const filter = opt.filter;
- const mapHas = (file, r = '') => {
- const root = r || parse(file).root || '.';
- let ret;
- if (file === root)
- ret = false;
- else {
- const m = map.get(file);
- if (m !== undefined) {
- ret = m;
- }
- else {
- ret = mapHas(dirname(file), root);
- }
- }
- map.set(file, ret);
- return ret;
- };
- opt.filter =
- filter ?
- (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file))
- : file => mapHas(stripTrailingSlashes(file));
-};
-const listFileSync = (opt) => {
- const p = new Parser(opt);
- const file = opt.file;
- let fd;
- try {
- const stat = fs.statSync(file);
- const readSize = opt.maxReadSize || 16 * 1024 * 1024;
- if (stat.size < readSize) {
- p.end(fs.readFileSync(file));
- }
- else {
- let pos = 0;
- const buf = Buffer.allocUnsafe(readSize);
- fd = fs.openSync(file, 'r');
- while (pos < stat.size) {
- const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
- pos += bytesRead;
- p.write(buf.subarray(0, bytesRead));
- }
- p.end();
- }
- }
- finally {
- if (typeof fd === 'number') {
- try {
- fs.closeSync(fd);
- /* c8 ignore next */
- }
- catch (er) { }
- }
- }
-};
-const listFile = (opt, _files) => {
- const parse = new Parser(opt);
- const readSize = opt.maxReadSize || 16 * 1024 * 1024;
- const file = opt.file;
- const p = new Promise((resolve, reject) => {
- parse.on('error', reject);
- parse.on('end', resolve);
- fs.stat(file, (er, stat) => {
- if (er) {
- reject(er);
- }
- else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size,
- });
- stream.on('error', reject);
- stream.pipe(parse);
- }
- });
- });
- return p;
-};
-export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => {
- if (files?.length)
- filesFilter(opt, files);
- if (!opt.noResume)
- onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/make-command.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/make-command.js
deleted file mode 100644
index f2f737bca78fd7..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/make-command.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js';
-export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
- return Object.assign((opt_ = [], entries, cb) => {
- if (Array.isArray(opt_)) {
- entries = opt_;
- opt_ = {};
- }
- if (typeof entries === 'function') {
- cb = entries;
- entries = undefined;
- }
- if (!entries) {
- entries = [];
- }
- else {
- entries = Array.from(entries);
- }
- const opt = dealias(opt_);
- validate?.(opt, entries);
- if (isSyncFile(opt)) {
- if (typeof cb === 'function') {
- throw new TypeError('callback not supported for sync tar functions');
- }
- return syncFile(opt, entries);
- }
- else if (isAsyncFile(opt)) {
- const p = asyncFile(opt, entries);
- // weirdness to make TS happy
- const c = cb ? cb : undefined;
- return c ? p.then(() => c(), c) : p;
- }
- else if (isSyncNoFile(opt)) {
- if (typeof cb === 'function') {
- throw new TypeError('callback not supported for sync tar functions');
- }
- return syncNoFile(opt, entries);
- }
- else if (isAsyncNoFile(opt)) {
- if (typeof cb === 'function') {
- throw new TypeError('callback only supported with file option');
- }
- return asyncNoFile(opt, entries);
- /* c8 ignore start */
- }
- else {
- throw new Error('impossible options??');
- }
- /* c8 ignore stop */
- }, {
- syncFile,
- asyncFile,
- syncNoFile,
- asyncNoFile,
- validate,
- });
-};
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/mode-fix.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/mode-fix.js
deleted file mode 100644
index 5fd3bb88c1cb25..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/mode-fix.js
+++ /dev/null
@@ -1,25 +0,0 @@
-export const modeFix = (mode, isDir, portable) => {
- mode &= 0o7777;
- // in portable mode, use the minimum reasonable umask
- // if this system creates files with 0o664 by default
- // (as some linux distros do), then we'll write the
- // archive with 0o644 instead. Also, don't ever create
- // a file that is not readable/writable by the owner.
- if (portable) {
- mode = (mode | 0o600) & ~0o22;
- }
- // if dirs are readable, then they should be listable
- if (isDir) {
- if (mode & 0o400) {
- mode |= 0o100;
- }
- if (mode & 0o40) {
- mode |= 0o10;
- }
- if (mode & 0o4) {
- mode |= 0o1;
- }
- }
- return mode;
-};
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js
deleted file mode 100644
index 94e5095476d6e0..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-export const normalizeUnicode = (s) => {
- if (!hasOwnProperty.call(normalizeCache, s)) {
- normalizeCache[s] = s.normalize('NFD');
- }
- return normalizeCache[s];
-};
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-windows-path.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-windows-path.js
deleted file mode 100644
index 2d97d2b884e627..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-windows-path.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-export const normalizeWindowsPath = platform !== 'win32' ?
- (p) => p
- : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/options.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/options.js
deleted file mode 100644
index a006d36c23c923..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-const argmap = new Map([
- ['C', 'cwd'],
- ['f', 'file'],
- ['z', 'gzip'],
- ['P', 'preservePaths'],
- ['U', 'unlink'],
- ['strip-components', 'strip'],
- ['stripComponents', 'strip'],
- ['keep-newer', 'newer'],
- ['keepNewer', 'newer'],
- ['keep-newer-files', 'newer'],
- ['keepNewerFiles', 'newer'],
- ['k', 'keep'],
- ['keep-existing', 'keep'],
- ['keepExisting', 'keep'],
- ['m', 'noMtime'],
- ['no-mtime', 'noMtime'],
- ['p', 'preserveOwner'],
- ['L', 'follow'],
- ['h', 'follow'],
- ['onentry', 'onReadEntry'],
-]);
-export const isSyncFile = (o) => !!o.sync && !!o.file;
-export const isAsyncFile = (o) => !o.sync && !!o.file;
-export const isSyncNoFile = (o) => !!o.sync && !o.file;
-export const isAsyncNoFile = (o) => !o.sync && !o.file;
-export const isSync = (o) => !!o.sync;
-export const isAsync = (o) => !o.sync;
-export const isFile = (o) => !!o.file;
-export const isNoFile = (o) => !o.file;
-const dealiasKey = (k) => {
- const d = argmap.get(k);
- if (d)
- return d;
- return k;
-};
-export const dealias = (opt = {}) => {
- if (!opt)
- return {};
- const result = {};
- for (const [key, v] of Object.entries(opt)) {
- // TS doesn't know that aliases are going to always be the same type
- const k = dealiasKey(key);
- result[k] = v;
- }
- // affordance for deprecated noChmod -> chmod
- if (result.chmod === undefined && result.noChmod === false) {
- result.chmod = true;
- }
- delete result.noChmod;
- return result;
-};
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/parse.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/parse.js
deleted file mode 100644
index f2c802e6eef04d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/parse.js
+++ /dev/null
@@ -1,595 +0,0 @@
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in. We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry. The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-import { EventEmitter as EE } from 'events';
-import { BrotliDecompress, Unzip } from 'minizlib';
-import { Yallist } from 'yallist';
-import { Header } from './header.js';
-import { Pax } from './pax.js';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-export class Parser extends EE {
- file;
- strict;
- maxMetaEntrySize;
- filter;
- brotli;
- writable = true;
- readable = false;
- [QUEUE] = new Yallist();
- [BUFFER];
- [READENTRY];
- [WRITEENTRY];
- [STATE] = 'begin';
- [META] = '';
- [EX];
- [GEX];
- [ENDED] = false;
- [UNZIP];
- [ABORTED] = false;
- [SAW_VALID_ENTRY];
- [SAW_NULL_BLOCK] = false;
- [SAW_EOF] = false;
- [WRITING] = false;
- [CONSUMING] = false;
- [EMITTEDEND] = false;
- constructor(opt = {}) {
- super();
- this.file = opt.file || '';
- // these BADARCHIVE errors can't be detected early. listen on DONE.
- this.on(DONE, () => {
- if (this[STATE] === 'begin' ||
- this[SAW_VALID_ENTRY] === false) {
- // either less than 1 block of data, or all entries were invalid.
- // Either way, probably not even a tarball.
- this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
- }
- });
- if (opt.ondone) {
- this.on(DONE, opt.ondone);
- }
- else {
- this.on(DONE, () => {
- this.emit('prefinish');
- this.emit('finish');
- this.emit('end');
- });
- }
- this.strict = !!opt.strict;
- this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
- this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
- // Unlike gzip, brotli doesn't have any magic bytes to identify it
- // Users need to explicitly tell us they're extracting a brotli file
- // Or we infer from the file extension
- const isTBR = opt.file &&
- (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
- // if it's a tbr file it MIGHT be brotli, but we don't know until
- // we look at it and verify it's not a valid tar file.
- this.brotli =
- !opt.gzip && opt.brotli !== undefined ? opt.brotli
- : isTBR ? undefined
- : false;
- // have to set this so that streams are ok piping into it
- this.on('end', () => this[CLOSESTREAM]());
- if (typeof opt.onwarn === 'function') {
- this.on('warn', opt.onwarn);
- }
- if (typeof opt.onReadEntry === 'function') {
- this.on('entry', opt.onReadEntry);
- }
- }
- warn(code, message, data = {}) {
- warnMethod(this, code, message, data);
- }
- [CONSUMEHEADER](chunk, position) {
- if (this[SAW_VALID_ENTRY] === undefined) {
- this[SAW_VALID_ENTRY] = false;
- }
- let header;
- try {
- header = new Header(chunk, position, this[EX], this[GEX]);
- }
- catch (er) {
- return this.warn('TAR_ENTRY_INVALID', er);
- }
- if (header.nullBlock) {
- if (this[SAW_NULL_BLOCK]) {
- this[SAW_EOF] = true;
- // ending an archive with no entries. pointless, but legal.
- if (this[STATE] === 'begin') {
- this[STATE] = 'header';
- }
- this[EMIT]('eof');
- }
- else {
- this[SAW_NULL_BLOCK] = true;
- this[EMIT]('nullBlock');
- }
- }
- else {
- this[SAW_NULL_BLOCK] = false;
- if (!header.cksumValid) {
- this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
- }
- else if (!header.path) {
- this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
- }
- else {
- const type = header.type;
- if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
- this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
- header,
- });
- }
- else if (!/^(Symbolic)?Link$/.test(type) &&
- !/^(Global)?ExtendedHeader$/.test(type) &&
- header.linkpath) {
- this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
- header,
- });
- }
- else {
- const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX]));
- // we do this for meta & ignored entries as well, because they
- // are still valid tar, or else we wouldn't know to ignore them
- if (!this[SAW_VALID_ENTRY]) {
- if (entry.remain) {
- // this might be the one!
- const onend = () => {
- if (!entry.invalid) {
- this[SAW_VALID_ENTRY] = true;
- }
- };
- entry.on('end', onend);
- }
- else {
- this[SAW_VALID_ENTRY] = true;
- }
- }
- if (entry.meta) {
- if (entry.size > this.maxMetaEntrySize) {
- entry.ignore = true;
- this[EMIT]('ignoredEntry', entry);
- this[STATE] = 'ignore';
- entry.resume();
- }
- else if (entry.size > 0) {
- this[META] = '';
- entry.on('data', c => (this[META] += c));
- this[STATE] = 'meta';
- }
- }
- else {
- this[EX] = undefined;
- entry.ignore =
- entry.ignore || !this.filter(entry.path, entry);
- if (entry.ignore) {
- // probably valid, just not something we care about
- this[EMIT]('ignoredEntry', entry);
- this[STATE] = entry.remain ? 'ignore' : 'header';
- entry.resume();
- }
- else {
- if (entry.remain) {
- this[STATE] = 'body';
- }
- else {
- this[STATE] = 'header';
- entry.end();
- }
- if (!this[READENTRY]) {
- this[QUEUE].push(entry);
- this[NEXTENTRY]();
- }
- else {
- this[QUEUE].push(entry);
- }
- }
- }
- }
- }
- }
- }
- [CLOSESTREAM]() {
- queueMicrotask(() => this.emit('close'));
- }
- [PROCESSENTRY](entry) {
- let go = true;
- if (!entry) {
- this[READENTRY] = undefined;
- go = false;
- }
- else if (Array.isArray(entry)) {
- const [ev, ...args] = entry;
- this.emit(ev, ...args);
- }
- else {
- this[READENTRY] = entry;
- this.emit('entry', entry);
- if (!entry.emittedEnd) {
- entry.on('end', () => this[NEXTENTRY]());
- go = false;
- }
- }
- return go;
- }
- [NEXTENTRY]() {
- do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
- if (!this[QUEUE].length) {
- // At this point, there's nothing in the queue, but we may have an
- // entry which is being consumed (readEntry).
- // If we don't, then we definitely can handle more data.
- // If we do, and either it's flowing, or it has never had any data
- // written to it, then it needs more.
- // The only other possibility is that it has returned false from a
- // write() call, so we wait for the next drain to continue.
- const re = this[READENTRY];
- const drainNow = !re || re.flowing || re.size === re.remain;
- if (drainNow) {
- if (!this[WRITING]) {
- this.emit('drain');
- }
- }
- else {
- re.once('drain', () => this.emit('drain'));
- }
- }
- }
- [CONSUMEBODY](chunk, position) {
- // write up to but no more than writeEntry.blockRemain
- const entry = this[WRITEENTRY];
- /* c8 ignore start */
- if (!entry) {
- throw new Error('attempt to consume body without entry??');
- }
- const br = entry.blockRemain ?? 0;
- /* c8 ignore stop */
- const c = br >= chunk.length && position === 0 ?
- chunk
- : chunk.subarray(position, position + br);
- entry.write(c);
- if (!entry.blockRemain) {
- this[STATE] = 'header';
- this[WRITEENTRY] = undefined;
- entry.end();
- }
- return c.length;
- }
- [CONSUMEMETA](chunk, position) {
- const entry = this[WRITEENTRY];
- const ret = this[CONSUMEBODY](chunk, position);
- // if we finished, then the entry is reset
- if (!this[WRITEENTRY] && entry) {
- this[EMITMETA](entry);
- }
- return ret;
- }
- [EMIT](ev, data, extra) {
- if (!this[QUEUE].length && !this[READENTRY]) {
- this.emit(ev, data, extra);
- }
- else {
- this[QUEUE].push([ev, data, extra]);
- }
- }
- [EMITMETA](entry) {
- this[EMIT]('meta', this[META]);
- switch (entry.type) {
- case 'ExtendedHeader':
- case 'OldExtendedHeader':
- this[EX] = Pax.parse(this[META], this[EX], false);
- break;
- case 'GlobalExtendedHeader':
- this[GEX] = Pax.parse(this[META], this[GEX], true);
- break;
- case 'NextFileHasLongPath':
- case 'OldGnuLongPath': {
- const ex = this[EX] ?? Object.create(null);
- this[EX] = ex;
- ex.path = this[META].replace(/\0.*/, '');
- break;
- }
- case 'NextFileHasLongLinkpath': {
- const ex = this[EX] || Object.create(null);
- this[EX] = ex;
- ex.linkpath = this[META].replace(/\0.*/, '');
- break;
- }
- /* c8 ignore start */
- default:
- throw new Error('unknown meta: ' + entry.type);
- /* c8 ignore stop */
- }
- }
- abort(error) {
- this[ABORTED] = true;
- this.emit('abort', error);
- // always throws, even in non-strict mode
- this.warn('TAR_ABORT', error, { recoverable: false });
- }
- write(chunk, encoding, cb) {
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- if (typeof chunk === 'string') {
- chunk = Buffer.from(chunk,
- /* c8 ignore next */
- typeof encoding === 'string' ? encoding : 'utf8');
- }
- if (this[ABORTED]) {
- /* c8 ignore next */
- cb?.();
- return false;
- }
- // first write, might be gzipped
- const needSniff = this[UNZIP] === undefined ||
- (this.brotli === undefined && this[UNZIP] === false);
- if (needSniff && chunk) {
- if (this[BUFFER]) {
- chunk = Buffer.concat([this[BUFFER], chunk]);
- this[BUFFER] = undefined;
- }
- if (chunk.length < gzipHeader.length) {
- this[BUFFER] = chunk;
- /* c8 ignore next */
- cb?.();
- return true;
- }
- // look for gzip header
- for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
- if (chunk[i] !== gzipHeader[i]) {
- this[UNZIP] = false;
- }
- }
- const maybeBrotli = this.brotli === undefined;
- if (this[UNZIP] === false && maybeBrotli) {
- // read the first header to see if it's a valid tar file. If so,
- // we can safely assume that it's not actually brotli, despite the
- // .tbr or .tar.br file extension.
- // if we ended before getting a full chunk, yes, def brotli
- if (chunk.length < 512) {
- if (this[ENDED]) {
- this.brotli = true;
- }
- else {
- this[BUFFER] = chunk;
- /* c8 ignore next */
- cb?.();
- return true;
- }
- }
- else {
- // if it's tar, it's pretty reliably not brotli, chances of
- // that happening are astronomical.
- try {
- new Header(chunk.subarray(0, 512));
- this.brotli = false;
- }
- catch (_) {
- this.brotli = true;
- }
- }
- }
- if (this[UNZIP] === undefined ||
- (this[UNZIP] === false && this.brotli)) {
- const ended = this[ENDED];
- this[ENDED] = false;
- this[UNZIP] =
- this[UNZIP] === undefined ?
- new Unzip({})
- : new BrotliDecompress({});
- this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
- this[UNZIP].on('error', er => this.abort(er));
- this[UNZIP].on('end', () => {
- this[ENDED] = true;
- this[CONSUMECHUNK]();
- });
- this[WRITING] = true;
- const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
- this[WRITING] = false;
- cb?.();
- return ret;
- }
- }
- this[WRITING] = true;
- if (this[UNZIP]) {
- this[UNZIP].write(chunk);
- }
- else {
- this[CONSUMECHUNK](chunk);
- }
- this[WRITING] = false;
- // return false if there's a queue, or if the current entry isn't flowing
- const ret = this[QUEUE].length ? false
- : this[READENTRY] ? this[READENTRY].flowing
- : true;
- // if we have no queue, then that means a clogged READENTRY
- if (!ret && !this[QUEUE].length) {
- this[READENTRY]?.once('drain', () => this.emit('drain'));
- }
- /* c8 ignore next */
- cb?.();
- return ret;
- }
- [BUFFERCONCAT](c) {
- if (c && !this[ABORTED]) {
- this[BUFFER] =
- this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
- }
- }
- [MAYBEEND]() {
- if (this[ENDED] &&
- !this[EMITTEDEND] &&
- !this[ABORTED] &&
- !this[CONSUMING]) {
- this[EMITTEDEND] = true;
- const entry = this[WRITEENTRY];
- if (entry && entry.blockRemain) {
- // truncated, likely a damaged file
- const have = this[BUFFER] ? this[BUFFER].length : 0;
- this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
- if (this[BUFFER]) {
- entry.write(this[BUFFER]);
- }
- entry.end();
- }
- this[EMIT](DONE);
- }
- }
- [CONSUMECHUNK](chunk) {
- if (this[CONSUMING] && chunk) {
- this[BUFFERCONCAT](chunk);
- }
- else if (!chunk && !this[BUFFER]) {
- this[MAYBEEND]();
- }
- else if (chunk) {
- this[CONSUMING] = true;
- if (this[BUFFER]) {
- this[BUFFERCONCAT](chunk);
- const c = this[BUFFER];
- this[BUFFER] = undefined;
- this[CONSUMECHUNKSUB](c);
- }
- else {
- this[CONSUMECHUNKSUB](chunk);
- }
- while (this[BUFFER] &&
- this[BUFFER]?.length >= 512 &&
- !this[ABORTED] &&
- !this[SAW_EOF]) {
- const c = this[BUFFER];
- this[BUFFER] = undefined;
- this[CONSUMECHUNKSUB](c);
- }
- this[CONSUMING] = false;
- }
- if (!this[BUFFER] || this[ENDED]) {
- this[MAYBEEND]();
- }
- }
- [CONSUMECHUNKSUB](chunk) {
- // we know that we are in CONSUMING mode, so anything written goes into
- // the buffer. Advance the position and put any remainder in the buffer.
- let position = 0;
- const length = chunk.length;
- while (position + 512 <= length &&
- !this[ABORTED] &&
- !this[SAW_EOF]) {
- switch (this[STATE]) {
- case 'begin':
- case 'header':
- this[CONSUMEHEADER](chunk, position);
- position += 512;
- break;
- case 'ignore':
- case 'body':
- position += this[CONSUMEBODY](chunk, position);
- break;
- case 'meta':
- position += this[CONSUMEMETA](chunk, position);
- break;
- /* c8 ignore start */
- default:
- throw new Error('invalid state: ' + this[STATE]);
- /* c8 ignore stop */
- }
- }
- if (position < length) {
- if (this[BUFFER]) {
- this[BUFFER] = Buffer.concat([
- chunk.subarray(position),
- this[BUFFER],
- ]);
- }
- else {
- this[BUFFER] = chunk.subarray(position);
- }
- }
- }
- end(chunk, encoding, cb) {
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- if (typeof chunk === 'string') {
- chunk = Buffer.from(chunk, encoding);
- }
- if (cb)
- this.once('finish', cb);
- if (!this[ABORTED]) {
- if (this[UNZIP]) {
- /* c8 ignore start */
- if (chunk)
- this[UNZIP].write(chunk);
- /* c8 ignore stop */
- this[UNZIP].end();
- }
- else {
- this[ENDED] = true;
- if (this.brotli === undefined)
- chunk = chunk || Buffer.alloc(0);
- if (chunk)
- this.write(chunk);
- this[MAYBEEND]();
- }
- }
- return this;
- }
-}
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/path-reservations.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/path-reservations.js
deleted file mode 100644
index e63b9c91e9a808..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/path-reservations.js
+++ /dev/null
@@ -1,166 +0,0 @@
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-import { join } from 'node:path';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-const getDirs = (path) => {
- const dirs = path
- .split('/')
- .slice(0, -1)
- .reduce((set, path) => {
- const s = set[set.length - 1];
- if (s !== undefined) {
- path = join(s, path);
- }
- set.push(path || '/');
- return set;
- }, []);
- return dirs;
-};
-export class PathReservations {
- // path => [function or Set]
- // A Set object means a directory reservation
- // A fn is a direct reservation on that path
- #queues = new Map();
- // fn => {paths:[path,...], dirs:[path, ...]}
- #reservations = new Map();
- // functions currently running
- #running = new Set();
- reserve(paths, fn) {
- paths =
- isWindows ?
- ['win32 parallelization disabled']
- : paths.map(p => {
- // don't need normPath, because we skip this entirely for windows
- return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase();
- });
- const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
- this.#reservations.set(fn, { dirs, paths });
- for (const p of paths) {
- const q = this.#queues.get(p);
- if (!q) {
- this.#queues.set(p, [fn]);
- }
- else {
- q.push(fn);
- }
- }
- for (const dir of dirs) {
- const q = this.#queues.get(dir);
- if (!q) {
- this.#queues.set(dir, [new Set([fn])]);
- }
- else {
- const l = q[q.length - 1];
- if (l instanceof Set) {
- l.add(fn);
- }
- else {
- q.push(new Set([fn]));
- }
- }
- }
- return this.#run(fn);
- }
- // return the queues for each path the function cares about
- // fn => {paths, dirs}
- #getQueues(fn) {
- const res = this.#reservations.get(fn);
- /* c8 ignore start */
- if (!res) {
- throw new Error('function does not have any path reservations');
- }
- /* c8 ignore stop */
- return {
- paths: res.paths.map((path) => this.#queues.get(path)),
- dirs: [...res.dirs].map(path => this.#queues.get(path)),
- };
- }
- // check if fn is first in line for all its paths, and is
- // included in the first set for all its dir queues
- check(fn) {
- const { paths, dirs } = this.#getQueues(fn);
- return (paths.every(q => q && q[0] === fn) &&
- dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
- }
- // run the function if it's first in line and not already running
- #run(fn) {
- if (this.#running.has(fn) || !this.check(fn)) {
- return false;
- }
- this.#running.add(fn);
- fn(() => this.#clear(fn));
- return true;
- }
- #clear(fn) {
- if (!this.#running.has(fn)) {
- return false;
- }
- const res = this.#reservations.get(fn);
- /* c8 ignore start */
- if (!res) {
- throw new Error('invalid reservation');
- }
- /* c8 ignore stop */
- const { paths, dirs } = res;
- const next = new Set();
- for (const path of paths) {
- const q = this.#queues.get(path);
- /* c8 ignore start */
- if (!q || q?.[0] !== fn) {
- continue;
- }
- /* c8 ignore stop */
- const q0 = q[1];
- if (!q0) {
- this.#queues.delete(path);
- continue;
- }
- q.shift();
- if (typeof q0 === 'function') {
- next.add(q0);
- }
- else {
- for (const f of q0) {
- next.add(f);
- }
- }
- }
- for (const dir of dirs) {
- const q = this.#queues.get(dir);
- const q0 = q?.[0];
- /* c8 ignore next - type safety only */
- if (!q || !(q0 instanceof Set))
- continue;
- if (q0.size === 1 && q.length === 1) {
- this.#queues.delete(dir);
- continue;
- }
- else if (q0.size === 1) {
- q.shift();
- // next one must be a function,
- // or else the Set would've been reused
- const n = q[0];
- if (typeof n === 'function') {
- next.add(n);
- }
- }
- else {
- q0.delete(fn);
- }
- }
- this.#running.delete(fn);
- next.forEach(fn => this.#run(fn));
- return true;
- }
-}
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/pax.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/pax.js
deleted file mode 100644
index 832808f344da53..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/pax.js
+++ /dev/null
@@ -1,154 +0,0 @@
-import { basename } from 'node:path';
-import { Header } from './header.js';
-export class Pax {
- atime;
- mtime;
- ctime;
- charset;
- comment;
- gid;
- uid;
- gname;
- uname;
- linkpath;
- dev;
- ino;
- nlink;
- path;
- size;
- mode;
- global;
- constructor(obj, global = false) {
- this.atime = obj.atime;
- this.charset = obj.charset;
- this.comment = obj.comment;
- this.ctime = obj.ctime;
- this.dev = obj.dev;
- this.gid = obj.gid;
- this.global = global;
- this.gname = obj.gname;
- this.ino = obj.ino;
- this.linkpath = obj.linkpath;
- this.mtime = obj.mtime;
- this.nlink = obj.nlink;
- this.path = obj.path;
- this.size = obj.size;
- this.uid = obj.uid;
- this.uname = obj.uname;
- }
- encode() {
- const body = this.encodeBody();
- if (body === '') {
- return Buffer.allocUnsafe(0);
- }
- const bodyLen = Buffer.byteLength(body);
- // round up to 512 bytes
- // add 512 for header
- const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
- const buf = Buffer.allocUnsafe(bufLen);
- // 0-fill the header section, it might not hit every field
- for (let i = 0; i < 512; i++) {
- buf[i] = 0;
- }
- new Header({
- // XXX split the path
- // then the path should be PaxHeader + basename, but less than 99,
- // prepend with the dirname
- /* c8 ignore start */
- path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99),
- /* c8 ignore stop */
- mode: this.mode || 0o644,
- uid: this.uid,
- gid: this.gid,
- size: bodyLen,
- mtime: this.mtime,
- type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
- linkpath: '',
- uname: this.uname || '',
- gname: this.gname || '',
- devmaj: 0,
- devmin: 0,
- atime: this.atime,
- ctime: this.ctime,
- }).encode(buf);
- buf.write(body, 512, bodyLen, 'utf8');
- // null pad after the body
- for (let i = bodyLen + 512; i < buf.length; i++) {
- buf[i] = 0;
- }
- return buf;
- }
- encodeBody() {
- return (this.encodeField('path') +
- this.encodeField('ctime') +
- this.encodeField('atime') +
- this.encodeField('dev') +
- this.encodeField('ino') +
- this.encodeField('nlink') +
- this.encodeField('charset') +
- this.encodeField('comment') +
- this.encodeField('gid') +
- this.encodeField('gname') +
- this.encodeField('linkpath') +
- this.encodeField('mtime') +
- this.encodeField('size') +
- this.encodeField('uid') +
- this.encodeField('uname'));
- }
- encodeField(field) {
- if (this[field] === undefined) {
- return '';
- }
- const r = this[field];
- const v = r instanceof Date ? r.getTime() / 1000 : r;
- const s = ' ' +
- (field === 'dev' || field === 'ino' || field === 'nlink' ?
- 'SCHILY.'
- : '') +
- field +
- '=' +
- v +
- '\n';
- const byteLen = Buffer.byteLength(s);
- // the digits includes the length of the digits in ascii base-10
- // so if it's 9 characters, then adding 1 for the 9 makes it 10
- // which makes it 11 chars.
- let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
- if (byteLen + digits >= Math.pow(10, digits)) {
- digits += 1;
- }
- const len = digits + byteLen;
- return len + s;
- }
- static parse(str, ex, g = false) {
- return new Pax(merge(parseKV(str), ex), g);
- }
-}
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
- .replace(/\n$/, '')
- .split('\n')
- .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
- const n = parseInt(line, 10);
- // XXX Values with \n in them will fail this.
- // Refactor to not be a naive line-by-line parse.
- if (n !== Buffer.byteLength(line) + 1) {
- return set;
- }
- line = line.slice((n + ' ').length);
- const kv = line.split('=');
- const r = kv.shift();
- if (!r) {
- return set;
- }
- const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
- const v = kv.join('=');
- set[k] =
- /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
- new Date(Number(v) * 1000)
- : /^[0-9]+$/.test(v) ? +v
- : v;
- return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/read-entry.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/read-entry.js
deleted file mode 100644
index 23cc673e610879..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/read-entry.js
+++ /dev/null
@@ -1,136 +0,0 @@
-import { Minipass } from 'minipass';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class ReadEntry extends Minipass {
- extended;
- globalExtended;
- header;
- startBlockSize;
- blockRemain;
- remain;
- type;
- meta = false;
- ignore = false;
- path;
- mode;
- uid;
- gid;
- uname;
- gname;
- size = 0;
- mtime;
- atime;
- ctime;
- linkpath;
- dev;
- ino;
- nlink;
- invalid = false;
- absolute;
- unsupported = false;
- constructor(header, ex, gex) {
- super({});
- // read entries always start life paused. this is to avoid the
- // situation where Minipass's auto-ending empty streams results
- // in an entry ending before we're ready for it.
- this.pause();
- this.extended = ex;
- this.globalExtended = gex;
- this.header = header;
- /* c8 ignore start */
- this.remain = header.size ?? 0;
- /* c8 ignore stop */
- this.startBlockSize = 512 * Math.ceil(this.remain / 512);
- this.blockRemain = this.startBlockSize;
- this.type = header.type;
- switch (this.type) {
- case 'File':
- case 'OldFile':
- case 'Link':
- case 'SymbolicLink':
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'Directory':
- case 'FIFO':
- case 'ContiguousFile':
- case 'GNUDumpDir':
- break;
- case 'NextFileHasLongLinkpath':
- case 'NextFileHasLongPath':
- case 'OldGnuLongPath':
- case 'GlobalExtendedHeader':
- case 'ExtendedHeader':
- case 'OldExtendedHeader':
- this.meta = true;
- break;
- // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
- // it may be worth doing the same, but with a warning.
- default:
- this.ignore = true;
- }
- /* c8 ignore start */
- if (!header.path) {
- throw new Error('no path provided for tar.ReadEntry');
- }
- /* c8 ignore stop */
- this.path = normalizeWindowsPath(header.path);
- this.mode = header.mode;
- if (this.mode) {
- this.mode = this.mode & 0o7777;
- }
- this.uid = header.uid;
- this.gid = header.gid;
- this.uname = header.uname;
- this.gname = header.gname;
- this.size = this.remain;
- this.mtime = header.mtime;
- this.atime = header.atime;
- this.ctime = header.ctime;
- /* c8 ignore start */
- this.linkpath =
- header.linkpath ?
- normalizeWindowsPath(header.linkpath)
- : undefined;
- /* c8 ignore stop */
- this.uname = header.uname;
- this.gname = header.gname;
- if (ex) {
- this.#slurp(ex);
- }
- if (gex) {
- this.#slurp(gex, true);
- }
- }
- write(data) {
- const writeLen = data.length;
- if (writeLen > this.blockRemain) {
- throw new Error('writing more to entry than is appropriate');
- }
- const r = this.remain;
- const br = this.blockRemain;
- this.remain = Math.max(0, r - writeLen);
- this.blockRemain = Math.max(0, br - writeLen);
- if (this.ignore) {
- return true;
- }
- if (r >= writeLen) {
- return super.write(data);
- }
- // r < writeLen
- return super.write(data.subarray(0, r));
- }
- #slurp(ex, gex = false) {
- if (ex.path)
- ex.path = normalizeWindowsPath(ex.path);
- if (ex.linkpath)
- ex.linkpath = normalizeWindowsPath(ex.linkpath);
- Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
- // we slurp in everything except for the path attribute in
- // a global extended header, because that's weird. Also, any
- // null/undefined values are ignored.
- return !(v === null ||
- v === undefined ||
- (k === 'path' && gex));
- })));
- }
-}
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/strip-absolute-path.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/strip-absolute-path.js
deleted file mode 100644
index cce5ff80b00db3..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/strip-absolute-path.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// unix absolute paths are also absolute on win32, so we use this for both
-import { win32 } from 'node:path';
-const { isAbsolute, parse } = win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-export const stripAbsolutePath = (path) => {
- let r = '';
- let parsed = parse(path);
- while (isAbsolute(path) || parsed.root) {
- // windows will think that //x/y/z has a "root" of //x/y/
- // but strip the //?/C:/ off of //?/C:/path
- const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
- '/'
- : parsed.root;
- path = path.slice(root.length);
- r += root;
- parsed = parse(path);
- }
- return [r, path];
-};
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/strip-trailing-slashes.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/strip-trailing-slashes.js
deleted file mode 100644
index ace4218a7547bf..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/strip-trailing-slashes.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-export const stripTrailingSlashes = (str) => {
- let i = str.length - 1;
- let slashesStart = -1;
- while (i > -1 && str.charAt(i) === '/') {
- slashesStart = i;
- i--;
- }
- return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/symlink-error.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/symlink-error.js
deleted file mode 100644
index d31766e2e0afa0..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/symlink-error.js
+++ /dev/null
@@ -1,15 +0,0 @@
-export class SymlinkError extends Error {
- path;
- symlink;
- syscall = 'symlink';
- code = 'TAR_SYMLINK_ERROR';
- constructor(symlink, path) {
- super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
- this.symlink = symlink;
- this.path = path;
- }
- get name() {
- return 'SymlinkError';
- }
-}
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/types.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/types.js
deleted file mode 100644
index 27b982ae1e0922..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/types.js
+++ /dev/null
@@ -1,45 +0,0 @@
-export const isCode = (c) => name.has(c);
-export const isName = (c) => code.has(c);
-// map types from key to human-friendly name
-export const name = new Map([
- ['0', 'File'],
- // same as File
- ['', 'OldFile'],
- ['1', 'Link'],
- ['2', 'SymbolicLink'],
- // Devices and FIFOs aren't fully supported
- // they are parsed, but skipped when unpacking
- ['3', 'CharacterDevice'],
- ['4', 'BlockDevice'],
- ['5', 'Directory'],
- ['6', 'FIFO'],
- // same as File
- ['7', 'ContiguousFile'],
- // pax headers
- ['g', 'GlobalExtendedHeader'],
- ['x', 'ExtendedHeader'],
- // vendor-specific stuff
- // skip
- ['A', 'SolarisACL'],
- // like 5, but with data, which should be skipped
- ['D', 'GNUDumpDir'],
- // metadata only, skip
- ['I', 'Inode'],
- // data = link path of next file
- ['K', 'NextFileHasLongLinkpath'],
- // data = path of next file
- ['L', 'NextFileHasLongPath'],
- // skip
- ['M', 'ContinuationFile'],
- // like L
- ['N', 'OldGnuLongPath'],
- // skip
- ['S', 'SparseFile'],
- // skip
- ['V', 'TapeVolumeHeader'],
- // like x
- ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js
deleted file mode 100644
index 6e744cfc1a6f9f..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js
+++ /dev/null
@@ -1,888 +0,0 @@
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-import * as fsm from '@isaacs/fs-minipass';
-import assert from 'node:assert';
-import { randomBytes } from 'node:crypto';
-import fs from 'node:fs';
-import path from 'node:path';
-import { getWriteFlag } from './get-write-flag.js';
-import { mkdir, mkdirSync } from './mkdir.js';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { Parser } from './parse.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import * as wc from './winchars.js';
-import { PathReservations } from './path-reservations.js';
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file. It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
- if (!isWindows) {
- return fs.unlink(path, cb);
- }
- const name = path + '.DELETE.' + randomBytes(16).toString('hex');
- fs.rename(path, name, er => {
- if (er) {
- return cb(er);
- }
- fs.unlink(name, cb);
- });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
- if (!isWindows) {
- return fs.unlinkSync(path);
- }
- const name = path + '.DELETE.' + randomBytes(16).toString('hex');
- fs.renameSync(path, name);
- fs.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
- : b !== undefined && b === b >>> 0 ? b
- : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation. Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
- abs = cacheKeyNormalize(abs);
- for (const path of cache.keys()) {
- const pnorm = cacheKeyNormalize(path);
- if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
- cache.delete(path);
- }
- }
-};
-const dropCache = (cache) => {
- for (const key of cache.keys()) {
- cache.delete(key);
- }
-};
-export class Unpack extends Parser {
- [ENDED] = false;
- [CHECKED_CWD] = false;
- [PENDING] = 0;
- reservations = new PathReservations();
- transform;
- writable = true;
- readable = false;
- dirCache;
- uid;
- gid;
- setOwner;
- preserveOwner;
- processGid;
- processUid;
- maxDepth;
- forceChown;
- win32;
- newer;
- keep;
- noMtime;
- preservePaths;
- unlink;
- cwd;
- strip;
- processUmask;
- umask;
- dmode;
- fmode;
- chmod;
- constructor(opt = {}) {
- opt.ondone = () => {
- this[ENDED] = true;
- this[MAYBECLOSE]();
- };
- super(opt);
- this.transform = opt.transform;
- this.dirCache = opt.dirCache || new Map();
- this.chmod = !!opt.chmod;
- if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
- // need both or neither
- if (typeof opt.uid !== 'number' ||
- typeof opt.gid !== 'number') {
- throw new TypeError('cannot set owner without number uid and gid');
- }
- if (opt.preserveOwner) {
- throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
- }
- this.uid = opt.uid;
- this.gid = opt.gid;
- this.setOwner = true;
- }
- else {
- this.uid = undefined;
- this.gid = undefined;
- this.setOwner = false;
- }
- // default true for root
- if (opt.preserveOwner === undefined &&
- typeof opt.uid !== 'number') {
- this.preserveOwner = !!(process.getuid && process.getuid() === 0);
- }
- else {
- this.preserveOwner = !!opt.preserveOwner;
- }
- this.processUid =
- (this.preserveOwner || this.setOwner) && process.getuid ?
- process.getuid()
- : undefined;
- this.processGid =
- (this.preserveOwner || this.setOwner) && process.getgid ?
- process.getgid()
- : undefined;
- // prevent excessively deep nesting of subfolders
- // set to `Infinity` to remove this restriction
- this.maxDepth =
- typeof opt.maxDepth === 'number' ?
- opt.maxDepth
- : DEFAULT_MAX_DEPTH;
- // mostly just for testing, but useful in some cases.
- // Forcibly trigger a chown on every entry, no matter what
- this.forceChown = opt.forceChown === true;
- // turn >| in filenames into 0xf000-higher encoded forms
- this.win32 = !!opt.win32 || isWindows;
- // do not unpack over files that are newer than what's in the archive
- this.newer = !!opt.newer;
- // do not unpack over ANY files
- this.keep = !!opt.keep;
- // do not set mtime/atime of extracted entries
- this.noMtime = !!opt.noMtime;
- // allow .., absolute path entries, and unpacking through symlinks
- // without this, warn and skip .., relativize absolutes, and error
- // on symlinks in extraction path
- this.preservePaths = !!opt.preservePaths;
- // unlink files and links before writing. This breaks existing hard
- // links, and removes symlink directories rather than erroring
- this.unlink = !!opt.unlink;
- this.cwd = normalizeWindowsPath(path.resolve(opt.cwd || process.cwd()));
- this.strip = Number(opt.strip) || 0;
- // if we're not chmodding, then we don't need the process umask
- this.processUmask =
- !this.chmod ? 0
- : typeof opt.processUmask === 'number' ? opt.processUmask
- : process.umask();
- this.umask =
- typeof opt.umask === 'number' ? opt.umask : this.processUmask;
- // default mode for dirs created as parents
- this.dmode = opt.dmode || 0o0777 & ~this.umask;
- this.fmode = opt.fmode || 0o0666 & ~this.umask;
- this.on('entry', entry => this[ONENTRY](entry));
- }
- // a bad or damaged archive is a warning for Parser, but an error
- // when extracting. Mark those errors as unrecoverable, because
- // the Unpack contract cannot be met.
- warn(code, msg, data = {}) {
- if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
- data.recoverable = false;
- }
- return super.warn(code, msg, data);
- }
- [MAYBECLOSE]() {
- if (this[ENDED] && this[PENDING] === 0) {
- this.emit('prefinish');
- this.emit('finish');
- this.emit('end');
- }
- }
- [CHECKPATH](entry) {
- const p = normalizeWindowsPath(entry.path);
- const parts = p.split('/');
- if (this.strip) {
- if (parts.length < this.strip) {
- return false;
- }
- if (entry.type === 'Link') {
- const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/');
- if (linkparts.length >= this.strip) {
- entry.linkpath = linkparts.slice(this.strip).join('/');
- }
- else {
- return false;
- }
- }
- parts.splice(0, this.strip);
- entry.path = parts.join('/');
- }
- if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
- this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
- entry,
- path: p,
- depth: parts.length,
- maxDepth: this.maxDepth,
- });
- return false;
- }
- if (!this.preservePaths) {
- if (parts.includes('..') ||
- /* c8 ignore next */
- (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
- this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
- entry,
- path: p,
- });
- return false;
- }
- // strip off the root
- const [root, stripped] = stripAbsolutePath(p);
- if (root) {
- entry.path = String(stripped);
- this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
- entry,
- path: p,
- });
- }
- }
- if (path.isAbsolute(entry.path)) {
- entry.absolute = normalizeWindowsPath(path.resolve(entry.path));
- }
- else {
- entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path));
- }
- // if we somehow ended up with a path that escapes the cwd, and we are
- // not in preservePaths mode, then something is fishy! This should have
- // been prevented above, so ignore this for coverage.
- /* c8 ignore start - defense in depth */
- if (!this.preservePaths &&
- typeof entry.absolute === 'string' &&
- entry.absolute.indexOf(this.cwd + '/') !== 0 &&
- entry.absolute !== this.cwd) {
- this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
- entry,
- path: normalizeWindowsPath(entry.path),
- resolvedPath: entry.absolute,
- cwd: this.cwd,
- });
- return false;
- }
- /* c8 ignore stop */
- // an archive can set properties on the extraction directory, but it
- // may not replace the cwd with a different kind of thing entirely.
- if (entry.absolute === this.cwd &&
- entry.type !== 'Directory' &&
- entry.type !== 'GNUDumpDir') {
- return false;
- }
- // only encode : chars that aren't drive letter indicators
- if (this.win32) {
- const { root: aRoot } = path.win32.parse(String(entry.absolute));
- entry.absolute =
- aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
- const { root: pRoot } = path.win32.parse(entry.path);
- entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
- }
- return true;
- }
- [ONENTRY](entry) {
- if (!this[CHECKPATH](entry)) {
- return entry.resume();
- }
- assert.equal(typeof entry.absolute, 'string');
- switch (entry.type) {
- case 'Directory':
- case 'GNUDumpDir':
- if (entry.mode) {
- entry.mode = entry.mode | 0o700;
- }
- // eslint-disable-next-line no-fallthrough
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- case 'Link':
- case 'SymbolicLink':
- return this[CHECKFS](entry);
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'FIFO':
- default:
- return this[UNSUPPORTED](entry);
- }
- }
- [ONERROR](er, entry) {
- // Cwd has to exist, or else nothing works. That's serious.
- // Other errors are warnings, which raise the error in strict
- // mode, but otherwise continue on.
- if (er.name === 'CwdError') {
- this.emit('error', er);
- }
- else {
- this.warn('TAR_ENTRY_ERROR', er, { entry });
- this[UNPEND]();
- entry.resume();
- }
- }
- [MKDIR](dir, mode, cb) {
- mkdir(normalizeWindowsPath(dir), {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode,
- }, cb);
- }
- [DOCHOWN](entry) {
- // in preserve owner mode, chown if the entry doesn't match process
- // in set owner mode, chown if setting doesn't match process
- return (this.forceChown ||
- (this.preserveOwner &&
- ((typeof entry.uid === 'number' &&
- entry.uid !== this.processUid) ||
- (typeof entry.gid === 'number' &&
- entry.gid !== this.processGid))) ||
- (typeof this.uid === 'number' &&
- this.uid !== this.processUid) ||
- (typeof this.gid === 'number' && this.gid !== this.processGid));
- }
- [UID](entry) {
- return uint32(this.uid, entry.uid, this.processUid);
- }
- [GID](entry) {
- return uint32(this.gid, entry.gid, this.processGid);
- }
- [FILE](entry, fullyDone) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.fmode;
- const stream = new fsm.WriteStream(String(entry.absolute), {
- // slight lie, but it can be numeric flags
- flags: getWriteFlag(entry.size),
- mode: mode,
- autoClose: false,
- });
- stream.on('error', (er) => {
- if (stream.fd) {
- fs.close(stream.fd, () => { });
- }
- // flush all the data out so that we aren't left hanging
- // if the error wasn't actually fatal. otherwise the parse
- // is blocked, and we never proceed.
- stream.write = () => true;
- this[ONERROR](er, entry);
- fullyDone();
- });
- let actions = 1;
- const done = (er) => {
- if (er) {
- /* c8 ignore start - we should always have a fd by now */
- if (stream.fd) {
- fs.close(stream.fd, () => { });
- }
- /* c8 ignore stop */
- this[ONERROR](er, entry);
- fullyDone();
- return;
- }
- if (--actions === 0) {
- if (stream.fd !== undefined) {
- fs.close(stream.fd, er => {
- if (er) {
- this[ONERROR](er, entry);
- }
- else {
- this[UNPEND]();
- }
- fullyDone();
- });
- }
- }
- };
- stream.on('finish', () => {
- // if futimes fails, try utimes
- // if utimes fails, fail with the original error
- // same for fchown/chown
- const abs = String(entry.absolute);
- const fd = stream.fd;
- if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
- actions++;
- const atime = entry.atime || new Date();
- const mtime = entry.mtime;
- fs.futimes(fd, atime, mtime, er => er ?
- fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
- : done());
- }
- if (typeof fd === 'number' && this[DOCHOWN](entry)) {
- actions++;
- const uid = this[UID](entry);
- const gid = this[GID](entry);
- if (typeof uid === 'number' && typeof gid === 'number') {
- fs.fchown(fd, uid, gid, er => er ?
- fs.chown(abs, uid, gid, er2 => done(er2 && er))
- : done());
- }
- }
- done();
- });
- const tx = this.transform ? this.transform(entry) || entry : entry;
- if (tx !== entry) {
- tx.on('error', (er) => {
- this[ONERROR](er, entry);
- fullyDone();
- });
- entry.pipe(tx);
- }
- tx.pipe(stream);
- }
- [DIRECTORY](entry, fullyDone) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.dmode;
- this[MKDIR](String(entry.absolute), mode, er => {
- if (er) {
- this[ONERROR](er, entry);
- fullyDone();
- return;
- }
- let actions = 1;
- const done = () => {
- if (--actions === 0) {
- fullyDone();
- this[UNPEND]();
- entry.resume();
- }
- };
- if (entry.mtime && !this.noMtime) {
- actions++;
- fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
- }
- if (this[DOCHOWN](entry)) {
- actions++;
- fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
- }
- done();
- });
- }
- [UNSUPPORTED](entry) {
- entry.unsupported = true;
- this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
- entry.resume();
- }
- [SYMLINK](entry, done) {
- this[LINK](entry, String(entry.linkpath), 'symlink', done);
- }
- [HARDLINK](entry, done) {
- const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath)));
- this[LINK](entry, linkpath, 'link', done);
- }
- [PEND]() {
- this[PENDING]++;
- }
- [UNPEND]() {
- this[PENDING]--;
- this[MAYBECLOSE]();
- }
- [SKIP](entry) {
- this[UNPEND]();
- entry.resume();
- }
- // Check if we can reuse an existing filesystem entry safely and
- // overwrite it, rather than unlinking and recreating
- // Windows doesn't report a useful nlink, so we just never reuse entries
- [ISREUSABLE](entry, st) {
- return (entry.type === 'File' &&
- !this.unlink &&
- st.isFile() &&
- st.nlink <= 1 &&
- !isWindows);
- }
- // check if a thing is there, and if so, try to clobber it
- [CHECKFS](entry) {
- this[PEND]();
- const paths = [entry.path];
- if (entry.linkpath) {
- paths.push(entry.linkpath);
- }
- this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
- }
- [PRUNECACHE](entry) {
- // if we are not creating a directory, and the path is in the dirCache,
- // then that means we are about to delete the directory we created
- // previously, and it is no longer going to be a directory, and neither
- // is any of its children.
- // If a symbolic link is encountered, all bets are off. There is no
- // reasonable way to sanitize the cache in such a way we will be able to
- // avoid having filesystem collisions. If this happens with a non-symlink
- // entry, it'll just fail to unpack, but a symlink to a directory, using an
- // 8.3 shortname or certain unicode attacks, can evade detection and lead
- // to arbitrary writes to anywhere on the system.
- if (entry.type === 'SymbolicLink') {
- dropCache(this.dirCache);
- }
- else if (entry.type !== 'Directory') {
- pruneCache(this.dirCache, String(entry.absolute));
- }
- }
- [CHECKFS2](entry, fullyDone) {
- this[PRUNECACHE](entry);
- const done = (er) => {
- this[PRUNECACHE](entry);
- fullyDone(er);
- };
- const checkCwd = () => {
- this[MKDIR](this.cwd, this.dmode, er => {
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- this[CHECKED_CWD] = true;
- start();
- });
- };
- const start = () => {
- if (entry.absolute !== this.cwd) {
- const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
- if (parent !== this.cwd) {
- return this[MKDIR](parent, this.dmode, er => {
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- afterMakeParent();
- });
- }
- }
- afterMakeParent();
- };
- const afterMakeParent = () => {
- fs.lstat(String(entry.absolute), (lstatEr, st) => {
- if (st &&
- (this.keep ||
- /* c8 ignore next */
- (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
- this[SKIP](entry);
- done();
- return;
- }
- if (lstatEr || this[ISREUSABLE](entry, st)) {
- return this[MAKEFS](null, entry, done);
- }
- if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- const needChmod = this.chmod &&
- entry.mode &&
- (st.mode & 0o7777) !== entry.mode;
- const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
- if (!needChmod) {
- return afterChmod();
- }
- return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
- }
- // Not a dir entry, have to remove it.
- // NB: the only way to end up with an entry that is the cwd
- // itself, in such a way that == does not detect, is a
- // tricky windows absolute path with UNC or 8.3 parts (and
- // preservePaths:true, or else it will have been stripped).
- // In that case, the user has opted out of path protections
- // explicitly, so if they blow away the cwd, c'est la vie.
- if (entry.absolute !== this.cwd) {
- return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
- }
- }
- // not a dir, and not reusable
- // don't remove if the cwd, we want that error
- if (entry.absolute === this.cwd) {
- return this[MAKEFS](null, entry, done);
- }
- unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
- });
- };
- if (this[CHECKED_CWD]) {
- start();
- }
- else {
- checkCwd();
- }
- }
- [MAKEFS](er, entry, done) {
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- switch (entry.type) {
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- return this[FILE](entry, done);
- case 'Link':
- return this[HARDLINK](entry, done);
- case 'SymbolicLink':
- return this[SYMLINK](entry, done);
- case 'Directory':
- case 'GNUDumpDir':
- return this[DIRECTORY](entry, done);
- }
- }
- [LINK](entry, linkpath, link, done) {
- // XXX: get the type ('symlink' or 'junction') for windows
- fs[link](linkpath, String(entry.absolute), er => {
- if (er) {
- this[ONERROR](er, entry);
- }
- else {
- this[UNPEND]();
- entry.resume();
- }
- done();
- });
- }
-}
-const callSync = (fn) => {
- try {
- return [null, fn()];
- }
- catch (er) {
- return [er, null];
- }
-};
-export class UnpackSync extends Unpack {
- sync = true;
- [MAKEFS](er, entry) {
- return super[MAKEFS](er, entry, () => { });
- }
- [CHECKFS](entry) {
- this[PRUNECACHE](entry);
- if (!this[CHECKED_CWD]) {
- const er = this[MKDIR](this.cwd, this.dmode);
- if (er) {
- return this[ONERROR](er, entry);
- }
- this[CHECKED_CWD] = true;
- }
- // don't bother to make the parent if the current entry is the cwd,
- // we've already checked it.
- if (entry.absolute !== this.cwd) {
- const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
- if (parent !== this.cwd) {
- const mkParent = this[MKDIR](parent, this.dmode);
- if (mkParent) {
- return this[ONERROR](mkParent, entry);
- }
- }
- }
- const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute)));
- if (st &&
- (this.keep ||
- /* c8 ignore next */
- (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
- return this[SKIP](entry);
- }
- if (lstatEr || this[ISREUSABLE](entry, st)) {
- return this[MAKEFS](null, entry);
- }
- if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- const needChmod = this.chmod &&
- entry.mode &&
- (st.mode & 0o7777) !== entry.mode;
- const [er] = needChmod ?
- callSync(() => {
- fs.chmodSync(String(entry.absolute), Number(entry.mode));
- })
- : [];
- return this[MAKEFS](er, entry);
- }
- // not a dir entry, have to remove it
- const [er] = callSync(() => fs.rmdirSync(String(entry.absolute)));
- this[MAKEFS](er, entry);
- }
- // not a dir, and not reusable.
- // don't remove if it's the cwd, since we want that error.
- const [er] = entry.absolute === this.cwd ?
- []
- : callSync(() => unlinkFileSync(String(entry.absolute)));
- this[MAKEFS](er, entry);
- }
- [FILE](entry, done) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.fmode;
- const oner = (er) => {
- let closeError;
- try {
- fs.closeSync(fd);
- }
- catch (e) {
- closeError = e;
- }
- if (er || closeError) {
- this[ONERROR](er || closeError, entry);
- }
- done();
- };
- let fd;
- try {
- fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
- }
- catch (er) {
- return oner(er);
- }
- const tx = this.transform ? this.transform(entry) || entry : entry;
- if (tx !== entry) {
- tx.on('error', (er) => this[ONERROR](er, entry));
- entry.pipe(tx);
- }
- tx.on('data', (chunk) => {
- try {
- fs.writeSync(fd, chunk, 0, chunk.length);
- }
- catch (er) {
- oner(er);
- }
- });
- tx.on('end', () => {
- let er = null;
- // try both, falling futimes back to utimes
- // if either fails, handle the first error
- if (entry.mtime && !this.noMtime) {
- const atime = entry.atime || new Date();
- const mtime = entry.mtime;
- try {
- fs.futimesSync(fd, atime, mtime);
- }
- catch (futimeser) {
- try {
- fs.utimesSync(String(entry.absolute), atime, mtime);
- }
- catch (utimeser) {
- er = futimeser;
- }
- }
- }
- if (this[DOCHOWN](entry)) {
- const uid = this[UID](entry);
- const gid = this[GID](entry);
- try {
- fs.fchownSync(fd, Number(uid), Number(gid));
- }
- catch (fchowner) {
- try {
- fs.chownSync(String(entry.absolute), Number(uid), Number(gid));
- }
- catch (chowner) {
- er = er || fchowner;
- }
- }
- }
- oner(er);
- });
- }
- [DIRECTORY](entry, done) {
- const mode = typeof entry.mode === 'number' ?
- entry.mode & 0o7777
- : this.dmode;
- const er = this[MKDIR](String(entry.absolute), mode);
- if (er) {
- this[ONERROR](er, entry);
- done();
- return;
- }
- if (entry.mtime && !this.noMtime) {
- try {
- fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
- /* c8 ignore next */
- }
- catch (er) { }
- }
- if (this[DOCHOWN](entry)) {
- try {
- fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
- }
- catch (er) { }
- }
- done();
- entry.resume();
- }
- [MKDIR](dir, mode) {
- try {
- return mkdirSync(normalizeWindowsPath(dir), {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode,
- });
- }
- catch (er) {
- return er;
- }
- }
- [LINK](entry, linkpath, link, done) {
- const ls = `${link}Sync`;
- try {
- fs[ls](linkpath, String(entry.absolute));
- done();
- entry.resume();
- }
- catch (er) {
- return this[ONERROR](er, entry);
- }
- }
-}
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/update.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/update.js
deleted file mode 100644
index 21398e9766663d..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/update.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// tar -u
-import { makeCommand } from './make-command.js';
-import { replace as r } from './replace.js';
-// just call tar.r with the filter and mtimeCache
-export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => {
- r.validate?.(opt, entries);
- mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
- const filter = opt.filter;
- if (!opt.mtimeCache) {
- opt.mtimeCache = new Map();
- }
- opt.filter =
- filter ?
- (path, stat) => filter(path, stat) &&
- !(
- /* c8 ignore start */
- ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
- (stat.mtime ?? 0))
- /* c8 ignore stop */
- )
- : (path, stat) => !(
- /* c8 ignore start */
- ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
- (stat.mtime ?? 0))
- /* c8 ignore stop */
- );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/warn-method.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/warn-method.js
deleted file mode 100644
index 13e798afefc85e..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/warn-method.js
+++ /dev/null
@@ -1,27 +0,0 @@
-export const warnMethod = (self, code, message, data = {}) => {
- if (self.file) {
- data.file = self.file;
- }
- if (self.cwd) {
- data.cwd = self.cwd;
- }
- data.code =
- (message instanceof Error &&
- message.code) ||
- code;
- data.tarCode = code;
- if (!self.strict && data.recoverable !== false) {
- if (message instanceof Error) {
- data = Object.assign(message, data);
- message = message.message;
- }
- self.emit('warn', code, message, data);
- }
- else if (message instanceof Error) {
- self.emit('error', Object.assign(message, data));
- }
- else {
- self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
- }
-};
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/winchars.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/winchars.js
deleted file mode 100644
index c41eb86d69a4bb..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/winchars.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/write-entry.js b/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/write-entry.js
deleted file mode 100644
index 9028cd676b4cd2..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/esm/write-entry.js
+++ /dev/null
@@ -1,657 +0,0 @@
-import fs from 'fs';
-import { Minipass } from 'minipass';
-import path from 'path';
-import { Header } from './header.js';
-import { modeFix } from './mode-fix.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { dealias, } from './options.js';
-import { Pax } from './pax.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import { warnMethod, } from './warn-method.js';
-import * as winchars from './winchars.js';
-const prefixPath = (path, prefix) => {
- if (!prefix) {
- return normalizeWindowsPath(path);
- }
- path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '');
- return stripTrailingSlashes(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-export class WriteEntry extends Minipass {
- path;
- portable;
- myuid = (process.getuid && process.getuid()) || 0;
- // until node has builtin pwnam functions, this'll have to do
- myuser = process.env.USER || '';
- maxReadSize;
- linkCache;
- statCache;
- preservePaths;
- cwd;
- strict;
- mtime;
- noPax;
- noMtime;
- prefix;
- fd;
- blockLen = 0;
- blockRemain = 0;
- buf;
- pos = 0;
- remain = 0;
- length = 0;
- offset = 0;
- win32;
- absolute;
- header;
- type;
- linkpath;
- stat;
- onWriteEntry;
- #hadError = false;
- constructor(p, opt_ = {}) {
- const opt = dealias(opt_);
- super();
- this.path = normalizeWindowsPath(p);
- // suppress atime, ctime, uid, gid, uname, gname
- this.portable = !!opt.portable;
- this.maxReadSize = opt.maxReadSize || maxReadSize;
- this.linkCache = opt.linkCache || new Map();
- this.statCache = opt.statCache || new Map();
- this.preservePaths = !!opt.preservePaths;
- this.cwd = normalizeWindowsPath(opt.cwd || process.cwd());
- this.strict = !!opt.strict;
- this.noPax = !!opt.noPax;
- this.noMtime = !!opt.noMtime;
- this.mtime = opt.mtime;
- this.prefix =
- opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined;
- this.onWriteEntry = opt.onWriteEntry;
- if (typeof opt.onwarn === 'function') {
- this.on('warn', opt.onwarn);
- }
- let pathWarn = false;
- if (!this.preservePaths) {
- const [root, stripped] = stripAbsolutePath(this.path);
- if (root && typeof stripped === 'string') {
- this.path = stripped;
- pathWarn = root;
- }
- }
- this.win32 = !!opt.win32 || process.platform === 'win32';
- if (this.win32) {
- // force the \ to / normalization, since we might not *actually*
- // be on windows, but want \ to be considered a path separator.
- this.path = winchars.decode(this.path.replace(/\\/g, '/'));
- p = p.replace(/\\/g, '/');
- }
- this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p));
- if (this.path === '') {
- this.path = './';
- }
- if (pathWarn) {
- this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
- entry: this,
- path: pathWarn + this.path,
- });
- }
- const cs = this.statCache.get(this.absolute);
- if (cs) {
- this[ONLSTAT](cs);
- }
- else {
- this[LSTAT]();
- }
- }
- warn(code, message, data = {}) {
- return warnMethod(this, code, message, data);
- }
- emit(ev, ...data) {
- if (ev === 'error') {
- this.#hadError = true;
- }
- return super.emit(ev, ...data);
- }
- [LSTAT]() {
- fs.lstat(this.absolute, (er, stat) => {
- if (er) {
- return this.emit('error', er);
- }
- this[ONLSTAT](stat);
- });
- }
- [ONLSTAT](stat) {
- this.statCache.set(this.absolute, stat);
- this.stat = stat;
- if (!stat.isFile()) {
- stat.size = 0;
- }
- this.type = getType(stat);
- this.emit('stat', stat);
- this[PROCESS]();
- }
- [PROCESS]() {
- switch (this.type) {
- case 'File':
- return this[FILE]();
- case 'Directory':
- return this[DIRECTORY]();
- case 'SymbolicLink':
- return this[SYMLINK]();
- // unsupported types are ignored.
- default:
- return this.end();
- }
- }
- [MODE](mode) {
- return modeFix(mode, this.type === 'Directory', this.portable);
- }
- [PREFIX](path) {
- return prefixPath(path, this.prefix);
- }
- [HEADER]() {
- /* c8 ignore start */
- if (!this.stat) {
- throw new Error('cannot write header before stat');
- }
- /* c8 ignore stop */
- if (this.type === 'Directory' && this.portable) {
- this.noMtime = true;
- }
- this.onWriteEntry?.(this);
- this.header = new Header({
- path: this[PREFIX](this.path),
- // only apply the prefix to hard links.
- linkpath: this.type === 'Link' && this.linkpath !== undefined ?
- this[PREFIX](this.linkpath)
- : this.linkpath,
- // only the permissions and setuid/setgid/sticky bitflags
- // not the higher-order bits that specify file type
- mode: this[MODE](this.stat.mode),
- uid: this.portable ? undefined : this.stat.uid,
- gid: this.portable ? undefined : this.stat.gid,
- size: this.stat.size,
- mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
- /* c8 ignore next */
- type: this.type === 'Unsupported' ? undefined : this.type,
- uname: this.portable ? undefined
- : this.stat.uid === this.myuid ? this.myuser
- : '',
- atime: this.portable ? undefined : this.stat.atime,
- ctime: this.portable ? undefined : this.stat.ctime,
- });
- if (this.header.encode() && !this.noPax) {
- super.write(new Pax({
- atime: this.portable ? undefined : this.header.atime,
- ctime: this.portable ? undefined : this.header.ctime,
- gid: this.portable ? undefined : this.header.gid,
- mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
- path: this[PREFIX](this.path),
- linkpath: this.type === 'Link' && this.linkpath !== undefined ?
- this[PREFIX](this.linkpath)
- : this.linkpath,
- size: this.header.size,
- uid: this.portable ? undefined : this.header.uid,
- uname: this.portable ? undefined : this.header.uname,
- dev: this.portable ? undefined : this.stat.dev,
- ino: this.portable ? undefined : this.stat.ino,
- nlink: this.portable ? undefined : this.stat.nlink,
- }).encode());
- }
- const block = this.header?.block;
- /* c8 ignore start */
- if (!block) {
- throw new Error('failed to encode header');
- }
- /* c8 ignore stop */
- super.write(block);
- }
- [DIRECTORY]() {
- /* c8 ignore start */
- if (!this.stat) {
- throw new Error('cannot create directory entry without stat');
- }
- /* c8 ignore stop */
- if (this.path.slice(-1) !== '/') {
- this.path += '/';
- }
- this.stat.size = 0;
- this[HEADER]();
- this.end();
- }
- [SYMLINK]() {
- fs.readlink(this.absolute, (er, linkpath) => {
- if (er) {
- return this.emit('error', er);
- }
- this[ONREADLINK](linkpath);
- });
- }
- [ONREADLINK](linkpath) {
- this.linkpath = normalizeWindowsPath(linkpath);
- this[HEADER]();
- this.end();
- }
- [HARDLINK](linkpath) {
- /* c8 ignore start */
- if (!this.stat) {
- throw new Error('cannot create link entry without stat');
- }
- /* c8 ignore stop */
- this.type = 'Link';
- this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
- this.stat.size = 0;
- this[HEADER]();
- this.end();
- }
- [FILE]() {
- /* c8 ignore start */
- if (!this.stat) {
- throw new Error('cannot create file entry without stat');
- }
- /* c8 ignore stop */
- if (this.stat.nlink > 1) {
- const linkKey = `${this.stat.dev}:${this.stat.ino}`;
- const linkpath = this.linkCache.get(linkKey);
- if (linkpath?.indexOf(this.cwd) === 0) {
- return this[HARDLINK](linkpath);
- }
- this.linkCache.set(linkKey, this.absolute);
- }
- this[HEADER]();
- if (this.stat.size === 0) {
- return this.end();
- }
- this[OPENFILE]();
- }
- [OPENFILE]() {
- fs.open(this.absolute, 'r', (er, fd) => {
- if (er) {
- return this.emit('error', er);
- }
- this[ONOPENFILE](fd);
- });
- }
- [ONOPENFILE](fd) {
- this.fd = fd;
- if (this.#hadError) {
- return this[CLOSE]();
- }
- /* c8 ignore start */
- if (!this.stat) {
- throw new Error('should stat before calling onopenfile');
- }
- /* c8 ignore start */
- this.blockLen = 512 * Math.ceil(this.stat.size / 512);
- this.blockRemain = this.blockLen;
- const bufLen = Math.min(this.blockLen, this.maxReadSize);
- this.buf = Buffer.allocUnsafe(bufLen);
- this.offset = 0;
- this.pos = 0;
- this.remain = this.stat.size;
- this.length = this.buf.length;
- this[READ]();
- }
- [READ]() {
- const { fd, buf, offset, length, pos } = this;
- if (fd === undefined || buf === undefined) {
- throw new Error('cannot read file without first opening');
- }
- fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
- if (er) {
- // ignoring the error from close(2) is a bad practice, but at
- // this point we already have an error, don't need another one
- return this[CLOSE](() => this.emit('error', er));
- }
- this[ONREAD](bytesRead);
- });
- }
- /* c8 ignore start */
- [CLOSE](cb = () => { }) {
- /* c8 ignore stop */
- if (this.fd !== undefined)
- fs.close(this.fd, cb);
- }
- [ONREAD](bytesRead) {
- if (bytesRead <= 0 && this.remain > 0) {
- const er = Object.assign(new Error('encountered unexpected EOF'), {
- path: this.absolute,
- syscall: 'read',
- code: 'EOF',
- });
- return this[CLOSE](() => this.emit('error', er));
- }
- if (bytesRead > this.remain) {
- const er = Object.assign(new Error('did not encounter expected EOF'), {
- path: this.absolute,
- syscall: 'read',
- code: 'EOF',
- });
- return this[CLOSE](() => this.emit('error', er));
- }
- /* c8 ignore start */
- if (!this.buf) {
- throw new Error('should have created buffer prior to reading');
- }
- /* c8 ignore stop */
- // null out the rest of the buffer, if we could fit the block padding
- // at the end of this loop, we've incremented bytesRead and this.remain
- // to be incremented up to the blockRemain level, as if we had expected
- // to get a null-padded file, and read it until the end. then we will
- // decrement both remain and blockRemain by bytesRead, and know that we
- // reached the expected EOF, without any null buffer to append.
- if (bytesRead === this.remain) {
- for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
- this.buf[i + this.offset] = 0;
- bytesRead++;
- this.remain++;
- }
- }
- const chunk = this.offset === 0 && bytesRead === this.buf.length ?
- this.buf
- : this.buf.subarray(this.offset, this.offset + bytesRead);
- const flushed = this.write(chunk);
- if (!flushed) {
- this[AWAITDRAIN](() => this[ONDRAIN]());
- }
- else {
- this[ONDRAIN]();
- }
- }
- [AWAITDRAIN](cb) {
- this.once('drain', cb);
- }
- write(chunk, encoding, cb) {
- /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- if (typeof chunk === 'string') {
- chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
- }
- /* c8 ignore stop */
- if (this.blockRemain < chunk.length) {
- const er = Object.assign(new Error('writing more data than expected'), {
- path: this.absolute,
- });
- return this.emit('error', er);
- }
- this.remain -= chunk.length;
- this.blockRemain -= chunk.length;
- this.pos += chunk.length;
- this.offset += chunk.length;
- return super.write(chunk, null, cb);
- }
- [ONDRAIN]() {
- if (!this.remain) {
- if (this.blockRemain) {
- super.write(Buffer.alloc(this.blockRemain));
- }
- return this[CLOSE](er => er ? this.emit('error', er) : this.end());
- }
- /* c8 ignore start */
- if (!this.buf) {
- throw new Error('buffer lost somehow in ONDRAIN');
- }
- /* c8 ignore stop */
- if (this.offset >= this.length) {
- // if we only have a smaller bit left to read, alloc a smaller buffer
- // otherwise, keep it the same length it was before.
- this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
- this.offset = 0;
- }
- this.length = this.buf.length - this.offset;
- this[READ]();
- }
-}
-export class WriteEntrySync extends WriteEntry {
- sync = true;
- [LSTAT]() {
- this[ONLSTAT](fs.lstatSync(this.absolute));
- }
- [SYMLINK]() {
- this[ONREADLINK](fs.readlinkSync(this.absolute));
- }
- [OPENFILE]() {
- this[ONOPENFILE](fs.openSync(this.absolute, 'r'));
- }
- [READ]() {
- let threw = true;
- try {
- const { fd, buf, offset, length, pos } = this;
- /* c8 ignore start */
- if (fd === undefined || buf === undefined) {
- throw new Error('fd and buf must be set in READ method');
- }
- /* c8 ignore stop */
- const bytesRead = fs.readSync(fd, buf, offset, length, pos);
- this[ONREAD](bytesRead);
- threw = false;
- }
- finally {
- // ignoring the error from close(2) is a bad practice, but at
- // this point we already have an error, don't need another one
- if (threw) {
- try {
- this[CLOSE](() => { });
- }
- catch (er) { }
- }
- }
- }
- [AWAITDRAIN](cb) {
- cb();
- }
- /* c8 ignore start */
- [CLOSE](cb = () => { }) {
- /* c8 ignore stop */
- if (this.fd !== undefined)
- fs.closeSync(this.fd);
- cb();
- }
-}
-export class WriteEntryTar extends Minipass {
- blockLen = 0;
- blockRemain = 0;
- buf = 0;
- pos = 0;
- remain = 0;
- length = 0;
- preservePaths;
- portable;
- strict;
- noPax;
- noMtime;
- readEntry;
- type;
- prefix;
- path;
- mode;
- uid;
- gid;
- uname;
- gname;
- header;
- mtime;
- atime;
- ctime;
- linkpath;
- size;
- onWriteEntry;
- warn(code, message, data = {}) {
- return warnMethod(this, code, message, data);
- }
- constructor(readEntry, opt_ = {}) {
- const opt = dealias(opt_);
- super();
- this.preservePaths = !!opt.preservePaths;
- this.portable = !!opt.portable;
- this.strict = !!opt.strict;
- this.noPax = !!opt.noPax;
- this.noMtime = !!opt.noMtime;
- this.onWriteEntry = opt.onWriteEntry;
- this.readEntry = readEntry;
- const { type } = readEntry;
- /* c8 ignore start */
- if (type === 'Unsupported') {
- throw new Error('writing entry that should be ignored');
- }
- /* c8 ignore stop */
- this.type = type;
- if (this.type === 'Directory' && this.portable) {
- this.noMtime = true;
- }
- this.prefix = opt.prefix;
- this.path = normalizeWindowsPath(readEntry.path);
- this.mode =
- readEntry.mode !== undefined ?
- this[MODE](readEntry.mode)
- : undefined;
- this.uid = this.portable ? undefined : readEntry.uid;
- this.gid = this.portable ? undefined : readEntry.gid;
- this.uname = this.portable ? undefined : readEntry.uname;
- this.gname = this.portable ? undefined : readEntry.gname;
- this.size = readEntry.size;
- this.mtime =
- this.noMtime ? undefined : opt.mtime || readEntry.mtime;
- this.atime = this.portable ? undefined : readEntry.atime;
- this.ctime = this.portable ? undefined : readEntry.ctime;
- this.linkpath =
- readEntry.linkpath !== undefined ?
- normalizeWindowsPath(readEntry.linkpath)
- : undefined;
- if (typeof opt.onwarn === 'function') {
- this.on('warn', opt.onwarn);
- }
- let pathWarn = false;
- if (!this.preservePaths) {
- const [root, stripped] = stripAbsolutePath(this.path);
- if (root && typeof stripped === 'string') {
- this.path = stripped;
- pathWarn = root;
- }
- }
- this.remain = readEntry.size;
- this.blockRemain = readEntry.startBlockSize;
- this.onWriteEntry?.(this);
- this.header = new Header({
- path: this[PREFIX](this.path),
- linkpath: this.type === 'Link' && this.linkpath !== undefined ?
- this[PREFIX](this.linkpath)
- : this.linkpath,
- // only the permissions and setuid/setgid/sticky bitflags
- // not the higher-order bits that specify file type
- mode: this.mode,
- uid: this.portable ? undefined : this.uid,
- gid: this.portable ? undefined : this.gid,
- size: this.size,
- mtime: this.noMtime ? undefined : this.mtime,
- type: this.type,
- uname: this.portable ? undefined : this.uname,
- atime: this.portable ? undefined : this.atime,
- ctime: this.portable ? undefined : this.ctime,
- });
- if (pathWarn) {
- this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
- entry: this,
- path: pathWarn + this.path,
- });
- }
- if (this.header.encode() && !this.noPax) {
- super.write(new Pax({
- atime: this.portable ? undefined : this.atime,
- ctime: this.portable ? undefined : this.ctime,
- gid: this.portable ? undefined : this.gid,
- mtime: this.noMtime ? undefined : this.mtime,
- path: this[PREFIX](this.path),
- linkpath: this.type === 'Link' && this.linkpath !== undefined ?
- this[PREFIX](this.linkpath)
- : this.linkpath,
- size: this.size,
- uid: this.portable ? undefined : this.uid,
- uname: this.portable ? undefined : this.uname,
- dev: this.portable ? undefined : this.readEntry.dev,
- ino: this.portable ? undefined : this.readEntry.ino,
- nlink: this.portable ? undefined : this.readEntry.nlink,
- }).encode());
- }
- const b = this.header?.block;
- /* c8 ignore start */
- if (!b)
- throw new Error('failed to encode header');
- /* c8 ignore stop */
- super.write(b);
- readEntry.pipe(this);
- }
- [PREFIX](path) {
- return prefixPath(path, this.prefix);
- }
- [MODE](mode) {
- return modeFix(mode, this.type === 'Directory', this.portable);
- }
- write(chunk, encoding, cb) {
- /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- if (typeof chunk === 'string') {
- chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
- }
- /* c8 ignore stop */
- const writeLen = chunk.length;
- if (writeLen > this.blockRemain) {
- throw new Error('writing more to entry than is appropriate');
- }
- this.blockRemain -= writeLen;
- return super.write(chunk, cb);
- }
- end(chunk, encoding, cb) {
- if (this.blockRemain) {
- super.write(Buffer.alloc(this.blockRemain));
- }
- /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
- if (typeof chunk === 'function') {
- cb = chunk;
- encoding = undefined;
- chunk = undefined;
- }
- if (typeof encoding === 'function') {
- cb = encoding;
- encoding = undefined;
- }
- if (typeof chunk === 'string') {
- chunk = Buffer.from(chunk, encoding ?? 'utf8');
- }
- if (cb)
- this.once('finish', cb);
- chunk ? super.end(chunk, cb) : super.end(cb);
- /* c8 ignore stop */
- return this;
- }
-}
-const getType = (stat) => stat.isFile() ? 'File'
- : stat.isDirectory() ? 'Directory'
- : stat.isSymbolicLink() ? 'SymbolicLink'
- : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
deleted file mode 100644
index 0283103ee9eaf9..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
+++ /dev/null
@@ -1,325 +0,0 @@
-{
- "author": "Isaac Z. Schlueter",
- "name": "tar",
- "description": "tar for node",
- "version": "7.4.3",
- "repository": {
- "type": "git",
- "url": "https://github.com/isaacs/node-tar.git"
- },
- "scripts": {
- "genparse": "node scripts/generate-parse-fixtures.js",
- "snap": "tap",
- "test": "tap",
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "prepare": "tshy",
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "format": "prettier --write . --log-level warn",
- "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
- },
- "dependencies": {
- "@isaacs/fs-minipass": "^4.0.0",
- "chownr": "^3.0.0",
- "minipass": "^7.1.2",
- "minizlib": "^3.0.1",
- "mkdirp": "^3.0.1",
- "yallist": "^5.0.0"
- },
- "devDependencies": {
- "chmodr": "^1.2.0",
- "end-of-stream": "^1.4.3",
- "events-to-array": "^2.0.3",
- "mutate-fs": "^2.1.1",
- "nock": "^13.5.4",
- "prettier": "^3.2.5",
- "rimraf": "^5.0.5",
- "tap": "^18.7.2",
- "tshy": "^1.13.1",
- "typedoc": "^0.25.13"
- },
- "license": "ISC",
- "engines": {
- "node": ">=18"
- },
- "files": [
- "dist"
- ],
- "tap": {
- "coverage-map": "map.js",
- "timeout": 0,
- "typecheck": true
- },
- "prettier": {
- "experimentalTernaries": true,
- "semi": false,
- "printWidth": 70,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- },
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts",
- "./c": "./src/create.ts",
- "./create": "./src/create.ts",
- "./replace": "./src/create.ts",
- "./r": "./src/create.ts",
- "./list": "./src/list.ts",
- "./t": "./src/list.ts",
- "./update": "./src/update.ts",
- "./u": "./src/update.ts",
- "./extract": "./src/extract.ts",
- "./x": "./src/extract.ts",
- "./pack": "./src/pack.ts",
- "./unpack": "./src/unpack.ts",
- "./parse": "./src/parse.ts",
- "./read-entry": "./src/read-entry.ts",
- "./write-entry": "./src/write-entry.ts",
- "./header": "./src/header.ts",
- "./pax": "./src/pax.ts",
- "./types": "./src/types.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "source": "./src/index.ts",
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "source": "./src/index.ts",
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- },
- "./c": {
- "import": {
- "source": "./src/create.ts",
- "types": "./dist/esm/create.d.ts",
- "default": "./dist/esm/create.js"
- },
- "require": {
- "source": "./src/create.ts",
- "types": "./dist/commonjs/create.d.ts",
- "default": "./dist/commonjs/create.js"
- }
- },
- "./create": {
- "import": {
- "source": "./src/create.ts",
- "types": "./dist/esm/create.d.ts",
- "default": "./dist/esm/create.js"
- },
- "require": {
- "source": "./src/create.ts",
- "types": "./dist/commonjs/create.d.ts",
- "default": "./dist/commonjs/create.js"
- }
- },
- "./replace": {
- "import": {
- "source": "./src/create.ts",
- "types": "./dist/esm/create.d.ts",
- "default": "./dist/esm/create.js"
- },
- "require": {
- "source": "./src/create.ts",
- "types": "./dist/commonjs/create.d.ts",
- "default": "./dist/commonjs/create.js"
- }
- },
- "./r": {
- "import": {
- "source": "./src/create.ts",
- "types": "./dist/esm/create.d.ts",
- "default": "./dist/esm/create.js"
- },
- "require": {
- "source": "./src/create.ts",
- "types": "./dist/commonjs/create.d.ts",
- "default": "./dist/commonjs/create.js"
- }
- },
- "./list": {
- "import": {
- "source": "./src/list.ts",
- "types": "./dist/esm/list.d.ts",
- "default": "./dist/esm/list.js"
- },
- "require": {
- "source": "./src/list.ts",
- "types": "./dist/commonjs/list.d.ts",
- "default": "./dist/commonjs/list.js"
- }
- },
- "./t": {
- "import": {
- "source": "./src/list.ts",
- "types": "./dist/esm/list.d.ts",
- "default": "./dist/esm/list.js"
- },
- "require": {
- "source": "./src/list.ts",
- "types": "./dist/commonjs/list.d.ts",
- "default": "./dist/commonjs/list.js"
- }
- },
- "./update": {
- "import": {
- "source": "./src/update.ts",
- "types": "./dist/esm/update.d.ts",
- "default": "./dist/esm/update.js"
- },
- "require": {
- "source": "./src/update.ts",
- "types": "./dist/commonjs/update.d.ts",
- "default": "./dist/commonjs/update.js"
- }
- },
- "./u": {
- "import": {
- "source": "./src/update.ts",
- "types": "./dist/esm/update.d.ts",
- "default": "./dist/esm/update.js"
- },
- "require": {
- "source": "./src/update.ts",
- "types": "./dist/commonjs/update.d.ts",
- "default": "./dist/commonjs/update.js"
- }
- },
- "./extract": {
- "import": {
- "source": "./src/extract.ts",
- "types": "./dist/esm/extract.d.ts",
- "default": "./dist/esm/extract.js"
- },
- "require": {
- "source": "./src/extract.ts",
- "types": "./dist/commonjs/extract.d.ts",
- "default": "./dist/commonjs/extract.js"
- }
- },
- "./x": {
- "import": {
- "source": "./src/extract.ts",
- "types": "./dist/esm/extract.d.ts",
- "default": "./dist/esm/extract.js"
- },
- "require": {
- "source": "./src/extract.ts",
- "types": "./dist/commonjs/extract.d.ts",
- "default": "./dist/commonjs/extract.js"
- }
- },
- "./pack": {
- "import": {
- "source": "./src/pack.ts",
- "types": "./dist/esm/pack.d.ts",
- "default": "./dist/esm/pack.js"
- },
- "require": {
- "source": "./src/pack.ts",
- "types": "./dist/commonjs/pack.d.ts",
- "default": "./dist/commonjs/pack.js"
- }
- },
- "./unpack": {
- "import": {
- "source": "./src/unpack.ts",
- "types": "./dist/esm/unpack.d.ts",
- "default": "./dist/esm/unpack.js"
- },
- "require": {
- "source": "./src/unpack.ts",
- "types": "./dist/commonjs/unpack.d.ts",
- "default": "./dist/commonjs/unpack.js"
- }
- },
- "./parse": {
- "import": {
- "source": "./src/parse.ts",
- "types": "./dist/esm/parse.d.ts",
- "default": "./dist/esm/parse.js"
- },
- "require": {
- "source": "./src/parse.ts",
- "types": "./dist/commonjs/parse.d.ts",
- "default": "./dist/commonjs/parse.js"
- }
- },
- "./read-entry": {
- "import": {
- "source": "./src/read-entry.ts",
- "types": "./dist/esm/read-entry.d.ts",
- "default": "./dist/esm/read-entry.js"
- },
- "require": {
- "source": "./src/read-entry.ts",
- "types": "./dist/commonjs/read-entry.d.ts",
- "default": "./dist/commonjs/read-entry.js"
- }
- },
- "./write-entry": {
- "import": {
- "source": "./src/write-entry.ts",
- "types": "./dist/esm/write-entry.d.ts",
- "default": "./dist/esm/write-entry.js"
- },
- "require": {
- "source": "./src/write-entry.ts",
- "types": "./dist/commonjs/write-entry.d.ts",
- "default": "./dist/commonjs/write-entry.js"
- }
- },
- "./header": {
- "import": {
- "source": "./src/header.ts",
- "types": "./dist/esm/header.d.ts",
- "default": "./dist/esm/header.js"
- },
- "require": {
- "source": "./src/header.ts",
- "types": "./dist/commonjs/header.d.ts",
- "default": "./dist/commonjs/header.js"
- }
- },
- "./pax": {
- "import": {
- "source": "./src/pax.ts",
- "types": "./dist/esm/pax.d.ts",
- "default": "./dist/esm/pax.js"
- },
- "require": {
- "source": "./src/pax.ts",
- "types": "./dist/commonjs/pax.d.ts",
- "default": "./dist/commonjs/pax.js"
- }
- },
- "./types": {
- "import": {
- "source": "./src/types.ts",
- "types": "./dist/esm/types.d.ts",
- "default": "./dist/esm/types.js"
- },
- "require": {
- "source": "./src/types.ts",
- "types": "./dist/commonjs/types.d.ts",
- "default": "./dist/commonjs/types.js"
- }
- }
- },
- "type": "module",
- "main": "./dist/commonjs/index.js",
- "types": "./dist/commonjs/index.d.ts"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/commonjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/commonjs/index.js
deleted file mode 100644
index c1e1e4741689d9..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/commonjs/index.js
+++ /dev/null
@@ -1,384 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Node = exports.Yallist = void 0;
-class Yallist {
- tail;
- head;
- length = 0;
- static create(list = []) {
- return new Yallist(list);
- }
- constructor(list = []) {
- for (const item of list) {
- this.push(item);
- }
- }
- *[Symbol.iterator]() {
- for (let walker = this.head; walker; walker = walker.next) {
- yield walker.value;
- }
- }
- removeNode(node) {
- if (node.list !== this) {
- throw new Error('removing node which does not belong to this list');
- }
- const next = node.next;
- const prev = node.prev;
- if (next) {
- next.prev = prev;
- }
- if (prev) {
- prev.next = next;
- }
- if (node === this.head) {
- this.head = next;
- }
- if (node === this.tail) {
- this.tail = prev;
- }
- this.length--;
- node.next = undefined;
- node.prev = undefined;
- node.list = undefined;
- return next;
- }
- unshiftNode(node) {
- if (node === this.head) {
- return;
- }
- if (node.list) {
- node.list.removeNode(node);
- }
- const head = this.head;
- node.list = this;
- node.next = head;
- if (head) {
- head.prev = node;
- }
- this.head = node;
- if (!this.tail) {
- this.tail = node;
- }
- this.length++;
- }
- pushNode(node) {
- if (node === this.tail) {
- return;
- }
- if (node.list) {
- node.list.removeNode(node);
- }
- const tail = this.tail;
- node.list = this;
- node.prev = tail;
- if (tail) {
- tail.next = node;
- }
- this.tail = node;
- if (!this.head) {
- this.head = node;
- }
- this.length++;
- }
- push(...args) {
- for (let i = 0, l = args.length; i < l; i++) {
- push(this, args[i]);
- }
- return this.length;
- }
- unshift(...args) {
- for (var i = 0, l = args.length; i < l; i++) {
- unshift(this, args[i]);
- }
- return this.length;
- }
- pop() {
- if (!this.tail) {
- return undefined;
- }
- const res = this.tail.value;
- const t = this.tail;
- this.tail = this.tail.prev;
- if (this.tail) {
- this.tail.next = undefined;
- }
- else {
- this.head = undefined;
- }
- t.list = undefined;
- this.length--;
- return res;
- }
- shift() {
- if (!this.head) {
- return undefined;
- }
- const res = this.head.value;
- const h = this.head;
- this.head = this.head.next;
- if (this.head) {
- this.head.prev = undefined;
- }
- else {
- this.tail = undefined;
- }
- h.list = undefined;
- this.length--;
- return res;
- }
- forEach(fn, thisp) {
- thisp = thisp || this;
- for (let walker = this.head, i = 0; !!walker; i++) {
- fn.call(thisp, walker.value, i, this);
- walker = walker.next;
- }
- }
- forEachReverse(fn, thisp) {
- thisp = thisp || this;
- for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
- fn.call(thisp, walker.value, i, this);
- walker = walker.prev;
- }
- }
- get(n) {
- let i = 0;
- let walker = this.head;
- for (; !!walker && i < n; i++) {
- walker = walker.next;
- }
- if (i === n && !!walker) {
- return walker.value;
- }
- }
- getReverse(n) {
- let i = 0;
- let walker = this.tail;
- for (; !!walker && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.prev;
- }
- if (i === n && !!walker) {
- return walker.value;
- }
- }
- map(fn, thisp) {
- thisp = thisp || this;
- const res = new Yallist();
- for (let walker = this.head; !!walker;) {
- res.push(fn.call(thisp, walker.value, this));
- walker = walker.next;
- }
- return res;
- }
- mapReverse(fn, thisp) {
- thisp = thisp || this;
- var res = new Yallist();
- for (let walker = this.tail; !!walker;) {
- res.push(fn.call(thisp, walker.value, this));
- walker = walker.prev;
- }
- return res;
- }
- reduce(fn, initial) {
- let acc;
- let walker = this.head;
- if (arguments.length > 1) {
- acc = initial;
- }
- else if (this.head) {
- walker = this.head.next;
- acc = this.head.value;
- }
- else {
- throw new TypeError('Reduce of empty list with no initial value');
- }
- for (var i = 0; !!walker; i++) {
- acc = fn(acc, walker.value, i);
- walker = walker.next;
- }
- return acc;
- }
- reduceReverse(fn, initial) {
- let acc;
- let walker = this.tail;
- if (arguments.length > 1) {
- acc = initial;
- }
- else if (this.tail) {
- walker = this.tail.prev;
- acc = this.tail.value;
- }
- else {
- throw new TypeError('Reduce of empty list with no initial value');
- }
- for (let i = this.length - 1; !!walker; i--) {
- acc = fn(acc, walker.value, i);
- walker = walker.prev;
- }
- return acc;
- }
- toArray() {
- const arr = new Array(this.length);
- for (let i = 0, walker = this.head; !!walker; i++) {
- arr[i] = walker.value;
- walker = walker.next;
- }
- return arr;
- }
- toArrayReverse() {
- const arr = new Array(this.length);
- for (let i = 0, walker = this.tail; !!walker; i++) {
- arr[i] = walker.value;
- walker = walker.prev;
- }
- return arr;
- }
- slice(from = 0, to = this.length) {
- if (to < 0) {
- to += this.length;
- }
- if (from < 0) {
- from += this.length;
- }
- const ret = new Yallist();
- if (to < from || to < 0) {
- return ret;
- }
- if (from < 0) {
- from = 0;
- }
- if (to > this.length) {
- to = this.length;
- }
- let walker = this.head;
- let i = 0;
- for (i = 0; !!walker && i < from; i++) {
- walker = walker.next;
- }
- for (; !!walker && i < to; i++, walker = walker.next) {
- ret.push(walker.value);
- }
- return ret;
- }
- sliceReverse(from = 0, to = this.length) {
- if (to < 0) {
- to += this.length;
- }
- if (from < 0) {
- from += this.length;
- }
- const ret = new Yallist();
- if (to < from || to < 0) {
- return ret;
- }
- if (from < 0) {
- from = 0;
- }
- if (to > this.length) {
- to = this.length;
- }
- let i = this.length;
- let walker = this.tail;
- for (; !!walker && i > to; i--) {
- walker = walker.prev;
- }
- for (; !!walker && i > from; i--, walker = walker.prev) {
- ret.push(walker.value);
- }
- return ret;
- }
- splice(start, deleteCount = 0, ...nodes) {
- if (start > this.length) {
- start = this.length - 1;
- }
- if (start < 0) {
- start = this.length + start;
- }
- let walker = this.head;
- for (let i = 0; !!walker && i < start; i++) {
- walker = walker.next;
- }
- const ret = [];
- for (let i = 0; !!walker && i < deleteCount; i++) {
- ret.push(walker.value);
- walker = this.removeNode(walker);
- }
- if (!walker) {
- walker = this.tail;
- }
- else if (walker !== this.tail) {
- walker = walker.prev;
- }
- for (const v of nodes) {
- walker = insertAfter(this, walker, v);
- }
- return ret;
- }
- reverse() {
- const head = this.head;
- const tail = this.tail;
- for (let walker = head; !!walker; walker = walker.prev) {
- const p = walker.prev;
- walker.prev = walker.next;
- walker.next = p;
- }
- this.head = tail;
- this.tail = head;
- return this;
- }
-}
-exports.Yallist = Yallist;
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
- const prev = node;
- const next = node ? node.next : self.head;
- const inserted = new Node(value, prev, next, self);
- if (inserted.next === undefined) {
- self.tail = inserted;
- }
- if (inserted.prev === undefined) {
- self.head = inserted;
- }
- self.length++;
- return inserted;
-}
-function push(self, item) {
- self.tail = new Node(item, self.tail, undefined, self);
- if (!self.head) {
- self.head = self.tail;
- }
- self.length++;
-}
-function unshift(self, item) {
- self.head = new Node(item, undefined, self.head, self);
- if (!self.tail) {
- self.tail = self.head;
- }
- self.length++;
-}
-class Node {
- list;
- next;
- prev;
- value;
- constructor(value, prev, next, list) {
- this.list = list;
- this.value = value;
- if (prev) {
- prev.next = this;
- this.prev = prev;
- }
- else {
- this.prev = undefined;
- }
- if (next) {
- next.prev = this;
- this.next = next;
- }
- else {
- this.next = undefined;
- }
- }
-}
-exports.Node = Node;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/commonjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee39..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "commonjs"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/esm/index.js b/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/esm/index.js
deleted file mode 100644
index 3d81c5113b93a8..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/esm/index.js
+++ /dev/null
@@ -1,379 +0,0 @@
-export class Yallist {
- tail;
- head;
- length = 0;
- static create(list = []) {
- return new Yallist(list);
- }
- constructor(list = []) {
- for (const item of list) {
- this.push(item);
- }
- }
- *[Symbol.iterator]() {
- for (let walker = this.head; walker; walker = walker.next) {
- yield walker.value;
- }
- }
- removeNode(node) {
- if (node.list !== this) {
- throw new Error('removing node which does not belong to this list');
- }
- const next = node.next;
- const prev = node.prev;
- if (next) {
- next.prev = prev;
- }
- if (prev) {
- prev.next = next;
- }
- if (node === this.head) {
- this.head = next;
- }
- if (node === this.tail) {
- this.tail = prev;
- }
- this.length--;
- node.next = undefined;
- node.prev = undefined;
- node.list = undefined;
- return next;
- }
- unshiftNode(node) {
- if (node === this.head) {
- return;
- }
- if (node.list) {
- node.list.removeNode(node);
- }
- const head = this.head;
- node.list = this;
- node.next = head;
- if (head) {
- head.prev = node;
- }
- this.head = node;
- if (!this.tail) {
- this.tail = node;
- }
- this.length++;
- }
- pushNode(node) {
- if (node === this.tail) {
- return;
- }
- if (node.list) {
- node.list.removeNode(node);
- }
- const tail = this.tail;
- node.list = this;
- node.prev = tail;
- if (tail) {
- tail.next = node;
- }
- this.tail = node;
- if (!this.head) {
- this.head = node;
- }
- this.length++;
- }
- push(...args) {
- for (let i = 0, l = args.length; i < l; i++) {
- push(this, args[i]);
- }
- return this.length;
- }
- unshift(...args) {
- for (var i = 0, l = args.length; i < l; i++) {
- unshift(this, args[i]);
- }
- return this.length;
- }
- pop() {
- if (!this.tail) {
- return undefined;
- }
- const res = this.tail.value;
- const t = this.tail;
- this.tail = this.tail.prev;
- if (this.tail) {
- this.tail.next = undefined;
- }
- else {
- this.head = undefined;
- }
- t.list = undefined;
- this.length--;
- return res;
- }
- shift() {
- if (!this.head) {
- return undefined;
- }
- const res = this.head.value;
- const h = this.head;
- this.head = this.head.next;
- if (this.head) {
- this.head.prev = undefined;
- }
- else {
- this.tail = undefined;
- }
- h.list = undefined;
- this.length--;
- return res;
- }
- forEach(fn, thisp) {
- thisp = thisp || this;
- for (let walker = this.head, i = 0; !!walker; i++) {
- fn.call(thisp, walker.value, i, this);
- walker = walker.next;
- }
- }
- forEachReverse(fn, thisp) {
- thisp = thisp || this;
- for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
- fn.call(thisp, walker.value, i, this);
- walker = walker.prev;
- }
- }
- get(n) {
- let i = 0;
- let walker = this.head;
- for (; !!walker && i < n; i++) {
- walker = walker.next;
- }
- if (i === n && !!walker) {
- return walker.value;
- }
- }
- getReverse(n) {
- let i = 0;
- let walker = this.tail;
- for (; !!walker && i < n; i++) {
- // abort out of the list early if we hit a cycle
- walker = walker.prev;
- }
- if (i === n && !!walker) {
- return walker.value;
- }
- }
- map(fn, thisp) {
- thisp = thisp || this;
- const res = new Yallist();
- for (let walker = this.head; !!walker;) {
- res.push(fn.call(thisp, walker.value, this));
- walker = walker.next;
- }
- return res;
- }
- mapReverse(fn, thisp) {
- thisp = thisp || this;
- var res = new Yallist();
- for (let walker = this.tail; !!walker;) {
- res.push(fn.call(thisp, walker.value, this));
- walker = walker.prev;
- }
- return res;
- }
- reduce(fn, initial) {
- let acc;
- let walker = this.head;
- if (arguments.length > 1) {
- acc = initial;
- }
- else if (this.head) {
- walker = this.head.next;
- acc = this.head.value;
- }
- else {
- throw new TypeError('Reduce of empty list with no initial value');
- }
- for (var i = 0; !!walker; i++) {
- acc = fn(acc, walker.value, i);
- walker = walker.next;
- }
- return acc;
- }
- reduceReverse(fn, initial) {
- let acc;
- let walker = this.tail;
- if (arguments.length > 1) {
- acc = initial;
- }
- else if (this.tail) {
- walker = this.tail.prev;
- acc = this.tail.value;
- }
- else {
- throw new TypeError('Reduce of empty list with no initial value');
- }
- for (let i = this.length - 1; !!walker; i--) {
- acc = fn(acc, walker.value, i);
- walker = walker.prev;
- }
- return acc;
- }
- toArray() {
- const arr = new Array(this.length);
- for (let i = 0, walker = this.head; !!walker; i++) {
- arr[i] = walker.value;
- walker = walker.next;
- }
- return arr;
- }
- toArrayReverse() {
- const arr = new Array(this.length);
- for (let i = 0, walker = this.tail; !!walker; i++) {
- arr[i] = walker.value;
- walker = walker.prev;
- }
- return arr;
- }
- slice(from = 0, to = this.length) {
- if (to < 0) {
- to += this.length;
- }
- if (from < 0) {
- from += this.length;
- }
- const ret = new Yallist();
- if (to < from || to < 0) {
- return ret;
- }
- if (from < 0) {
- from = 0;
- }
- if (to > this.length) {
- to = this.length;
- }
- let walker = this.head;
- let i = 0;
- for (i = 0; !!walker && i < from; i++) {
- walker = walker.next;
- }
- for (; !!walker && i < to; i++, walker = walker.next) {
- ret.push(walker.value);
- }
- return ret;
- }
- sliceReverse(from = 0, to = this.length) {
- if (to < 0) {
- to += this.length;
- }
- if (from < 0) {
- from += this.length;
- }
- const ret = new Yallist();
- if (to < from || to < 0) {
- return ret;
- }
- if (from < 0) {
- from = 0;
- }
- if (to > this.length) {
- to = this.length;
- }
- let i = this.length;
- let walker = this.tail;
- for (; !!walker && i > to; i--) {
- walker = walker.prev;
- }
- for (; !!walker && i > from; i--, walker = walker.prev) {
- ret.push(walker.value);
- }
- return ret;
- }
- splice(start, deleteCount = 0, ...nodes) {
- if (start > this.length) {
- start = this.length - 1;
- }
- if (start < 0) {
- start = this.length + start;
- }
- let walker = this.head;
- for (let i = 0; !!walker && i < start; i++) {
- walker = walker.next;
- }
- const ret = [];
- for (let i = 0; !!walker && i < deleteCount; i++) {
- ret.push(walker.value);
- walker = this.removeNode(walker);
- }
- if (!walker) {
- walker = this.tail;
- }
- else if (walker !== this.tail) {
- walker = walker.prev;
- }
- for (const v of nodes) {
- walker = insertAfter(this, walker, v);
- }
- return ret;
- }
- reverse() {
- const head = this.head;
- const tail = this.tail;
- for (let walker = head; !!walker; walker = walker.prev) {
- const p = walker.prev;
- walker.prev = walker.next;
- walker.next = p;
- }
- this.head = tail;
- this.tail = head;
- return this;
- }
-}
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
- const prev = node;
- const next = node ? node.next : self.head;
- const inserted = new Node(value, prev, next, self);
- if (inserted.next === undefined) {
- self.tail = inserted;
- }
- if (inserted.prev === undefined) {
- self.head = inserted;
- }
- self.length++;
- return inserted;
-}
-function push(self, item) {
- self.tail = new Node(item, self.tail, undefined, self);
- if (!self.head) {
- self.head = self.tail;
- }
- self.length++;
-}
-function unshift(self, item) {
- self.head = new Node(item, undefined, self.head, self);
- if (!self.tail) {
- self.tail = self.head;
- }
- self.length++;
-}
-export class Node {
- list;
- next;
- prev;
- value;
- constructor(value, prev, next, list) {
- this.list = list;
- this.value = value;
- if (prev) {
- prev.next = this;
- this.prev = prev;
- }
- else {
- this.prev = undefined;
- }
- if (next) {
- next.prev = this;
- this.next = next;
- }
- else {
- this.next = undefined;
- }
- }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/esm/package.json b/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c055..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/yallist/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "type": "module"
-}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/yallist/package.json b/deps/npm/node_modules/node-gyp/node_modules/yallist/package.json
deleted file mode 100644
index 2f5247808bbea8..00000000000000
--- a/deps/npm/node_modules/node-gyp/node_modules/yallist/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
- "name": "yallist",
- "version": "5.0.0",
- "description": "Yet Another Linked List",
- "files": [
- "dist"
- ],
- "devDependencies": {
- "prettier": "^3.2.5",
- "tap": "^18.7.2",
- "tshy": "^1.13.1",
- "typedoc": "^0.25.13"
- },
- "scripts": {
- "preversion": "npm test",
- "postversion": "npm publish",
- "prepublishOnly": "git push origin --follow-tags",
- "prepare": "tshy",
- "pretest": "npm run prepare",
- "presnap": "npm run prepare",
- "test": "tap",
- "snap": "tap",
- "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
- "typedoc": "typedoc"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/isaacs/yallist.git"
- },
- "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
- "license": "BlueOak-1.0.0",
- "tshy": {
- "exports": {
- "./package.json": "./package.json",
- ".": "./src/index.ts"
- }
- },
- "exports": {
- "./package.json": "./package.json",
- ".": {
- "import": {
- "types": "./dist/esm/index.d.ts",
- "default": "./dist/esm/index.js"
- },
- "require": {
- "types": "./dist/commonjs/index.d.ts",
- "default": "./dist/commonjs/index.js"
- }
- }
- },
- "main": "./dist/commonjs/index.js",
- "types": "./dist/commonjs/index.d.ts",
- "type": "module",
- "prettier": {
- "semi": false,
- "printWidth": 70,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- },
- "engines": {
- "node": ">=18"
- }
-}
diff --git a/deps/npm/node_modules/node-gyp/package.json b/deps/npm/node_modules/node-gyp/package.json
index f69a022ef3d12b..3c9fd0ff318bad 100644
--- a/deps/npm/node_modules/node-gyp/package.json
+++ b/deps/npm/node_modules/node-gyp/package.json
@@ -11,7 +11,7 @@
"bindings",
"gyp"
],
- "version": "11.2.0",
+ "version": "11.5.0",
"installVersion": 11,
"author": "Nathan Rajlich (http://tootallnate.net)",
"repository": {
diff --git a/deps/npm/node_modules/normalize-package-data/lib/fixer.js b/deps/npm/node_modules/normalize-package-data/lib/fixer.js
index 1c30cad65e6cb1..49b97f5e322e7a 100644
--- a/deps/npm/node_modules/normalize-package-data/lib/fixer.js
+++ b/deps/npm/node_modules/normalize-package-data/lib/fixer.js
@@ -1,11 +1,11 @@
+var { URL } = require('node:url')
var isValidSemver = require('semver/functions/valid')
var cleanSemver = require('semver/functions/clean')
var validateLicense = require('validate-npm-package-license')
var hostedGitInfo = require('hosted-git-info')
-var moduleBuiltin = require('node:module')
+var { isBuiltin } = require('node:module')
var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies']
var extractDescription = require('./extract_description')
-var url = require('url')
var typos = require('./typos.json')
var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
@@ -231,7 +231,7 @@ module.exports = {
data.name = data.name.trim()
}
ensureValidName(data.name, strict, options.allowLegacyCase)
- if (moduleBuiltin.builtinModules.includes(data.name)) {
+ if (isBuiltin(data.name)) {
this.warn('conflictingName', data.name)
}
},
@@ -269,8 +269,7 @@ module.exports = {
if (typeof data.bugs === 'string') {
if (isEmail(data.bugs)) {
data.bugs = { email: data.bugs }
- /* eslint-disable-next-line node/no-deprecated-api */
- } else if (url.parse(data.bugs).protocol) {
+ } else if (URL.canParse(data.bugs)) {
data.bugs = { url: data.bugs }
} else {
this.warn('nonEmailUrlBugsString')
@@ -280,8 +279,7 @@ module.exports = {
var oldBugs = data.bugs
data.bugs = {}
if (oldBugs.url) {
- /* eslint-disable-next-line node/no-deprecated-api */
- if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+ if (URL.canParse(oldBugs.url)) {
data.bugs.url = oldBugs.url
} else {
this.warn('nonUrlBugsUrlField')
@@ -317,8 +315,7 @@ module.exports = {
this.warn('nonUrlHomepage')
return delete data.homepage
}
- /* eslint-disable-next-line node/no-deprecated-api */
- if (!url.parse(data.homepage).protocol) {
+ if (!URL.canParse(data.homepage)) {
data.homepage = 'http://' + data.homepage
}
},
diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json
index a849ea3a848397..bf9b20f19d6233 100644
--- a/deps/npm/node_modules/normalize-package-data/package.json
+++ b/deps/npm/node_modules/normalize-package-data/package.json
@@ -1,6 +1,6 @@
{
"name": "normalize-package-data",
- "version": "7.0.0",
+ "version": "7.0.1",
"author": "GitHub Inc.",
"description": "Normalizes data that can be found in package.json files.",
"license": "BSD-2-Clause",
@@ -28,7 +28,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.25.0",
"tap": "^16.0.1"
},
"files": [
@@ -40,7 +40,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.25.0",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js b/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js
index ac5a182330d3b2..2c483349ae70a9 100644
--- a/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js
+++ b/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js
@@ -90,14 +90,14 @@ function checkDependency (wanted, current, opts) {
/** checks devEngines package property and returns array of warnings / errors */
function checkDevEngines (wanted, current = {}, opts = {}) {
if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) {
- throw new Error(`Invalid non-object value for devEngines`)
+ throw new Error(`Invalid non-object value for "devEngines"`)
}
const errors = []
for (const engine of Object.keys(wanted)) {
if (!recognizedEngines.includes(engine)) {
- throw new Error(`Invalid property "${engine}"`)
+ throw new Error(`Invalid property "devEngines.${engine}"`)
}
const dependencyAsAuthored = wanted[engine]
const dependencies = [dependencyAsAuthored].flat()
@@ -125,7 +125,7 @@ function checkDevEngines (wanted, current = {}, opts = {}) {
onFail = 'error'
}
- const err = Object.assign(new Error(`Invalid engine "${engine}"`), {
+ const err = Object.assign(new Error(`Invalid devEngines.${engine}`), {
errors: depErrors,
engine,
isWarn: onFail === 'warn',
diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json
index 967f5f659b2fac..28a23354bdbfea 100644
--- a/deps/npm/node_modules/npm-install-checks/package.json
+++ b/deps/npm/node_modules/npm-install-checks/package.json
@@ -1,6 +1,6 @@
{
"name": "npm-install-checks",
- "version": "7.1.1",
+ "version": "7.1.2",
"description": "Check the engines and platform fields in package.json",
"main": "lib/index.js",
"dependencies": {
@@ -8,7 +8,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.4",
+ "@npmcli/template-oss": "4.25.0",
"tap": "^16.0.1"
},
"scripts": {
@@ -40,7 +40,7 @@
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.4",
+ "version": "4.25.0",
"publish": "true"
},
"tap": {
diff --git a/deps/npm/node_modules/p-map/index.js b/deps/npm/node_modules/p-map/index.js
index 10558008a77283..f713b9bf363b0b 100644
--- a/deps/npm/node_modules/p-map/index.js
+++ b/deps/npm/node_modules/p-map/index.js
@@ -203,31 +203,32 @@ export function pMapIterable(
const iterator = iterable[Symbol.asyncIterator] === undefined ? iterable[Symbol.iterator]() : iterable[Symbol.asyncIterator]();
const promises = [];
- let runningMappersCount = 0;
+ let pendingPromisesCount = 0;
let isDone = false;
let index = 0;
function trySpawn() {
- if (isDone || !(runningMappersCount < concurrency && promises.length < backpressure)) {
+ if (isDone || !(pendingPromisesCount < concurrency && promises.length < backpressure)) {
return;
}
+ pendingPromisesCount++;
+
const promise = (async () => {
const {done, value} = await iterator.next();
if (done) {
+ pendingPromisesCount--;
return {done: true};
}
- runningMappersCount++;
-
// Spawn if still below concurrency and backpressure limit
trySpawn();
try {
const returnValue = await mapper(await value, index++);
- runningMappersCount--;
+ pendingPromisesCount--;
if (returnValue === pMapSkip) {
const index = promises.indexOf(promise);
@@ -242,6 +243,7 @@ export function pMapIterable(
return {done: false, value: returnValue};
} catch (error) {
+ pendingPromisesCount--;
isDone = true;
return {error};
}
diff --git a/deps/npm/node_modules/p-map/package.json b/deps/npm/node_modules/p-map/package.json
index b7b6594c855d8c..6401a2a6a51a9b 100644
--- a/deps/npm/node_modules/p-map/package.json
+++ b/deps/npm/node_modules/p-map/package.json
@@ -1,6 +1,6 @@
{
"name": "p-map",
- "version": "7.0.3",
+ "version": "7.0.4",
"description": "Map over promises concurrently",
"license": "MIT",
"repository": "sindresorhus/p-map",
diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json
index 71c9aa1ce32572..a992dd915d9dd8 100644
--- a/deps/npm/node_modules/pacote/package.json
+++ b/deps/npm/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
{
"name": "pacote",
- "version": "19.0.1",
+ "version": "19.0.2",
"description": "JavaScript package downloader",
"author": "GitHub Inc.",
"bin": {
@@ -28,7 +28,7 @@
"devDependencies": {
"@npmcli/arborist": "^7.1.0",
"@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.23.3",
+ "@npmcli/template-oss": "4.29.0",
"hosted-git-info": "^8.0.0",
"mutate-fs": "^2.1.1",
"nock": "^13.2.4",
@@ -61,7 +61,7 @@
"promise-retry": "^2.0.1",
"sigstore": "^3.0.0",
"ssri": "^12.0.0",
- "tar": "^6.1.11"
+ "tar": "^7.5.10"
},
"engines": {
"node": "^18.17.0 || >=20.5.0"
@@ -72,8 +72,9 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.23.3",
+ "version": "4.29.0",
"windowsCI": false,
- "publish": "true"
+ "publish": "true",
+ "backport": 19
}
}
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/LICENSE b/deps/npm/node_modules/picomatch/LICENSE
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/LICENSE
rename to deps/npm/node_modules/picomatch/LICENSE
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/index.js b/deps/npm/node_modules/picomatch/index.js
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/index.js
rename to deps/npm/node_modules/picomatch/index.js
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/constants.js b/deps/npm/node_modules/picomatch/lib/constants.js
similarity index 99%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/constants.js
rename to deps/npm/node_modules/picomatch/lib/constants.js
index 27b3e20fdfe9b7..3f7ef7e53adaf9 100644
--- a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/constants.js
+++ b/deps/npm/node_modules/picomatch/lib/constants.js
@@ -99,6 +99,7 @@ module.exports = {
// Replace globs with equivalent patterns to reduce parsing time.
REPLACEMENTS: {
+ __proto__: null,
'***': '*',
'**/**': '**',
'**/**/**': '**'
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/parse.js b/deps/npm/node_modules/picomatch/lib/parse.js
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/parse.js
rename to deps/npm/node_modules/picomatch/lib/parse.js
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/picomatch.js b/deps/npm/node_modules/picomatch/lib/picomatch.js
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/picomatch.js
rename to deps/npm/node_modules/picomatch/lib/picomatch.js
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/scan.js b/deps/npm/node_modules/picomatch/lib/scan.js
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/scan.js
rename to deps/npm/node_modules/picomatch/lib/scan.js
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/utils.js b/deps/npm/node_modules/picomatch/lib/utils.js
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/lib/utils.js
rename to deps/npm/node_modules/picomatch/lib/utils.js
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/package.json b/deps/npm/node_modules/picomatch/package.json
similarity index 98%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/package.json
rename to deps/npm/node_modules/picomatch/package.json
index 703a83dcd06611..372e27e05f412f 100644
--- a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/package.json
+++ b/deps/npm/node_modules/picomatch/package.json
@@ -1,7 +1,7 @@
{
"name": "picomatch",
"description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.",
- "version": "4.0.2",
+ "version": "4.0.3",
"homepage": "https://github.com/micromatch/picomatch",
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
"funding": "https://github.com/sponsors/jonschlinkert",
diff --git a/deps/npm/node_modules/tinyglobby/node_modules/picomatch/posix.js b/deps/npm/node_modules/picomatch/posix.js
similarity index 100%
rename from deps/npm/node_modules/tinyglobby/node_modules/picomatch/posix.js
rename to deps/npm/node_modules/picomatch/posix.js
diff --git a/deps/npm/node_modules/postcss-selector-parser/dist/parser.js b/deps/npm/node_modules/postcss-selector-parser/dist/parser.js
index ada61582777805..e36af64dbd3380 100644
--- a/deps/npm/node_modules/postcss-selector-parser/dist/parser.js
+++ b/deps/npm/node_modules/postcss-selector-parser/dist/parser.js
@@ -548,7 +548,7 @@ var Parser = /*#__PURE__*/function () {
if (_space2.endsWith(' ') && _rawSpace2.endsWith(' ')) {
spaces.before = _space2.slice(0, _space2.length - 1);
raws.spaces.before = _rawSpace2.slice(0, _rawSpace2.length - 1);
- } else if (_space2.startsWith(' ') && _rawSpace2.startsWith(' ')) {
+ } else if (_space2[0] === ' ' && _rawSpace2[0] === ' ') {
spaces.after = _space2.slice(1);
raws.spaces.after = _rawSpace2.slice(1);
} else {
diff --git a/deps/npm/node_modules/postcss-selector-parser/package.json b/deps/npm/node_modules/postcss-selector-parser/package.json
index f8b1d3619c0be1..c4f057c915bfd8 100644
--- a/deps/npm/node_modules/postcss-selector-parser/package.json
+++ b/deps/npm/node_modules/postcss-selector-parser/package.json
@@ -1,6 +1,6 @@
{
"name": "postcss-selector-parser",
- "version": "7.1.0",
+ "version": "7.1.1",
"devDependencies": {
"@babel/cli": "^7.11.6",
"@babel/core": "^7.11.6",
@@ -11,7 +11,7 @@
"@babel/register": "^7.11.5",
"ava": "^5.1.0",
"babel-plugin-add-module-exports": "^1.0.4",
- "coveralls": "^3.1.0",
+ "coveralls-next": "^4.2.1",
"del-cli": "^5.0.0",
"eslint": "^8.28.0",
"eslint-plugin-import": "^2.26.0",
@@ -39,6 +39,7 @@
"lintfix": "eslint --fix src",
"report": "nyc report --reporter=html",
"test": "nyc ava src/__tests__/*.mjs",
+ "test:node22": "nyc ava src/__tests__/*.mjs --node-arguments=--no-experimental-detect-module",
"testone": "ava"
},
"dependencies": {
diff --git a/deps/npm/node_modules/semver/README.md b/deps/npm/node_modules/semver/README.md
index 67c311f87ca3cd..f6503bfefd4640 100644
--- a/deps/npm/node_modules/semver/README.md
+++ b/deps/npm/node_modules/semver/README.md
@@ -110,8 +110,9 @@ Options:
-l --loose
Interpret versions and ranges loosely
--n <0|1>
- This is the base to be used for the prerelease identifier.
+-n <0|1|false>
+ Base number for prerelease identifier (default: 0).
+ Use false to omit the number altogether.
-p --include-prerelease
Always include prerelease versions in range matching
diff --git a/deps/npm/node_modules/semver/bin/semver.js b/deps/npm/node_modules/semver/bin/semver.js
index dbb1bf534ec722..d62bfc0ecd5216 100755
--- a/deps/npm/node_modules/semver/bin/semver.js
+++ b/deps/npm/node_modules/semver/bin/semver.js
@@ -105,7 +105,7 @@ const main = () => {
versions = versions.map((v) => {
return coerce ? (semver.coerce(v, options) || { version: v }).version : v
}).filter((v) => {
- return semver.valid(v)
+ return semver.valid(v, options)
})
if (!versions.length) {
return fail()
diff --git a/deps/npm/node_modules/semver/classes/range.js b/deps/npm/node_modules/semver/classes/range.js
index f80c2359c6b82f..94629ce6f5df60 100644
--- a/deps/npm/node_modules/semver/classes/range.js
+++ b/deps/npm/node_modules/semver/classes/range.js
@@ -255,6 +255,7 @@ const isSatisfiable = (comparators, options) => {
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
const parseComparator = (comp, options) => {
+ comp = comp.replace(re[t.BUILD], '')
debug('comp', comp, options)
comp = replaceCarets(comp, options)
debug('caret', comp)
diff --git a/deps/npm/node_modules/semver/classes/semver.js b/deps/npm/node_modules/semver/classes/semver.js
index 2efba0f4b6451e..92254be1bf075a 100644
--- a/deps/npm/node_modules/semver/classes/semver.js
+++ b/deps/npm/node_modules/semver/classes/semver.js
@@ -111,11 +111,25 @@ class SemVer {
other = new SemVer(other, this.options)
}
- return (
- compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch)
- )
+ if (this.major < other.major) {
+ return -1
+ }
+ if (this.major > other.major) {
+ return 1
+ }
+ if (this.minor < other.minor) {
+ return -1
+ }
+ if (this.minor > other.minor) {
+ return 1
+ }
+ if (this.patch < other.patch) {
+ return -1
+ }
+ if (this.patch > other.patch) {
+ return 1
+ }
+ return 0
}
comparePre (other) {
diff --git a/deps/npm/node_modules/semver/functions/diff.js b/deps/npm/node_modules/semver/functions/diff.js
index 04e064e9196b58..c99ab51cc57169 100644
--- a/deps/npm/node_modules/semver/functions/diff.js
+++ b/deps/npm/node_modules/semver/functions/diff.js
@@ -53,7 +53,7 @@ const diff = (version1, version2) => {
return prefix + 'patch'
}
- // high and low are preleases
+ // high and low are prereleases
return 'prerelease'
}
diff --git a/deps/npm/node_modules/semver/internal/identifiers.js b/deps/npm/node_modules/semver/internal/identifiers.js
index a4613dee7977f0..d053472dd58b3c 100644
--- a/deps/npm/node_modules/semver/internal/identifiers.js
+++ b/deps/npm/node_modules/semver/internal/identifiers.js
@@ -2,6 +2,10 @@
const numeric = /^[0-9]+$/
const compareIdentifiers = (a, b) => {
+ if (typeof a === 'number' && typeof b === 'number') {
+ return a === b ? 0 : a < b ? -1 : 1
+ }
+
const anum = numeric.test(a)
const bnum = numeric.test(b)
diff --git a/deps/npm/node_modules/semver/internal/re.js b/deps/npm/node_modules/semver/internal/re.js
index 4758c58d424a9b..639fca89de8e63 100644
--- a/deps/npm/node_modules/semver/internal/re.js
+++ b/deps/npm/node_modules/semver/internal/re.js
@@ -78,8 +78,8 @@ createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
-// Non-numberic identifiers include numberic identifiers but can be longer.
-// Therefore non-numberic identifiers must go first.
+// Non-numeric identifiers include numeric identifiers but can be longer.
+// Therefore non-numeric identifiers must go first.
createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NONNUMERICIDENTIFIER]
}|${src[t.NUMERICIDENTIFIER]})`)
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
index 1fbef5a9bf9cd8..a84de916085998 100644
--- a/deps/npm/node_modules/semver/package.json
+++ b/deps/npm/node_modules/semver/package.json
@@ -1,6 +1,6 @@
{
"name": "semver",
- "version": "7.7.2",
+ "version": "7.7.4",
"description": "The semantic version parser used by npm.",
"main": "index.js",
"scripts": {
@@ -14,8 +14,8 @@
"eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
},
"devDependencies": {
- "@npmcli/eslint-config": "^5.0.0",
- "@npmcli/template-oss": "4.24.3",
+ "@npmcli/eslint-config": "^6.0.0",
+ "@npmcli/template-oss": "4.29.0",
"benchmark": "^2.1.4",
"tap": "^16.0.0"
},
@@ -52,7 +52,7 @@
"author": "GitHub Inc.",
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.24.3",
+ "version": "4.29.0",
"engines": ">=10",
"distPaths": [
"classes/",
diff --git a/deps/npm/node_modules/semver/ranges/subset.js b/deps/npm/node_modules/semver/ranges/subset.js
index 2c49aef1be5e87..99f43218075c86 100644
--- a/deps/npm/node_modules/semver/ranges/subset.js
+++ b/deps/npm/node_modules/semver/ranges/subset.js
@@ -38,7 +38,7 @@ const compare = require('../functions/compare.js')
// - If LT
// - If LT.semver is greater than any < or <= comp in C, return false
// - If LT is <=, and LT.semver does not satisfy every C, return false
-// - If GT.semver has a prerelease, and not in prerelease mode
+// - If LT.semver has a prerelease, and not in prerelease mode
// - If no C has a prerelease and the LT.semver tuple, return false
// - Else return true
diff --git a/deps/npm/node_modules/socks/build/common/helpers.js b/deps/npm/node_modules/socks/build/common/helpers.js
index 58331c8659dfa8..f0fcaf043d6046 100644
--- a/deps/npm/node_modules/socks/build/common/helpers.js
+++ b/deps/npm/node_modules/socks/build/common/helpers.js
@@ -104,6 +104,7 @@ function validateCustomProxyAuth(proxy, options) {
function isValidSocksRemoteHost(remoteHost) {
return (remoteHost &&
typeof remoteHost.host === 'string' &&
+ Buffer.byteLength(remoteHost.host) < 256 &&
typeof remoteHost.port === 'number' &&
remoteHost.port >= 0 &&
remoteHost.port <= 65535);
diff --git a/deps/npm/node_modules/socks/package.json b/deps/npm/node_modules/socks/package.json
index 02e4f14e00cdc0..a7a2a20190ad3a 100644
--- a/deps/npm/node_modules/socks/package.json
+++ b/deps/npm/node_modules/socks/package.json
@@ -1,7 +1,7 @@
{
"name": "socks",
"private": false,
- "version": "2.8.5",
+ "version": "2.8.7",
"description": "Fully featured SOCKS proxy client supporting SOCKSv4, SOCKSv4a, and SOCKSv5. Includes Bind and Associate functionality.",
"main": "build/index.js",
"typings": "typings/index.d.ts",
@@ -44,7 +44,7 @@
"typescript": "^5.3.3"
},
"dependencies": {
- "ip-address": "^9.0.5",
+ "ip-address": "^10.0.1",
"smart-buffer": "^4.2.0"
},
"scripts": {
diff --git a/deps/npm/node_modules/spdx-license-ids/index.json b/deps/npm/node_modules/spdx-license-ids/index.json
index c1ae5520b18add..f51552687007bd 100644
--- a/deps/npm/node_modules/spdx-license-ids/index.json
+++ b/deps/npm/node_modules/spdx-license-ids/index.json
@@ -12,6 +12,7 @@
"AGPL-1.0-or-later",
"AGPL-3.0-only",
"AGPL-3.0-or-later",
+ "ALGLIB-Documentation",
"AMD-newlib",
"AMDPLPA",
"AML",
@@ -33,6 +34,7 @@
"Adobe-Display-PostScript",
"Adobe-Glyph",
"Adobe-Utopia",
+ "Advanced-Cryptics-Dictionary",
"Afmparse",
"Aladdin",
"Apache-1.0",
@@ -44,12 +46,16 @@
"Artistic-1.0-Perl",
"Artistic-1.0-cl8",
"Artistic-2.0",
+ "Artistic-dist",
+ "Aspell-RU",
+ "BOLA-1.1",
"BSD-1-Clause",
"BSD-2-Clause",
"BSD-2-Clause-Darwin",
"BSD-2-Clause-Patent",
"BSD-2-Clause-Views",
"BSD-2-Clause-first-lines",
+ "BSD-2-Clause-pkgconf-disclaimer",
"BSD-3-Clause",
"BSD-3-Clause-Attribution",
"BSD-3-Clause-Clear",
@@ -62,6 +68,7 @@
"BSD-3-Clause-No-Nuclear-Warranty",
"BSD-3-Clause-Open-MPI",
"BSD-3-Clause-Sun",
+ "BSD-3-Clause-Tso",
"BSD-3-Clause-acpica",
"BSD-3-Clause-flex",
"BSD-4-Clause",
@@ -72,6 +79,7 @@
"BSD-Advertising-Acknowledgement",
"BSD-Attribution-HPND-disclaimer",
"BSD-Inferno-Nettverk",
+ "BSD-Mark-Modifications",
"BSD-Protection",
"BSD-Source-Code",
"BSD-Source-beginning-file",
@@ -93,9 +101,11 @@
"Borceux",
"Brian-Gladman-2-Clause",
"Brian-Gladman-3-Clause",
+ "Buddy",
"C-UDA-1.0",
"CAL-1.0",
"CAL-1.0-Combined-Work-Exception",
+ "CAPEC-tou",
"CATOSL-1.1",
"CC-BY-1.0",
"CC-BY-2.0",
@@ -190,6 +200,7 @@
"Cornell-Lossless-JPEG",
"Cronyx",
"Crossword",
+ "CryptoSwift",
"CrystalStacker",
"Cube",
"D-FSL-1.0",
@@ -200,6 +211,7 @@
"DRL-1.0",
"DRL-1.1",
"DSDP",
+ "DocBook-DTD",
"DocBook-Schema",
"DocBook-Stylesheet",
"DocBook-XML",
@@ -211,6 +223,9 @@
"EPICS",
"EPL-1.0",
"EPL-2.0",
+ "ESA-PL-permissive-2.4",
+ "ESA-PL-strong-copyleft-2.4",
+ "ESA-PL-weak-copyleft-2.4",
"EUDatagrid",
"EUPL-1.0",
"EUPL-1.1",
@@ -225,7 +240,10 @@
"FSFAP-no-warranty-disclaimer",
"FSFUL",
"FSFULLR",
+ "FSFULLRSD",
"FSFULLRWD",
+ "FSL-1.1-ALv2",
+ "FSL-1.1-MIT",
"FTL",
"Fair",
"Ferguson-Twofish",
@@ -261,11 +279,13 @@
"GPL-2.0-or-later",
"GPL-3.0-only",
"GPL-3.0-or-later",
+ "Game-Programming-Gems",
"Giftware",
"Glide",
"Glulxe",
"Graphics-Gems",
"Gutmann",
+ "HDF5",
"HIDAPI",
"HP-1986",
"HP-1989",
@@ -279,6 +299,7 @@
"HPND-Markus-Kuhn",
"HPND-Netrek",
"HPND-Pbmplus",
+ "HPND-SMC",
"HPND-UC",
"HPND-UC-export-US",
"HPND-doc",
@@ -293,6 +314,7 @@
"HPND-sell-variant",
"HPND-sell-variant-MIT-disclaimer",
"HPND-sell-variant-MIT-disclaimer-rev",
+ "HPND-sell-variant-critical-systems",
"HTMLTIDY",
"HaskellReport",
"Hippocratic-2.1",
@@ -305,6 +327,7 @@
"IPL-1.0",
"ISC",
"ISC-Veillard",
+ "ISO-permission",
"ImageMagick",
"Imlib2",
"Info-ZIP",
@@ -362,6 +385,7 @@
"MIT-Festival",
"MIT-Khronos-old",
"MIT-Modern-Variant",
+ "MIT-STK",
"MIT-Wu",
"MIT-advertising",
"MIT-enna",
@@ -370,6 +394,7 @@
"MIT-testregex",
"MITNFA",
"MMIXware",
+ "MMPL-1.0.1",
"MPEG-SSG",
"MPL-1.0",
"MPL-1.1",
@@ -401,6 +426,7 @@
"NGPL",
"NICTA-1.0",
"NIST-PD",
+ "NIST-PD-TNT",
"NIST-PD-fallback",
"NIST-Software",
"NLOD-1.0",
@@ -411,6 +437,7 @@
"NPL-1.1",
"NPOSL-3.0",
"NRL",
+ "NTIA-PD",
"NTP",
"NTP-0",
"Naumen",
@@ -459,12 +486,15 @@
"OPL-1.0",
"OPL-UK-3.0",
"OPUBL-1.0",
+ "OSC-1.0",
"OSET-PL-2.1",
"OSL-1.0",
"OSL-1.1",
"OSL-2.0",
"OSL-2.1",
"OSL-3.0",
+ "OSSP",
+ "OpenMDW-1.0",
"OpenPBS-2.3",
"OpenSSL",
"OpenSSL-standalone",
@@ -475,6 +505,7 @@
"PHP-3.01",
"PPL",
"PSF-2.0",
+ "ParaType-Free-Font-1.3",
"Parity-6.0.0",
"Parity-7.0.0",
"Pixar",
@@ -503,6 +534,7 @@
"SGI-B-1.1",
"SGI-B-2.0",
"SGI-OpenGL",
+ "SGMLUG-PM",
"SGP4",
"SHL-0.5",
"SHL-0.51",
@@ -513,11 +545,13 @@
"SMLNJ",
"SMPPL",
"SNIA",
+ "SOFA",
"SPL-1.0",
"SSH-OpenSSH",
"SSH-short",
"SSLeay-standalone",
"SSPL-1.0",
+ "SUL-1.0",
"SWL",
"Saxpath",
"SchemeReport",
@@ -548,6 +582,7 @@
"TTYP0",
"TU-Berlin-1.0",
"TU-Berlin-2.0",
+ "TekHVC",
"TermReadKey",
"ThirdEye",
"TrustedQSL",
@@ -557,24 +592,31 @@
"UPL-1.0",
"URT-RLE",
"Ubuntu-font-1.0",
+ "UnRAR",
"Unicode-3.0",
"Unicode-DFS-2015",
"Unicode-DFS-2016",
"Unicode-TOU",
"UnixCrypt",
"Unlicense",
+ "Unlicense-libtelnet",
+ "Unlicense-libwhirlpool",
"VOSTROM",
"VSL-1.0",
"Vim",
+ "Vixie-Cron",
"W3C",
"W3C-19980720",
"W3C-20150513",
+ "WTFNMFPL",
"WTFPL",
"Watcom-1.0",
"Widget-Workshop",
+ "WordNet",
"Wsuipa",
"X11",
"X11-distribute-modifications-variant",
+ "X11-no-permit-persons",
"X11-swapped",
"XFree86-1.1",
"XSkat",
@@ -615,7 +657,10 @@
"gnuplot",
"gtkbook",
"hdparm",
+ "hyphen-bulgarian",
"iMatix",
+ "jove",
+ "libpng-1.6.35",
"libpng-2.0",
"libselinux-1.0",
"libtiff",
@@ -623,10 +668,12 @@
"lsof",
"magaz",
"mailprio",
+ "man2html",
"metamail",
"mpi-permissive",
"mpich2",
"mplus",
+ "ngrep",
"pkgconf",
"pnmstitch",
"psfrag",
diff --git a/deps/npm/node_modules/spdx-license-ids/package.json b/deps/npm/node_modules/spdx-license-ids/package.json
index 9b02c267604590..9c9ba083889a7e 100644
--- a/deps/npm/node_modules/spdx-license-ids/package.json
+++ b/deps/npm/node_modules/spdx-license-ids/package.json
@@ -1,6 +1,6 @@
{
"name": "spdx-license-ids",
- "version": "3.0.21",
+ "version": "3.0.23",
"description": "A list of SPDX license identifiers",
"repository": "jslicense/spdx-license-ids",
"author": "Shinnosuke Watanabe (https://github.com/shinnn)",
diff --git a/deps/npm/node_modules/sprintf-js/CONTRIBUTORS.md b/deps/npm/node_modules/sprintf-js/CONTRIBUTORS.md
deleted file mode 100644
index a16608e936a72c..00000000000000
--- a/deps/npm/node_modules/sprintf-js/CONTRIBUTORS.md
+++ /dev/null
@@ -1,26 +0,0 @@
-Alexander Rose [@arose](https://github.com/arose)
-Alexandru Mărășteanu [@alexei](https://github.com/alexei)
-Andras [@andrasq](https://github.com/andrasq)
-Benoit Giannangeli [@giann](https://github.com/giann)
-Branden Visser [@mrvisser](https://github.com/mrvisser)
-David Baird
-daurnimator [@daurnimator](https://github.com/daurnimator)
-Doug Beck [@beck](https://github.com/beck)
-Dzmitry Litskalau [@litmit](https://github.com/litmit)
-Fred Ludlow [@fredludlow](https://github.com/fredludlow)
-Hans Pufal
-Henry [@alograg](https://github.com/alograg)
-Johnny Shields [@johnnyshields](https://github.com/johnnyshields)
-Kamal Abdali
-Matt Simerson [@msimerson](https://github.com/msimerson)
-Maxime Robert [@marob](https://github.com/marob)
-MeriemKhelifi [@MeriemKhelifi](https://github.com/MeriemKhelifi)
-Michael Schramm [@wodka](https://github.com/wodka)
-Nazar Mokrynskyi [@nazar-pc](https://github.com/nazar-pc)
-Oliver Salzburg [@oliversalzburg](https://github.com/oliversalzburg)
-Pablo [@ppollono](https://github.com/ppollono)
-Rabehaja Stevens [@RABEHAJA-STEVENS](https://github.com/RABEHAJA-STEVENS)
-Raphael Pigulla [@pigulla](https://github.com/pigulla)
-rebeccapeltz [@rebeccapeltz](https://github.com/rebeccapeltz)
-Stefan Tingström [@stingstrom](https://github.com/stingstrom)
-Tim Gates [@timgates42](https://github.com/timgates42)
diff --git a/deps/npm/node_modules/sprintf-js/LICENSE b/deps/npm/node_modules/sprintf-js/LICENSE
deleted file mode 100644
index 83f832a2ee2829..00000000000000
--- a/deps/npm/node_modules/sprintf-js/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2007-present, Alexandru Mărășteanu
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-* Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-* Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-* Neither the name of this software nor the names of its contributors may be
- used to endorse or promote products derived from this software without
- specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/sprintf-js/bower.json b/deps/npm/node_modules/sprintf-js/bower.json
deleted file mode 100644
index d90a75989f7b05..00000000000000
--- a/deps/npm/node_modules/sprintf-js/bower.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "name": "sprintf",
- "description": "JavaScript sprintf implementation",
- "version": "1.0.3",
- "main": "src/sprintf.js",
- "license": "BSD-3-Clause-Clear",
- "keywords": ["sprintf", "string", "formatting"],
- "authors": ["Alexandru Marasteanu (http://alexei.ro/)"],
- "homepage": "https://github.com/alexei/sprintf.js",
- "repository": {
- "type": "git",
- "url": "git://github.com/alexei/sprintf.js.git"
- }
-}
diff --git a/deps/npm/node_modules/sprintf-js/demo/angular.html b/deps/npm/node_modules/sprintf-js/demo/angular.html
deleted file mode 100644
index 3559efd7635634..00000000000000
--- a/deps/npm/node_modules/sprintf-js/demo/angular.html
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-
-
-
-
-
-
- {{ "%+010d"|sprintf:-123 }}
- {{ "%+010d"|vsprintf:[-123] }}
- {{ "%+010d"|fmt:-123 }}
- {{ "%+010d"|vfmt:[-123] }}
- {{ "I've got %2$d apples and %1$d oranges."|fmt:4:2 }}
- {{ "I've got %(apples)d apples and %(oranges)d oranges."|fmt:{apples: 2, oranges: 4} }}
-
-
-
-
diff --git a/deps/npm/node_modules/sprintf-js/dist/.gitattributes b/deps/npm/node_modules/sprintf-js/dist/.gitattributes
deleted file mode 100644
index a837fd3849f783..00000000000000
--- a/deps/npm/node_modules/sprintf-js/dist/.gitattributes
+++ /dev/null
@@ -1,4 +0,0 @@
-#ignore all generated files from diff
-#also skip line ending check
-*.js -diff -text
-*.map -diff -text
diff --git a/deps/npm/node_modules/sprintf-js/dist/angular-sprintf.min.js b/deps/npm/node_modules/sprintf-js/dist/angular-sprintf.min.js
deleted file mode 100644
index 5dff8c54337dbd..00000000000000
--- a/deps/npm/node_modules/sprintf-js/dist/angular-sprintf.min.js
+++ /dev/null
@@ -1,3 +0,0 @@
-/*! sprintf-js v1.1.3 | Copyright (c) 2007-present, Alexandru Mărășteanu | BSD-3-Clause */
-!function(){"use strict";angular.module("sprintf",[]).filter("sprintf",function(){return function(){return sprintf.apply(null,arguments)}}).filter("fmt",["$filter",function(t){return t("sprintf")}]).filter("vsprintf",function(){return function(t,n){return vsprintf(t,n)}}).filter("vfmt",["$filter",function(t){return t("vsprintf")}])}();
-//# sourceMappingURL=angular-sprintf.min.js.map
diff --git a/deps/npm/node_modules/sprintf-js/dist/sprintf.min.js b/deps/npm/node_modules/sprintf-js/dist/sprintf.min.js
deleted file mode 100644
index ed09637ea39052..00000000000000
--- a/deps/npm/node_modules/sprintf-js/dist/sprintf.min.js
+++ /dev/null
@@ -1,3 +0,0 @@
-/*! sprintf-js v1.1.3 | Copyright (c) 2007-present, Alexandru Mărășteanu | BSD-3-Clause */
-!function(){"use strict";var g={not_string:/[^s]/,not_bool:/[^t]/,not_type:/[^T]/,not_primitive:/[^v]/,number:/[diefg]/,numeric_arg:/[bcdiefguxX]/,json:/[j]/,not_json:/[^j]/,text:/^[^\x25]+/,modulo:/^\x25{2}/,placeholder:/^\x25(?:([1-9]\d*)\$|\(([^)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-gijostTuvxX])/,key:/^([a-z_][a-z_\d]*)/i,key_access:/^\.([a-z_][a-z_\d]*)/i,index_access:/^\[(\d+)\]/,sign:/^[+-]/};function y(e){return function(e,t){var r,n,i,s,a,o,p,c,l,u=1,f=e.length,d="";for(n=0;n>>0).toString(8);break;case"s":r=String(r),r=s.precision?r.substring(0,s.precision):r;break;case"t":r=String(!!r),r=s.precision?r.substring(0,s.precision):r;break;case"T":r=Object.prototype.toString.call(r).slice(8,-1).toLowerCase(),r=s.precision?r.substring(0,s.precision):r;break;case"u":r=parseInt(r,10)>>>0;break;case"v":r=r.valueOf(),r=s.precision?r.substring(0,s.precision):r;break;case"x":r=(parseInt(r,10)>>>0).toString(16);break;case"X":r=(parseInt(r,10)>>>0).toString(16).toUpperCase()}g.json.test(s.type)?d+=r:(!g.number.test(s.type)||c&&!s.sign?l="":(l=c?"+":"-",r=r.toString().replace(g.sign,"")),o=s.pad_char?"0"===s.pad_char?"0":s.pad_char.charAt(1):" ",p=s.width-(l+r).length,a=s.width&&0 | <%= pkg.author %> | <%= pkg.license %> */\n",
- sourceMap: true
- },
- build: {
- files: [
- {
- src: "src/sprintf.js",
- dest: "dist/sprintf.min.js"
- },
- {
- src: "src/angular-sprintf.js",
- dest: "dist/angular-sprintf.min.js"
- }
- ]
- }
- },
-
- watch: {
- js: {
- files: "src/*.js",
- tasks: ["uglify"]
- }
- }
- })
-
- grunt.loadNpmTasks("grunt-contrib-uglify")
- grunt.loadNpmTasks("grunt-contrib-watch")
-
- grunt.registerTask("default", ["uglify", "watch"])
-}
diff --git a/deps/npm/node_modules/sprintf-js/package.json b/deps/npm/node_modules/sprintf-js/package.json
deleted file mode 100644
index 1d3dcf3deef669..00000000000000
--- a/deps/npm/node_modules/sprintf-js/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "name": "sprintf-js",
- "version": "1.1.3",
- "description": "JavaScript sprintf implementation",
- "author": "Alexandru Mărășteanu ",
- "main": "src/sprintf.js",
- "scripts": {
- "test": "mocha test/*.js",
- "pretest": "npm run lint",
- "lint": "eslint .",
- "lint:fix": "eslint --fix ."
- },
- "repository": {
- "type": "git",
- "url": "https://github.com/alexei/sprintf.js.git"
- },
- "license": "BSD-3-Clause",
- "readmeFilename": "README.md",
- "devDependencies": {
- "benchmark": "^2.1.4",
- "eslint": "^5.10.0",
- "gulp": "^3.9.1",
- "gulp-benchmark": "^1.1.1",
- "gulp-eslint": "^5.0.0",
- "gulp-header": "^2.0.5",
- "gulp-mocha": "^6.0.0",
- "gulp-rename": "^1.4.0",
- "gulp-sourcemaps": "^2.6.4",
- "gulp-uglify": "^3.0.1",
- "mocha": "^5.2.0"
- },
- "overrides": {
- "graceful-fs": "^4.2.11"
- }
-}
diff --git a/deps/npm/node_modules/sprintf-js/src/angular-sprintf.js b/deps/npm/node_modules/sprintf-js/src/angular-sprintf.js
deleted file mode 100644
index dbfdd65ab25083..00000000000000
--- a/deps/npm/node_modules/sprintf-js/src/angular-sprintf.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/* global angular, sprintf, vsprintf */
-
-!function() {
- 'use strict'
-
- angular.
- module('sprintf', []).
- filter('sprintf', function() {
- return function() {
- return sprintf.apply(null, arguments)
- }
- }).
- filter('fmt', ['$filter', function($filter) {
- return $filter('sprintf')
- }]).
- filter('vsprintf', function() {
- return function(format, argv) {
- return vsprintf(format, argv)
- }
- }).
- filter('vfmt', ['$filter', function($filter) {
- return $filter('vsprintf')
- }])
-}(); // eslint-disable-line
diff --git a/deps/npm/node_modules/sprintf-js/src/sprintf.js b/deps/npm/node_modules/sprintf-js/src/sprintf.js
deleted file mode 100644
index 65d6324645ef1d..00000000000000
--- a/deps/npm/node_modules/sprintf-js/src/sprintf.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/* global window, exports, define */
-
-!function() {
- 'use strict'
-
- var re = {
- not_string: /[^s]/,
- not_bool: /[^t]/,
- not_type: /[^T]/,
- not_primitive: /[^v]/,
- number: /[diefg]/,
- numeric_arg: /[bcdiefguxX]/,
- json: /[j]/,
- not_json: /[^j]/,
- text: /^[^\x25]+/,
- modulo: /^\x25{2}/,
- placeholder: /^\x25(?:([1-9]\d*)\$|\(([^)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-gijostTuvxX])/,
- key: /^([a-z_][a-z_\d]*)/i,
- key_access: /^\.([a-z_][a-z_\d]*)/i,
- index_access: /^\[(\d+)\]/,
- sign: /^[+-]/
- }
-
- function sprintf(key) {
- // `arguments` is not an array, but should be fine for this call
- return sprintf_format(sprintf_parse(key), arguments)
- }
-
- function vsprintf(fmt, argv) {
- return sprintf.apply(null, [fmt].concat(argv || []))
- }
-
- function sprintf_format(parse_tree, argv) {
- var cursor = 1, tree_length = parse_tree.length, arg, output = '', i, k, ph, pad, pad_character, pad_length, is_positive, sign
- for (i = 0; i < tree_length; i++) {
- if (typeof parse_tree[i] === 'string') {
- output += parse_tree[i]
- }
- else if (typeof parse_tree[i] === 'object') {
- ph = parse_tree[i] // convenience purposes only
- if (ph.keys) { // keyword argument
- arg = argv[cursor]
- for (k = 0; k < ph.keys.length; k++) {
- if (arg == undefined) {
- throw new Error(sprintf('[sprintf] Cannot access property "%s" of undefined value "%s"', ph.keys[k], ph.keys[k-1]))
- }
- arg = arg[ph.keys[k]]
- }
- }
- else if (ph.param_no) { // positional argument (explicit)
- arg = argv[ph.param_no]
- }
- else { // positional argument (implicit)
- arg = argv[cursor++]
- }
-
- if (re.not_type.test(ph.type) && re.not_primitive.test(ph.type) && arg instanceof Function) {
- arg = arg()
- }
-
- if (re.numeric_arg.test(ph.type) && (typeof arg !== 'number' && isNaN(arg))) {
- throw new TypeError(sprintf('[sprintf] expecting number but found %T', arg))
- }
-
- if (re.number.test(ph.type)) {
- is_positive = arg >= 0
- }
-
- switch (ph.type) {
- case 'b':
- arg = parseInt(arg, 10).toString(2)
- break
- case 'c':
- arg = String.fromCharCode(parseInt(arg, 10))
- break
- case 'd':
- case 'i':
- arg = parseInt(arg, 10)
- break
- case 'j':
- arg = JSON.stringify(arg, null, ph.width ? parseInt(ph.width) : 0)
- break
- case 'e':
- arg = ph.precision ? parseFloat(arg).toExponential(ph.precision) : parseFloat(arg).toExponential()
- break
- case 'f':
- arg = ph.precision ? parseFloat(arg).toFixed(ph.precision) : parseFloat(arg)
- break
- case 'g':
- arg = ph.precision ? String(Number(arg.toPrecision(ph.precision))) : parseFloat(arg)
- break
- case 'o':
- arg = (parseInt(arg, 10) >>> 0).toString(8)
- break
- case 's':
- arg = String(arg)
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 't':
- arg = String(!!arg)
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 'T':
- arg = Object.prototype.toString.call(arg).slice(8, -1).toLowerCase()
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 'u':
- arg = parseInt(arg, 10) >>> 0
- break
- case 'v':
- arg = arg.valueOf()
- arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
- break
- case 'x':
- arg = (parseInt(arg, 10) >>> 0).toString(16)
- break
- case 'X':
- arg = (parseInt(arg, 10) >>> 0).toString(16).toUpperCase()
- break
- }
- if (re.json.test(ph.type)) {
- output += arg
- }
- else {
- if (re.number.test(ph.type) && (!is_positive || ph.sign)) {
- sign = is_positive ? '+' : '-'
- arg = arg.toString().replace(re.sign, '')
- }
- else {
- sign = ''
- }
- pad_character = ph.pad_char ? ph.pad_char === '0' ? '0' : ph.pad_char.charAt(1) : ' '
- pad_length = ph.width - (sign + arg).length
- pad = ph.width ? (pad_length > 0 ? pad_character.repeat(pad_length) : '') : ''
- output += ph.align ? sign + arg + pad : (pad_character === '0' ? sign + pad + arg : pad + sign + arg)
- }
- }
- }
- return output
- }
-
- var sprintf_cache = Object.create(null)
-
- function sprintf_parse(fmt) {
- if (sprintf_cache[fmt]) {
- return sprintf_cache[fmt]
- }
-
- var _fmt = fmt, match, parse_tree = [], arg_names = 0
- while (_fmt) {
- if ((match = re.text.exec(_fmt)) !== null) {
- parse_tree.push(match[0])
- }
- else if ((match = re.modulo.exec(_fmt)) !== null) {
- parse_tree.push('%')
- }
- else if ((match = re.placeholder.exec(_fmt)) !== null) {
- if (match[2]) {
- arg_names |= 1
- var field_list = [], replacement_field = match[2], field_match = []
- if ((field_match = re.key.exec(replacement_field)) !== null) {
- field_list.push(field_match[1])
- while ((replacement_field = replacement_field.substring(field_match[0].length)) !== '') {
- if ((field_match = re.key_access.exec(replacement_field)) !== null) {
- field_list.push(field_match[1])
- }
- else if ((field_match = re.index_access.exec(replacement_field)) !== null) {
- field_list.push(field_match[1])
- }
- else {
- throw new SyntaxError('[sprintf] failed to parse named argument key')
- }
- }
- }
- else {
- throw new SyntaxError('[sprintf] failed to parse named argument key')
- }
- match[2] = field_list
- }
- else {
- arg_names |= 2
- }
- if (arg_names === 3) {
- throw new Error('[sprintf] mixing positional and named placeholders is not (yet) supported')
- }
-
- parse_tree.push(
- {
- placeholder: match[0],
- param_no: match[1],
- keys: match[2],
- sign: match[3],
- pad_char: match[4],
- align: match[5],
- width: match[6],
- precision: match[7],
- type: match[8]
- }
- )
- }
- else {
- throw new SyntaxError('[sprintf] unexpected placeholder')
- }
- _fmt = _fmt.substring(match[0].length)
- }
- return sprintf_cache[fmt] = parse_tree
- }
-
- /**
- * export to either browser or node.js
- */
- /* eslint-disable quote-props */
- if (typeof exports !== 'undefined') {
- exports['sprintf'] = sprintf
- exports['vsprintf'] = vsprintf
- }
- if (typeof window !== 'undefined') {
- window['sprintf'] = sprintf
- window['vsprintf'] = vsprintf
-
- if (typeof define === 'function' && define['amd']) {
- define(function() {
- return {
- 'sprintf': sprintf,
- 'vsprintf': vsprintf
- }
- })
- }
- }
- /* eslint-enable quote-props */
-}(); // eslint-disable-line
diff --git a/deps/npm/node_modules/sprintf-js/test/test.js b/deps/npm/node_modules/sprintf-js/test/test.js
deleted file mode 100644
index 6f57b2538c8522..00000000000000
--- a/deps/npm/node_modules/sprintf-js/test/test.js
+++ /dev/null
@@ -1,82 +0,0 @@
-var assert = require("assert"),
- sprintfjs = require("../src/sprintf.js"),
- sprintf = sprintfjs.sprintf,
- vsprintf = sprintfjs.vsprintf
-
-describe("sprintfjs", function() {
- var pi = 3.141592653589793
-
- it("should return formated strings for simple placeholders", function() {
- assert.equal("%", sprintf("%%"))
- assert.equal("10", sprintf("%b", 2))
- assert.equal("A", sprintf("%c", 65))
- assert.equal("2", sprintf("%d", 2))
- assert.equal("2", sprintf("%i", 2))
- assert.equal("2", sprintf("%d", "2"))
- assert.equal("2", sprintf("%i", "2"))
- assert.equal('{"foo":"bar"}', sprintf("%j", {foo: "bar"}))
- assert.equal('["foo","bar"]', sprintf("%j", ["foo", "bar"]))
- assert.equal("2e+0", sprintf("%e", 2))
- assert.equal("2", sprintf("%u", 2))
- assert.equal("4294967294", sprintf("%u", -2))
- assert.equal("2.2", sprintf("%f", 2.2))
- assert.equal("3.141592653589793", sprintf("%g", pi))
- assert.equal("10", sprintf("%o", 8))
- assert.equal("%s", sprintf("%s", "%s"))
- assert.equal("ff", sprintf("%x", 255))
- assert.equal("FF", sprintf("%X", 255))
- assert.equal("Polly wants a cracker", sprintf("%2$s %3$s a %1$s", "cracker", "Polly", "wants"))
- assert.equal("Hello world!", sprintf("Hello %(who)s!", {"who": "world"}))
- })
-
- it("should return formated strings for complex placeholders", function() {
- // sign
- assert.equal("2", sprintf("%d", 2))
- assert.equal("-2", sprintf("%d", -2))
- assert.equal("+2", sprintf("%+d", 2))
- assert.equal("-2", sprintf("%+d", -2))
- assert.equal("2", sprintf("%i", 2))
- assert.equal("-2", sprintf("%i", -2))
- assert.equal("+2", sprintf("%+i", 2))
- assert.equal("-2", sprintf("%+i", -2))
- assert.equal("2.2", sprintf("%f", 2.2))
- assert.equal("-2.2", sprintf("%f", -2.2))
- assert.equal("+2.2", sprintf("%+f", 2.2))
- assert.equal("-2.2", sprintf("%+f", -2.2))
- assert.equal("-2.3", sprintf("%+.1f", -2.34))
- assert.equal("-0.0", sprintf("%+.1f", -0.01))
- assert.equal("3.14159", sprintf("%.6g", pi))
- assert.equal("3.14", sprintf("%.3g", pi))
- assert.equal("3", sprintf("%.1g", pi))
- assert.equal("-000000123", sprintf("%+010d", -123))
- assert.equal("______-123", sprintf("%+'_10d", -123))
- assert.equal("-234.34 123.2", sprintf("%f %f", -234.34, 123.2))
-
- // padding
- assert.equal("-0002", sprintf("%05d", -2))
- assert.equal("-0002", sprintf("%05i", -2))
- assert.equal(" <", sprintf("%5s", "<"))
- assert.equal("0000<", sprintf("%05s", "<"))
- assert.equal("____<", sprintf("%'_5s", "<"))
- assert.equal("> ", sprintf("%-5s", ">"))
- assert.equal(">0000", sprintf("%0-5s", ">"))
- assert.equal(">____", sprintf("%'_-5s", ">"))
- assert.equal("xxxxxx", sprintf("%5s", "xxxxxx"))
- assert.equal("1234", sprintf("%02u", 1234))
- assert.equal(" -10.235", sprintf("%8.3f", -10.23456))
- assert.equal("-12.34 xxx", sprintf("%f %s", -12.34, "xxx"))
- assert.equal('{\n "foo": "bar"\n}', sprintf("%2j", {foo: "bar"}))
- assert.equal('[\n "foo",\n "bar"\n]', sprintf("%2j", ["foo", "bar"]))
-
- // precision
- assert.equal("2.3", sprintf("%.1f", 2.345))
- assert.equal("xxxxx", sprintf("%5.5s", "xxxxxx"))
- assert.equal(" x", sprintf("%5.1s", "xxxxxx"))
-
- })
-
- it("should return formated strings for callbacks", function() {
- assert.equal("foobar", sprintf("%s", function() { return "foobar" }))
- assert.equal(Date.now(), sprintf("%s", Date.now)) // should pass...
- })
-})
diff --git a/deps/npm/node_modules/tar/LICENSE b/deps/npm/node_modules/tar/LICENSE
deleted file mode 100644
index 19129e315fe593..00000000000000
--- a/deps/npm/node_modules/tar/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/yallist/LICENSE.md b/deps/npm/node_modules/tar/LICENSE.md
similarity index 87%
rename from deps/npm/node_modules/node-gyp/node_modules/yallist/LICENSE.md
rename to deps/npm/node_modules/tar/LICENSE.md
index 881248b6d7f0ca..c5402b9577a8cd 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/yallist/LICENSE.md
+++ b/deps/npm/node_modules/tar/LICENSE.md
@@ -1,11 +1,3 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
# Blue Oak Model License
Version 1.0.0
diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js b/deps/npm/node_modules/tar/dist/commonjs/create.js
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js
rename to deps/npm/node_modules/tar/dist/commonjs/create.js
diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js b/deps/npm/node_modules/tar/dist/commonjs/cwd-error.js
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js
rename to deps/npm/node_modules/tar/dist/commonjs/cwd-error.js
diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js b/deps/npm/node_modules/tar/dist/commonjs/extract.js
similarity index 80%
rename from deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js
rename to deps/npm/node_modules/tar/dist/commonjs/extract.js
index f848cbcbf779e8..86deb304d8b018 100644
--- a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js
+++ b/deps/npm/node_modules/tar/dist/commonjs/extract.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js b/deps/npm/node_modules/tar/dist/commonjs/get-write-flag.js
similarity index 100%
rename from deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js
rename to deps/npm/node_modules/tar/dist/commonjs/get-write-flag.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/header.js b/deps/npm/node_modules/tar/dist/commonjs/header.js
similarity index 85%
rename from deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/header.js
rename to deps/npm/node_modules/tar/dist/commonjs/header.js
index b3a48037b849ab..12558ed9256234 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/header.js
+++ b/deps/npm/node_modules/tar/dist/commonjs/header.js
@@ -19,13 +19,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Header = void 0;
const node_path_1 = require("node:path");
@@ -68,12 +78,13 @@ class Header {
if (!buf || !(buf.length >= off + 512)) {
throw new Error('need 512 bytes for header');
}
- this.path = decString(buf, off, 100);
- this.mode = decNumber(buf, off + 100, 8);
- this.uid = decNumber(buf, off + 108, 8);
- this.gid = decNumber(buf, off + 116, 8);
- this.size = decNumber(buf, off + 124, 12);
- this.mtime = decDate(buf, off + 136, 12);
+ this.path = ex?.path ?? decString(buf, off, 100);
+ this.mode = ex?.mode ?? gex?.mode ?? decNumber(buf, off + 100, 8);
+ this.uid = ex?.uid ?? gex?.uid ?? decNumber(buf, off + 108, 8);
+ this.gid = ex?.gid ?? gex?.gid ?? decNumber(buf, off + 116, 8);
+ this.size = ex?.size ?? gex?.size ?? decNumber(buf, off + 124, 12);
+ this.mtime =
+ ex?.mtime ?? gex?.mtime ?? decDate(buf, off + 136, 12);
this.cksum = decNumber(buf, off + 148, 12);
// if we have extended or global extended headers, apply them now
// See https://github.com/npm/node-tar/pull/187
@@ -101,11 +112,15 @@ class Header {
this.linkpath = decString(buf, off + 157, 100);
if (buf.subarray(off + 257, off + 265).toString() ===
'ustar\u000000') {
- this.uname = decString(buf, off + 265, 32);
- this.gname = decString(buf, off + 297, 32);
/* c8 ignore start */
- this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
- this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+ this.uname =
+ ex?.uname ?? gex?.uname ?? decString(buf, off + 265, 32);
+ this.gname =
+ ex?.gname ?? gex?.gname ?? decString(buf, off + 297, 32);
+ this.devmaj =
+ ex?.devmaj ?? gex?.devmaj ?? decNumber(buf, off + 329, 8) ?? 0;
+ this.devmin =
+ ex?.devmin ?? gex?.devmin ?? decNumber(buf, off + 337, 8) ?? 0;
/* c8 ignore stop */
if (buf[off + 475] !== 0) {
// definitely a prefix, definitely >130 chars.
@@ -117,8 +132,12 @@ class Header {
if (prefix) {
this.path = prefix + '/' + this.path;
}
- this.atime = decDate(buf, off + 476, 12);
- this.ctime = decDate(buf, off + 488, 12);
+ /* c8 ignore start */
+ this.atime =
+ ex?.atime ?? gex?.atime ?? decDate(buf, off + 476, 12);
+ this.ctime =
+ ex?.ctime ?? gex?.ctime ?? decDate(buf, off + 488, 12);
+ /* c8 ignore stop */
}
}
let sum = 8 * 0x20;
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/index.js b/deps/npm/node_modules/tar/dist/commonjs/index.js
similarity index 78%
rename from deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/index.js
rename to deps/npm/node_modules/tar/dist/commonjs/index.js
index e93ed5ad54aa6e..bd975c77281b71 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/tar/dist/commonjs/index.js
+++ b/deps/npm/node_modules/tar/dist/commonjs/index.js
@@ -18,13 +18,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
__exportStar(require("./create.js"), exports);
diff --git a/deps/npm/node_modules/tar/dist/commonjs/index.min.js b/deps/npm/node_modules/tar/dist/commonjs/index.min.js
new file mode 100644
index 00000000000000..d7e71f819af1a5
--- /dev/null
+++ b/deps/npm/node_modules/tar/dist/commonjs/index.min.js
@@ -0,0 +1,4 @@
+"use strict";var d=(s,e)=>()=>(e||s((e={exports:{}}).exports,e),e.exports);var We=d(C=>{"use strict";var yo=C&&C.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty(C,"__esModule",{value:!0});C.Minipass=C.isWritable=C.isReadable=C.isStream=void 0;var Br=typeof process=="object"&&process?process:{stdout:null,stderr:null},es=require("node:events"),xr=yo(require("node:stream")),Eo=require("node:string_decoder"),bo=s=>!!s&&typeof s=="object"&&(s instanceof Wt||s instanceof xr.default||(0,C.isReadable)(s)||(0,C.isWritable)(s));C.isStream=bo;var So=s=>!!s&&typeof s=="object"&&s instanceof es.EventEmitter&&typeof s.pipe=="function"&&s.pipe!==xr.default.Writable.prototype.pipe;C.isReadable=So;var go=s=>!!s&&typeof s=="object"&&s instanceof es.EventEmitter&&typeof s.write=="function"&&typeof s.end=="function";C.isWritable=go;var le=Symbol("EOF"),ue=Symbol("maybeEmitEnd"),_e=Symbol("emittedEnd"),zt=Symbol("emittingEnd"),ft=Symbol("emittedError"),kt=Symbol("closed"),zr=Symbol("read"),jt=Symbol("flush"),kr=Symbol("flushChunk"),K=Symbol("encoding"),Ue=Symbol("decoder"),R=Symbol("flowing"),dt=Symbol("paused"),qe=Symbol("resume"),O=Symbol("buffer"),I=Symbol("pipes"),v=Symbol("bufferLength"),Ki=Symbol("bufferPush"),xt=Symbol("bufferShift"),N=Symbol("objectMode"),y=Symbol("destroyed"),Vi=Symbol("error"),$i=Symbol("emitData"),jr=Symbol("emitEnd"),Xi=Symbol("emitEnd2"),J=Symbol("async"),Qi=Symbol("abort"),Ut=Symbol("aborted"),mt=Symbol("signal"),Pe=Symbol("dataListeners"),k=Symbol("discarded"),pt=s=>Promise.resolve().then(s),Ro=s=>s(),Oo=s=>s==="end"||s==="finish"||s==="prefinish",vo=s=>s instanceof ArrayBuffer||!!s&&typeof 
s=="object"&&s.constructor&&s.constructor.name==="ArrayBuffer"&&s.byteLength>=0,To=s=>!Buffer.isBuffer(s)&&ArrayBuffer.isView(s),qt=class{src;dest;opts;ondrain;constructor(e,t,i){this.src=e,this.dest=t,this.opts=i,this.ondrain=()=>e[qe](),this.dest.on("drain",this.ondrain)}unpipe(){this.dest.removeListener("drain",this.ondrain)}proxyErrors(e){}end(){this.unpipe(),this.opts.end&&this.dest.end()}},Ji=class extends qt{unpipe(){this.src.removeListener("error",this.proxyErrors),super.unpipe()}constructor(e,t,i){super(e,t,i),this.proxyErrors=r=>this.dest.emit("error",r),e.on("error",this.proxyErrors)}},Do=s=>!!s.objectMode,Po=s=>!s.objectMode&&!!s.encoding&&s.encoding!=="buffer",Wt=class extends es.EventEmitter{[R]=!1;[dt]=!1;[I]=[];[O]=[];[N];[K];[J];[Ue];[le]=!1;[_e]=!1;[zt]=!1;[kt]=!1;[ft]=null;[v]=0;[y]=!1;[mt];[Ut]=!1;[Pe]=0;[k]=!1;writable=!0;readable=!0;constructor(...e){let t=e[0]||{};if(super(),t.objectMode&&typeof t.encoding=="string")throw new TypeError("Encoding and objectMode may not be used together");Do(t)?(this[N]=!0,this[K]=null):Po(t)?(this[K]=t.encoding,this[N]=!1):(this[N]=!1,this[K]=null),this[J]=!!t.async,this[Ue]=this[K]?new Eo.StringDecoder(this[K]):null,t&&t.debugExposeBuffer===!0&&Object.defineProperty(this,"buffer",{get:()=>this[O]}),t&&t.debugExposePipes===!0&&Object.defineProperty(this,"pipes",{get:()=>this[I]});let{signal:i}=t;i&&(this[mt]=i,i.aborted?this[Qi]():i.addEventListener("abort",()=>this[Qi]()))}get bufferLength(){return this[v]}get encoding(){return this[K]}set encoding(e){throw new Error("Encoding must be set at instantiation time")}setEncoding(e){throw new Error("Encoding must be set at instantiation time")}get objectMode(){return this[N]}set objectMode(e){throw new Error("objectMode must be set at instantiation time")}get async(){return this[J]}set async(e){this[J]=this[J]||!!e}[Qi](){this[Ut]=!0,this.emit("abort",this[mt]?.reason),this.destroy(this[mt]?.reason)}get aborted(){return this[Ut]}set 
aborted(e){}write(e,t,i){if(this[Ut])return!1;if(this[le])throw new Error("write after end");if(this[y])return this.emit("error",Object.assign(new Error("Cannot call write after a stream was destroyed"),{code:"ERR_STREAM_DESTROYED"})),!0;typeof t=="function"&&(i=t,t="utf8"),t||(t="utf8");let r=this[J]?pt:Ro;if(!this[N]&&!Buffer.isBuffer(e)){if(To(e))e=Buffer.from(e.buffer,e.byteOffset,e.byteLength);else if(vo(e))e=Buffer.from(e);else if(typeof e!="string")throw new Error("Non-contiguous data written to non-objectMode stream")}return this[N]?(this[R]&&this[v]!==0&&this[jt](!0),this[R]?this.emit("data",e):this[Ki](e),this[v]!==0&&this.emit("readable"),i&&r(i),this[R]):e.length?(typeof e=="string"&&!(t===this[K]&&!this[Ue]?.lastNeed)&&(e=Buffer.from(e,t)),Buffer.isBuffer(e)&&this[K]&&(e=this[Ue].write(e)),this[R]&&this[v]!==0&&this[jt](!0),this[R]?this.emit("data",e):this[Ki](e),this[v]!==0&&this.emit("readable"),i&&r(i),this[R]):(this[v]!==0&&this.emit("readable"),i&&r(i),this[R])}read(e){if(this[y])return null;if(this[k]=!1,this[v]===0||e===0||e&&e>this[v])return this[ue](),null;this[N]&&(e=null),this[O].length>1&&!this[N]&&(this[O]=[this[K]?this[O].join(""):Buffer.concat(this[O],this[v])]);let t=this[zr](e||null,this[O][0]);return this[ue](),t}[zr](e,t){if(this[N])this[xt]();else{let i=t;e===i.length||e===null?this[xt]():typeof i=="string"?(this[O][0]=i.slice(e),t=i.slice(0,e),this[v]-=e):(this[O][0]=i.subarray(e),t=i.subarray(0,e),this[v]-=e)}return this.emit("data",t),!this[O].length&&!this[le]&&this.emit("drain"),t}end(e,t,i){return typeof e=="function"&&(i=e,e=void 0),typeof t=="function"&&(i=t,t="utf8"),e!==void 0&&this.write(e,t),i&&this.once("end",i),this[le]=!0,this.writable=!1,(this[R]||!this[dt])&&this[ue](),this}[qe](){this[y]||(!this[Pe]&&!this[I].length&&(this[k]=!0),this[dt]=!1,this[R]=!0,this.emit("resume"),this[O].length?this[jt]():this[le]?this[ue]():this.emit("drain"))}resume(){return this[qe]()}pause(){this[R]=!1,this[dt]=!0,this[k]=!1}get 
destroyed(){return this[y]}get flowing(){return this[R]}get paused(){return this[dt]}[Ki](e){this[N]?this[v]+=1:this[v]+=e.length,this[O].push(e)}[xt](){return this[N]?this[v]-=1:this[v]-=this[O][0].length,this[O].shift()}[jt](e=!1){do;while(this[kr](this[xt]())&&this[O].length);!e&&!this[O].length&&!this[le]&&this.emit("drain")}[kr](e){return this.emit("data",e),this[R]}pipe(e,t){if(this[y])return e;this[k]=!1;let i=this[_e];return t=t||{},e===Br.stdout||e===Br.stderr?t.end=!1:t.end=t.end!==!1,t.proxyErrors=!!t.proxyErrors,i?t.end&&e.end():(this[I].push(t.proxyErrors?new Ji(this,e,t):new qt(this,e,t)),this[J]?pt(()=>this[qe]()):this[qe]()),e}unpipe(e){let t=this[I].find(i=>i.dest===e);t&&(this[I].length===1?(this[R]&&this[Pe]===0&&(this[R]=!1),this[I]=[]):this[I].splice(this[I].indexOf(t),1),t.unpipe())}addListener(e,t){return this.on(e,t)}on(e,t){let i=super.on(e,t);if(e==="data")this[k]=!1,this[Pe]++,!this[I].length&&!this[R]&&this[qe]();else if(e==="readable"&&this[v]!==0)super.emit("readable");else if(Oo(e)&&this[_e])super.emit(e),this.removeAllListeners(e);else if(e==="error"&&this[ft]){let r=t;this[J]?pt(()=>r.call(this,this[ft])):r.call(this,this[ft])}return i}removeListener(e,t){return this.off(e,t)}off(e,t){let i=super.off(e,t);return e==="data"&&(this[Pe]=this.listeners("data").length,this[Pe]===0&&!this[k]&&!this[I].length&&(this[R]=!1)),i}removeAllListeners(e){let t=super.removeAllListeners(e);return(e==="data"||e===void 0)&&(this[Pe]=0,!this[k]&&!this[I].length&&(this[R]=!1)),t}get emittedEnd(){return this[_e]}[ue](){!this[zt]&&!this[_e]&&!this[y]&&this[O].length===0&&this[le]&&(this[zt]=!0,this.emit("end"),this.emit("prefinish"),this.emit("finish"),this[kt]&&this.emit("close"),this[zt]=!1)}emit(e,...t){let i=t[0];if(e!=="error"&&e!=="close"&&e!==y&&this[y])return!1;if(e==="data")return!this[N]&&!i?!1:this[J]?(pt(()=>this[$i](i)),!0):this[$i](i);if(e==="end")return this[jr]();if(e==="close"){if(this[kt]=!0,!this[_e]&&!this[y])return!1;let 
n=super.emit("close");return this.removeAllListeners("close"),n}else if(e==="error"){this[ft]=i,super.emit(Vi,i);let n=!this[mt]||this.listeners("error").length?super.emit("error",i):!1;return this[ue](),n}else if(e==="resume"){let n=super.emit("resume");return this[ue](),n}else if(e==="finish"||e==="prefinish"){let n=super.emit(e);return this.removeAllListeners(e),n}let r=super.emit(e,...t);return this[ue](),r}[$i](e){for(let i of this[I])i.dest.write(e)===!1&&this.pause();let t=this[k]?!1:super.emit("data",e);return this[ue](),t}[jr](){return this[_e]?!1:(this[_e]=!0,this.readable=!1,this[J]?(pt(()=>this[Xi]()),!0):this[Xi]())}[Xi](){if(this[Ue]){let t=this[Ue].end();if(t){for(let i of this[I])i.dest.write(t);this[k]||super.emit("data",t)}}for(let t of this[I])t.end();let e=super.emit("end");return this.removeAllListeners("end"),e}async collect(){let e=Object.assign([],{dataLength:0});this[N]||(e.dataLength=0);let t=this.promise();return this.on("data",i=>{e.push(i),this[N]||(e.dataLength+=i.length)}),await t,e}async concat(){if(this[N])throw new Error("cannot concat in objectMode");let e=await this.collect();return this[K]?e.join(""):Buffer.concat(e,e.dataLength)}async promise(){return new Promise((e,t)=>{this.on(y,()=>t(new Error("stream destroyed"))),this.on("error",i=>t(i)),this.on("end",()=>e())})}[Symbol.asyncIterator](){this[k]=!1;let e=!1,t=async()=>(this.pause(),e=!0,{value:void 0,done:!0});return{next:()=>{if(e)return t();let r=this.read();if(r!==null)return Promise.resolve({done:!1,value:r});if(this[le])return t();let n,o,a=c=>{this.off("data",h),this.off("end",l),this.off(y,u),t(),o(c)},h=c=>{this.off("error",a),this.off("end",l),this.off(y,u),this.pause(),n({value:c,done:!!this[le]})},l=()=>{this.off("error",a),this.off("data",h),this.off(y,u),t(),n({done:!0,value:void 0})},u=()=>a(new Error("stream destroyed"));return new 
Promise((c,E)=>{o=E,n=c,this.once(y,u),this.once("error",a),this.once("end",l),this.once("data",h)})},throw:t,return:t,[Symbol.asyncIterator](){return this},[Symbol.asyncDispose]:async()=>{}}}[Symbol.iterator](){this[k]=!1;let e=!1,t=()=>(this.pause(),this.off(Vi,t),this.off(y,t),this.off("end",t),e=!0,{done:!0,value:void 0}),i=()=>{if(e)return t();let r=this.read();return r===null?t():{done:!1,value:r}};return this.once("end",t),this.once(Vi,t),this.once(y,t),{next:i,throw:t,return:t,[Symbol.iterator](){return this},[Symbol.dispose]:()=>{}}}destroy(e){if(this[y])return e?this.emit("error",e):this.emit(y),this;this[y]=!0,this[k]=!0,this[O].length=0,this[v]=0;let t=this;return typeof t.close=="function"&&!this[kt]&&t.close(),e?this.emit("error",e):this.emit(y),this}static get isStream(){return C.isStream}};C.Minipass=Wt});var Ke=d(W=>{"use strict";var Ur=W&&W.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty(W,"__esModule",{value:!0});W.WriteStreamSync=W.WriteStream=W.ReadStreamSync=W.ReadStream=void 0;var No=Ur(require("events")),B=Ur(require("fs")),Mo=We(),Lo=B.default.writev,ye=Symbol("_autoClose"),$=Symbol("_close"),_t=Symbol("_ended"),p=Symbol("_fd"),ts=Symbol("_finished"),fe=Symbol("_flags"),is=Symbol("_flush"),os=Symbol("_handleChunk"),as=Symbol("_makeBuf"),yt=Symbol("_mode"),Ht=Symbol("_needDrain"),Ge=Symbol("_onerror"),Ye=Symbol("_onopen"),ss=Symbol("_onread"),He=Symbol("_onwrite"),Ee=Symbol("_open"),V=Symbol("_path"),we=Symbol("_pos"),ee=Symbol("_queue"),Ze=Symbol("_read"),rs=Symbol("_readSize"),ce=Symbol("_reading"),wt=Symbol("_remain"),ns=Symbol("_size"),Zt=Symbol("_write"),Ne=Symbol("_writing"),Gt=Symbol("_defaultFlag"),Me=Symbol("_errored"),Yt=class extends Mo.Minipass{[Me]=!1;[p];[V];[rs];[ce]=!1;[ns];[wt];[ye];constructor(e,t){if(t=t||{},super(t),this.readable=!0,this.writable=!1,typeof e!="string")throw new TypeError("path must be a string");this[Me]=!1,this[p]=typeof t.fd=="number"?t.fd:void 
0,this[V]=e,this[rs]=t.readSize||16*1024*1024,this[ce]=!1,this[ns]=typeof t.size=="number"?t.size:1/0,this[wt]=this[ns],this[ye]=typeof t.autoClose=="boolean"?t.autoClose:!0,typeof this[p]=="number"?this[Ze]():this[Ee]()}get fd(){return this[p]}get path(){return this[V]}write(){throw new TypeError("this is a readable stream")}end(){throw new TypeError("this is a readable stream")}[Ee](){B.default.open(this[V],"r",(e,t)=>this[Ye](e,t))}[Ye](e,t){e?this[Ge](e):(this[p]=t,this.emit("open",t),this[Ze]())}[as](){return Buffer.allocUnsafe(Math.min(this[rs],this[wt]))}[Ze](){if(!this[ce]){this[ce]=!0;let e=this[as]();if(e.length===0)return process.nextTick(()=>this[ss](null,0,e));B.default.read(this[p],e,0,e.length,null,(t,i,r)=>this[ss](t,i,r))}}[ss](e,t,i){this[ce]=!1,e?this[Ge](e):this[os](t,i)&&this[Ze]()}[$](){if(this[ye]&&typeof this[p]=="number"){let e=this[p];this[p]=void 0,B.default.close(e,t=>t?this.emit("error",t):this.emit("close"))}}[Ge](e){this[ce]=!0,this[$](),this.emit("error",e)}[os](e,t){let i=!1;return this[wt]-=e,e>0&&(i=super.write(ethis[Ye](e,t))}[Ye](e,t){this[Gt]&&this[fe]==="r+"&&e&&e.code==="ENOENT"?(this[fe]="w",this[Ee]()):e?this[Ge](e):(this[p]=t,this.emit("open",t),this[Ne]||this[is]())}end(e,t){return e&&this.write(e,t),this[_t]=!0,!this[Ne]&&!this[ee].length&&typeof this[p]=="number"&&this[He](null,0),this}write(e,t){return typeof e=="string"&&(e=Buffer.from(e,t)),this[_t]?(this.emit("error",new Error("write() after end()")),!1):this[p]===void 0||this[Ne]||this[ee].length?(this[ee].push(e),this[Ht]=!0,!1):(this[Ne]=!0,this[Zt](e),!0)}[Zt](e){B.default.write(this[p],e,0,e.length,this[we],(t,i)=>this[He](t,i))}[He](e,t){e?this[Ge](e):(this[we]!==void 0&&typeof t=="number"&&(this[we]+=t),this[ee].length?this[is]():(this[Ne]=!1,this[_t]&&!this[ts]?(this[ts]=!0,this[$](),this.emit("finish")):this[Ht]&&(this[Ht]=!1,this.emit("drain"))))}[is](){if(this[ee].length===0)this[_t]&&this[He](null,0);else 
if(this[ee].length===1)this[Zt](this[ee].pop());else{let e=this[ee];this[ee]=[],Lo(this[p],e,this[we],(t,i)=>this[He](t,i))}}[$](){if(this[ye]&&typeof this[p]=="number"){let e=this[p];this[p]=void 0,B.default.close(e,t=>t?this.emit("error",t):this.emit("close"))}}};W.WriteStream=Kt;var ls=class extends Kt{[Ee](){let e;if(this[Gt]&&this[fe]==="r+")try{e=B.default.openSync(this[V],this[fe],this[yt])}catch(t){if(t?.code==="ENOENT")return this[fe]="w",this[Ee]();throw t}else e=B.default.openSync(this[V],this[fe],this[yt]);this[Ye](null,e)}[$](){if(this[ye]&&typeof this[p]=="number"){let e=this[p];this[p]=void 0,B.default.closeSync(e),this.emit("close")}}[Zt](e){let t=!0;try{this[He](null,B.default.writeSync(this[p],e,0,e.length,this[we])),t=!1}finally{if(t)try{this[$]()}catch{}}}};W.WriteStreamSync=ls});var Vt=d(b=>{"use strict";Object.defineProperty(b,"__esModule",{value:!0});b.dealias=b.isNoFile=b.isFile=b.isAsync=b.isSync=b.isAsyncNoFile=b.isSyncNoFile=b.isAsyncFile=b.isSyncFile=void 0;var Ao=new Map([["C","cwd"],["f","file"],["z","gzip"],["P","preservePaths"],["U","unlink"],["strip-components","strip"],["stripComponents","strip"],["keep-newer","newer"],["keepNewer","newer"],["keep-newer-files","newer"],["keepNewerFiles","newer"],["k","keep"],["keep-existing","keep"],["keepExisting","keep"],["m","noMtime"],["no-mtime","noMtime"],["p","preserveOwner"],["L","follow"],["h","follow"],["onentry","onReadEntry"]]),Io=s=>!!s.sync&&!!s.file;b.isSyncFile=Io;var Co=s=>!s.sync&&!!s.file;b.isAsyncFile=Co;var Fo=s=>!!s.sync&&!s.file;b.isSyncNoFile=Fo;var Bo=s=>!s.sync&&!s.file;b.isAsyncNoFile=Bo;var zo=s=>!!s.sync;b.isSync=zo;var ko=s=>!s.sync;b.isAsync=ko;var jo=s=>!!s.file;b.isFile=jo;var xo=s=>!s.file;b.isNoFile=xo;var Uo=s=>{let e=Ao.get(s);return e||s},qo=(s={})=>{if(!s)return{};let e={};for(let[t,i]of Object.entries(s)){let r=Uo(t);e[r]=i}return e.chmod===void 0&&e.noChmod===!1&&(e.chmod=!0),delete e.noChmod,e};b.dealias=qo});var Ve=d($t=>{"use 
strict";Object.defineProperty($t,"__esModule",{value:!0});$t.makeCommand=void 0;var Et=Vt(),Wo=(s,e,t,i,r)=>Object.assign((n=[],o,a)=>{Array.isArray(n)&&(o=n,n={}),typeof o=="function"&&(a=o,o=void 0),o?o=Array.from(o):o=[];let h=(0,Et.dealias)(n);if(r?.(h,o),(0,Et.isSyncFile)(h)){if(typeof a=="function")throw new TypeError("callback not supported for sync tar functions");return s(h,o)}else if((0,Et.isAsyncFile)(h)){let l=e(h,o),u=a||void 0;return u?l.then(()=>u(),u):l}else if((0,Et.isSyncNoFile)(h)){if(typeof a=="function")throw new TypeError("callback not supported for sync tar functions");return t(h,o)}else if((0,Et.isAsyncNoFile)(h)){if(typeof a=="function")throw new TypeError("callback only supported with file option");return i(h,o)}else throw new Error("impossible options??")},{syncFile:s,asyncFile:e,syncNoFile:t,asyncNoFile:i,validate:r});$t.makeCommand=Wo});var us=d($e=>{"use strict";var Ho=$e&&$e.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty($e,"__esModule",{value:!0});$e.constants=void 0;var 
Zo=Ho(require("zlib")),Go=Zo.default.constants||{ZLIB_VERNUM:4736};$e.constants=Object.freeze(Object.assign(Object.create(null),{Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_MEM_ERROR:-4,Z_BUF_ERROR:-5,Z_VERSION_ERROR:-6,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,DEFLATE:1,INFLATE:2,GZIP:3,GUNZIP:4,DEFLATERAW:5,INFLATERAW:6,UNZIP:7,BROTLI_DECODE:8,BROTLI_ENCODE:9,Z_MIN_WINDOWBITS:8,Z_MAX_WINDOWBITS:15,Z_DEFAULT_WINDOWBITS:15,Z_MIN_CHUNK:64,Z_MAX_CHUNK:1/0,Z_DEFAULT_CHUNK:16384,Z_MIN_MEMLEVEL:1,Z_MAX_MEMLEVEL:9,Z_DEFAULT_MEMLEVEL:8,Z_MIN_LEVEL:-1,Z_MAX_LEVEL:9,Z_DEFAULT_LEVEL:-1,BROTLI_OPERATION_PROCESS:0,BROTLI_OPERATION_FLUSH:1,BROTLI_OPERATION_FINISH:2,BROTLI_OPERATION_EMIT_METADATA:3,BROTLI_MODE_GENERIC:0,BROTLI_MODE_TEXT:1,BROTLI_MODE_FONT:2,BROTLI_DEFAULT_MODE:0,BROTLI_MIN_QUALITY:0,BROTLI_MAX_QUALITY:11,BROTLI_DEFAULT_QUALITY:11,BROTLI_MIN_WINDOW_BITS:10,BROTLI_MAX_WINDOW_BITS:24,BROTLI_LARGE_MAX_WINDOW_BITS:30,BROTLI_DEFAULT_WINDOW:22,BROTLI_MIN_INPUT_BLOCK_BITS:16,BROTLI_MAX_INPUT_BLOCK_BITS:24,BROTLI_PARAM_MODE:0,BROTLI_PARAM_QUALITY:1,BROTLI_PARAM_LGWIN:2,BROTLI_PARAM_LGBLOCK:3,BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING:4,BROTLI_PARAM_SIZE_HINT:5,BROTLI_PARAM_LARGE_WINDOW:6,BROTLI_PARAM_NPOSTFIX:7,BROTLI_PARAM_NDIRECT:8,BROTLI_DECODER_RESULT_ERROR:0,BROTLI_DECODER_RESULT_SUCCESS:1,BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT:2,BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT:3,BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION:0,BROTLI_DECODER_PARAM_LARGE_WINDOW:1,BROTLI_DECODER_NO_ERROR:0,BROTLI_DECODER_SUCCESS:1,BROTLI_DECODER_NEEDS_MORE_INPUT:2,BROTLI_DECODER_NEEDS_MORE_OUTPUT:3,BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE:-1,BROTLI_DECODER_ERROR_FORMAT_RESERVED:-2,BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE:-3,BROTLI_DECODER_ERROR_FO
RMAT_SIMPLE_HUFFMAN_ALPHABET:-4,BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME:-5,BROTLI_DECODER_ERROR_FORMAT_CL_SPACE:-6,BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE:-7,BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT:-8,BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1:-9,BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2:-10,BROTLI_DECODER_ERROR_FORMAT_TRANSFORM:-11,BROTLI_DECODER_ERROR_FORMAT_DICTIONARY:-12,BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS:-13,BROTLI_DECODER_ERROR_FORMAT_PADDING_1:-14,BROTLI_DECODER_ERROR_FORMAT_PADDING_2:-15,BROTLI_DECODER_ERROR_FORMAT_DISTANCE:-16,BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET:-19,BROTLI_DECODER_ERROR_INVALID_ARGUMENTS:-20,BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES:-21,BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS:-22,BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP:-25,BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1:-26,BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2:-27,BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES:-30,BROTLI_DECODER_ERROR_UNREACHABLE:-31},Go))});var vs=d(f=>{"use strict";var Yo=f&&f.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 0&&(i=t),s[i]=e[t]})),Ko=f&&f.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),Vo=f&&f.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var t={};if(e!=null)for(var i=s(e),r=0;rs,cs=Wr?.writable===!0||Wr?.set!==void 0?s=>{Le.Buffer.concat=s?ea:Jo}:s=>{},Ae=Symbol("_superWrite"),Ie=class extends Error{code;errno;constructor(e,t){super("zlib: 
"+e.message,{cause:e}),this.code=e.code,this.errno=e.errno,this.code||(this.code="ZLIB_ERROR"),this.message="zlib: "+e.message,Error.captureStackTrace(this,t??this.constructor)}get name(){return"ZlibError"}};f.ZlibError=Ie;var fs=Symbol("flushFlag"),bt=class extends Xo.Minipass{#e=!1;#i=!1;#s;#n;#r;#t;#o;get sawError(){return this.#e}get handle(){return this.#t}get flushFlag(){return this.#s}constructor(e,t){if(!e||typeof e!="object")throw new TypeError("invalid options for ZlibBase constructor");if(super(e),this.#s=e.flush??0,this.#n=e.finishFlush??0,this.#r=e.fullFlushFlag??0,typeof qr[t]!="function")throw new TypeError("Compression method not supported: "+t);try{this.#t=new qr[t](e)}catch(i){throw new Ie(i,this.constructor)}this.#o=i=>{this.#e||(this.#e=!0,this.close(),this.emit("error",i))},this.#t?.on("error",i=>this.#o(new Ie(i))),this.once("end",()=>this.close)}close(){this.#t&&(this.#t.close(),this.#t=void 0,this.emit("close"))}reset(){if(!this.#e)return(0,ds.default)(this.#t,"zlib binding closed"),this.#t.reset?.()}flush(e){this.ended||(typeof e!="number"&&(e=this.#r),this.write(Object.assign(Le.Buffer.alloc(0),{[fs]:e})))}end(e,t,i){return typeof e=="function"&&(i=e,t=void 0,e=void 0),typeof t=="function"&&(i=t,t=void 0),e&&(t?this.write(e,t):this.write(e)),this.flush(this.#n),this.#i=!0,super.end(i)}get ended(){return this.#i}[Ae](e){return super.write(e)}write(e,t,i){if(typeof t=="function"&&(i=t,t="utf8"),typeof e=="string"&&(e=Le.Buffer.from(e,t)),this.#e)return;(0,ds.default)(this.#t,"zlib binding closed");let r=this.#t._handle,n=r.close;r.close=()=>{};let o=this.#t.close;this.#t.close=()=>{},cs(!0);let a;try{let l=typeof e[fs]=="number"?e[fs]:this.#s;a=this.#t._processChunk(e,l),cs(!1)}catch(l){cs(!1),this.#o(new Ie(l,this.write))}finally{this.#t&&(this.#t._handle=r,r.close=n,this.#t.close=o,this.#t.removeAllListeners("error"))}this.#t&&this.#t.on("error",l=>this.#o(new Ie(l,this.write)));let h;if(a)if(Array.isArray(a)&&a.length>0){let 
l=a[0];h=this[Ae](Le.Buffer.from(l));for(let u=1;u{typeof r=="function"&&(n=r,r=this.flushFlag),this.flush(r),n?.()};try{this.handle.params(e,t)}finally{this.handle.flush=i}this.handle&&(this.#e=e,this.#i=t)}}}};f.Zlib=ie;var ms=class extends ie{constructor(e){super(e,"Deflate")}};f.Deflate=ms;var ps=class extends ie{constructor(e){super(e,"Inflate")}};f.Inflate=ps;var _s=class extends ie{#e;constructor(e){super(e,"Gzip"),this.#e=e&&!!e.portable}[Ae](e){return this.#e?(this.#e=!1,e[9]=255,super[Ae](e)):super[Ae](e)}};f.Gzip=_s;var ws=class extends ie{constructor(e){super(e,"Gunzip")}};f.Gunzip=ws;var ys=class extends ie{constructor(e){super(e,"DeflateRaw")}};f.DeflateRaw=ys;var Es=class extends ie{constructor(e){super(e,"InflateRaw")}};f.InflateRaw=Es;var bs=class extends ie{constructor(e){super(e,"Unzip")}};f.Unzip=bs;var Xt=class extends bt{constructor(e,t){e=e||{},e.flush=e.flush||te.constants.BROTLI_OPERATION_PROCESS,e.finishFlush=e.finishFlush||te.constants.BROTLI_OPERATION_FINISH,e.fullFlushFlag=te.constants.BROTLI_OPERATION_FLUSH,super(e,t)}},Ss=class extends Xt{constructor(e){super(e,"BrotliCompress")}};f.BrotliCompress=Ss;var gs=class extends Xt{constructor(e){super(e,"BrotliDecompress")}};f.BrotliDecompress=gs;var Qt=class extends bt{constructor(e,t){e=e||{},e.flush=e.flush||te.constants.ZSTD_e_continue,e.finishFlush=e.finishFlush||te.constants.ZSTD_e_end,e.fullFlushFlag=te.constants.ZSTD_e_flush,super(e,t)}},Rs=class extends Qt{constructor(e){super(e,"ZstdCompress")}};f.ZstdCompress=Rs;var Os=class extends Qt{constructor(e){super(e,"ZstdDecompress")}};f.ZstdDecompress=Os});var Gr=d(Xe=>{"use strict";Object.defineProperty(Xe,"__esModule",{value:!0});Xe.parse=Xe.encode=void 0;var ta=(s,e)=>{if(Number.isSafeInteger(s))s<0?sa(s,e):ia(s,e);else throw Error("cannot encode number outside of javascript safe integer range");return e};Xe.encode=ta;var ia=(s,e)=>{e[0]=128;for(var t=e.length;t>1;t--)e[t-1]=s&255,s=Math.floor(s/256)},sa=(s,e)=>{e[0]=255;var 
t=!1;s=s*-1;for(var i=e.length;i>1;i--){var r=s&255;s=Math.floor(s/256),t?e[i-1]=Hr(r):r===0?e[i-1]=0:(t=!0,e[i-1]=Zr(r))}},ra=s=>{let e=s[0],t=e===128?oa(s.subarray(1,s.length)):e===255?na(s):null;if(t===null)throw Error("invalid base256 encoding");if(!Number.isSafeInteger(t))throw Error("parsed number outside of javascript safe integer range");return t};Xe.parse=ra;var na=s=>{for(var e=s.length,t=0,i=!1,r=e-1;r>-1;r--){var n=Number(s[r]),o;i?o=Hr(n):n===0?o=n:(i=!0,o=Zr(n)),o!==0&&(t-=o*Math.pow(256,e-r-1))}return t},oa=s=>{for(var e=s.length,t=0,i=e-1;i>-1;i--){var r=Number(s[i]);r!==0&&(t+=r*Math.pow(256,e-i-1))}return t},Hr=s=>(255^s)&255,Zr=s=>(255^s)+1&255});var Ts=d(j=>{"use strict";Object.defineProperty(j,"__esModule",{value:!0});j.code=j.name=j.isName=j.isCode=void 0;var aa=s=>j.name.has(s);j.isCode=aa;var ha=s=>j.code.has(s);j.isName=ha;j.name=new Map([["0","File"],["","OldFile"],["1","Link"],["2","SymbolicLink"],["3","CharacterDevice"],["4","BlockDevice"],["5","Directory"],["6","FIFO"],["7","ContiguousFile"],["g","GlobalExtendedHeader"],["x","ExtendedHeader"],["A","SolarisACL"],["D","GNUDumpDir"],["I","Inode"],["K","NextFileHasLongLinkpath"],["L","NextFileHasLongPath"],["M","ContinuationFile"],["N","OldGnuLongPath"],["S","SparseFile"],["V","TapeVolumeHeader"],["X","OldExtendedHeader"]]);j.code=new Map(Array.from(j.name).map(s=>[s[1],s[0]]))});var Je=d(se=>{"use strict";var la=se&&se.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 0&&(i=t),s[i]=e[t]})),ua=se&&se.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),Yr=se&&se.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in 
t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var t={};if(e!=null)for(var i=s(e),r=0;r=t+512))throw new Error("need 512 bytes for header");this.path=i?.path??Ce(e,t,100),this.mode=i?.mode??r?.mode??be(e,t+100,8),this.uid=i?.uid??r?.uid??be(e,t+108,8),this.gid=i?.gid??r?.gid??be(e,t+116,8),this.size=i?.size??r?.size??be(e,t+124,12),this.mtime=i?.mtime??r?.mtime??Ds(e,t+136,12),this.cksum=be(e,t+148,12),r&&this.#i(r,!0),i&&this.#i(i);let n=Ce(e,t+156,1);if(St.isCode(n)&&(this.#e=n||"0"),this.#e==="0"&&this.path.slice(-1)==="/"&&(this.#e="5"),this.#e==="5"&&(this.size=0),this.linkpath=Ce(e,t+157,100),e.subarray(t+257,t+265).toString()==="ustar\x0000")if(this.uname=i?.uname??r?.uname??Ce(e,t+265,32),this.gname=i?.gname??r?.gname??Ce(e,t+297,32),this.devmaj=i?.devmaj??r?.devmaj??be(e,t+329,8)??0,this.devmin=i?.devmin??r?.devmin??be(e,t+337,8)??0,e[t+475]!==0){let a=Ce(e,t+345,155);this.path=a+"/"+this.path}else{let a=Ce(e,t+345,130);a&&(this.path=a+"/"+this.path),this.atime=i?.atime??r?.atime??Ds(e,t+476,12),this.ctime=i?.ctime??r?.ctime??Ds(e,t+488,12)}let o=256;for(let a=t;a!(r==null||i==="path"&&t||i==="linkpath"&&t||i==="global"))))}encode(e,t=0){if(e||(e=this.block=Buffer.alloc(512)),this.#e==="Unsupported"&&(this.#e="0"),!(e.length>=t+512))throw new Error("need 512 bytes for header");let 
i=this.ctime||this.atime?130:155,r=ca(this.path||"",i),n=r[0],o=r[1];this.needPax=!!r[2],this.needPax=Fe(e,t,100,n)||this.needPax,this.needPax=Se(e,t+100,8,this.mode)||this.needPax,this.needPax=Se(e,t+108,8,this.uid)||this.needPax,this.needPax=Se(e,t+116,8,this.gid)||this.needPax,this.needPax=Se(e,t+124,12,this.size)||this.needPax,this.needPax=Ps(e,t+136,12,this.mtime)||this.needPax,e[t+156]=this.#e.charCodeAt(0),this.needPax=Fe(e,t+157,100,this.linkpath)||this.needPax,e.write("ustar\x0000",t+257,8),this.needPax=Fe(e,t+265,32,this.uname)||this.needPax,this.needPax=Fe(e,t+297,32,this.gname)||this.needPax,this.needPax=Se(e,t+329,8,this.devmaj)||this.needPax,this.needPax=Se(e,t+337,8,this.devmin)||this.needPax,this.needPax=Fe(e,t+345,i,o)||this.needPax,e[t+475]!==0?this.needPax=Fe(e,t+345,155,o)||this.needPax:(this.needPax=Fe(e,t+345,130,o)||this.needPax,this.needPax=Ps(e,t+476,12,this.atime)||this.needPax,this.needPax=Ps(e,t+488,12,this.ctime)||this.needPax);let a=256;for(let h=t;h{let i=s,r="",n,o=Qe.posix.parse(s).root||".";if(Buffer.byteLength(i)<100)n=[i,r,!1];else{r=Qe.posix.dirname(i),i=Qe.posix.basename(i);do Buffer.byteLength(i)<=100&&Buffer.byteLength(r)<=e?n=[i,r,!1]:Buffer.byteLength(i)>100&&Buffer.byteLength(r)<=e?n=[i.slice(0,99),r,!0]:(i=Qe.posix.join(Qe.posix.basename(r),i),r=Qe.posix.dirname(r));while(r!==o&&n===void 0);n||(n=[s.slice(0,99),"",!0])}return n},Ce=(s,e,t)=>s.subarray(e,e+t).toString("utf8").replace(/\0.*/,""),Ds=(s,e,t)=>fa(be(s,e,t)),fa=s=>s===void 0?void 0:new Date(s*1e3),be=(s,e,t)=>Number(s[e])&128?Kr.parse(s.subarray(e,e+t)):ma(s,e,t),da=s=>isNaN(s)?void 0:s,ma=(s,e,t)=>da(parseInt(s.subarray(e,e+t).toString("utf8").replace(/\0.*$/,"").trim(),8)),pa={12:8589934591,8:2097151},Se=(s,e,t,i)=>i===void 0?!1:i>pa[t]||i<0?(Kr.encode(i,s.subarray(e,e+t)),!0):(_a(s,e,t,i),!1),_a=(s,e,t,i)=>s.write(wa(i,t),e,t,"ascii"),wa=(s,e)=>ya(Math.floor(s).toString(8),e),ya=(s,e)=>(s.length===e-1?s:new Array(e-s.length-1).join("0")+s+" 
")+"\0",Ps=(s,e,t,i)=>i===void 0?!1:Se(s,e,t,i.getTime()/1e3),Ea=new Array(156).join("\0"),Fe=(s,e,t,i)=>i===void 0?!1:(s.write(i+Ea,e,t,"utf8"),i.length!==Buffer.byteLength(i)||i.length>t)});var ei=d(Jt=>{"use strict";Object.defineProperty(Jt,"__esModule",{value:!0});Jt.Pax=void 0;var ba=require("node:path"),Sa=Je(),Ms=class s{atime;mtime;ctime;charset;comment;gid;uid;gname;uname;linkpath;dev;ino;nlink;path;size;mode;global;constructor(e,t=!1){this.atime=e.atime,this.charset=e.charset,this.comment=e.comment,this.ctime=e.ctime,this.dev=e.dev,this.gid=e.gid,this.global=t,this.gname=e.gname,this.ino=e.ino,this.linkpath=e.linkpath,this.mtime=e.mtime,this.nlink=e.nlink,this.path=e.path,this.size=e.size,this.uid=e.uid,this.uname=e.uname}encode(){let e=this.encodeBody();if(e==="")return Buffer.allocUnsafe(0);let t=Buffer.byteLength(e),i=512*Math.ceil(1+t/512),r=Buffer.allocUnsafe(i);for(let n=0;n<512;n++)r[n]=0;new Sa.Header({path:("PaxHeader/"+(0,ba.basename)(this.path??"")).slice(0,99),mode:this.mode||420,uid:this.uid,gid:this.gid,size:t,mtime:this.mtime,type:this.global?"GlobalExtendedHeader":"ExtendedHeader",linkpath:"",uname:this.uname||"",gname:this.gname||"",devmaj:0,devmin:0,atime:this.atime,ctime:this.ctime}).encode(r),r.write(e,512,t,"utf8");for(let n=t+512;n=Math.pow(10,o)&&(o+=1),o+n+r}static parse(e,t,i=!1){return new s(ga(Ra(e),t),i)}};Jt.Pax=Ms;var ga=(s,e)=>e?Object.assign({},e,s):s,Ra=s=>s.replace(/\n$/,"").split(`
+`).reduce(Oa,Object.create(null)),Oa=(s,e)=>{let t=parseInt(e,10);if(t!==Buffer.byteLength(e)+1)return s;e=e.slice((t+" ").length);let i=e.split("="),r=i.shift();if(!r)return s;let n=r.replace(/^SCHILY\.(dev|ino|nlink)/,"$1"),o=i.join("=");return s[n]=/^([A-Z]+\.)?([mac]|birth|creation)time$/.test(n)?new Date(Number(o)*1e3):/^[0-9]+$/.test(o)?+o:o,s}});var et=d(ti=>{"use strict";Object.defineProperty(ti,"__esModule",{value:!0});ti.normalizeWindowsPath=void 0;var va=process.env.TESTING_TAR_FAKE_PLATFORM||process.platform;ti.normalizeWindowsPath=va!=="win32"?s=>s:s=>s&&s.replace(/\\/g,"/")});var ri=d(si=>{"use strict";Object.defineProperty(si,"__esModule",{value:!0});si.ReadEntry=void 0;var Ta=We(),ii=et(),Ls=class extends Ta.Minipass{extended;globalExtended;header;startBlockSize;blockRemain;remain;type;meta=!1;ignore=!1;path;mode;uid;gid;uname;gname;size=0;mtime;atime;ctime;linkpath;dev;ino;nlink;invalid=!1;absolute;unsupported=!1;constructor(e,t,i){switch(super({}),this.pause(),this.extended=t,this.globalExtended=i,this.header=e,this.remain=e.size??0,this.startBlockSize=512*Math.ceil(this.remain/512),this.blockRemain=this.startBlockSize,this.type=e.type,this.type){case"File":case"OldFile":case"Link":case"SymbolicLink":case"CharacterDevice":case"BlockDevice":case"Directory":case"FIFO":case"ContiguousFile":case"GNUDumpDir":break;case"NextFileHasLongLinkpath":case"NextFileHasLongPath":case"OldGnuLongPath":case"GlobalExtendedHeader":case"ExtendedHeader":case"OldExtendedHeader":this.meta=!0;break;default:this.ignore=!0}if(!e.path)throw new Error("no path provided for tar.ReadEntry");this.path=(0,ii.normalizeWindowsPath)(e.path),this.mode=e.mode,this.mode&&(this.mode=this.mode&4095),this.uid=e.uid,this.gid=e.gid,this.uname=e.uname,this.gname=e.gname,this.size=this.remain,this.mtime=e.mtime,this.atime=e.atime,this.ctime=e.ctime,this.linkpath=e.linkpath?(0,ii.normalizeWindowsPath)(e.linkpath):void 
0,this.uname=e.uname,this.gname=e.gname,t&&this.#e(t),i&&this.#e(i,!0)}write(e){let t=e.length;if(t>this.blockRemain)throw new Error("writing more to entry than is appropriate");let i=this.remain,r=this.blockRemain;return this.remain=Math.max(0,i-t),this.blockRemain=Math.max(0,r-t),this.ignore?!0:i>=t?super.write(e):super.write(e.subarray(0,i))}#e(e,t=!1){e.path&&(e.path=(0,ii.normalizeWindowsPath)(e.path)),e.linkpath&&(e.linkpath=(0,ii.normalizeWindowsPath)(e.linkpath)),Object.assign(this,Object.fromEntries(Object.entries(e).filter(([i,r])=>!(r==null||i==="path"&&t))))}};si.ReadEntry=Ls});var oi=d(ni=>{"use strict";Object.defineProperty(ni,"__esModule",{value:!0});ni.warnMethod=void 0;var Da=(s,e,t,i={})=>{s.file&&(i.file=s.file),s.cwd&&(i.cwd=s.cwd),i.code=t instanceof Error&&t.code||e,i.tarCode=e,!s.strict&&i.recoverable!==!1?(t instanceof Error&&(i=Object.assign(t,i),t=t.message),s.emit("warn",e,t,i)):t instanceof Error?s.emit("error",Object.assign(t,i)):s.emit("error",Object.assign(new Error(`${e}: ${t}`),i))};ni.warnMethod=Da});var mi=d(di=>{"use strict";Object.defineProperty(di,"__esModule",{value:!0});di.Parser=void 0;var 
Pa=require("events"),As=vs(),Vr=Je(),$r=ei(),Na=ri(),Ma=oi(),La=1024*1024,zs=Buffer.from([31,139]),ks=Buffer.from([40,181,47,253]),Aa=Math.max(zs.length,ks.length),H=Symbol("state"),Be=Symbol("writeEntry"),de=Symbol("readEntry"),Is=Symbol("nextEntry"),Xr=Symbol("processEntry"),re=Symbol("extendedHeader"),gt=Symbol("globalExtendedHeader"),ge=Symbol("meta"),Qr=Symbol("emitMeta"),_=Symbol("buffer"),me=Symbol("queue"),Re=Symbol("ended"),Cs=Symbol("emittedEnd"),ze=Symbol("emit"),S=Symbol("unzip"),ai=Symbol("consumeChunk"),hi=Symbol("consumeChunkSub"),Fs=Symbol("consumeBody"),Jr=Symbol("consumeMeta"),en=Symbol("consumeHeader"),Rt=Symbol("consuming"),Bs=Symbol("bufferConcat"),li=Symbol("maybeEnd"),tt=Symbol("writing"),Oe=Symbol("aborted"),ui=Symbol("onDone"),ke=Symbol("sawValidEntry"),ci=Symbol("sawNullBlock"),fi=Symbol("sawEOF"),tn=Symbol("closeStream"),Ia=()=>!0,js=class extends Pa.EventEmitter{file;strict;maxMetaEntrySize;filter;brotli;zstd;writable=!0;readable=!1;[me]=[];[_];[de];[Be];[H]="begin";[ge]="";[re];[gt];[Re]=!1;[S];[Oe]=!1;[ke];[ci]=!1;[fi]=!1;[tt]=!1;[Rt]=!1;[Cs]=!1;constructor(e={}){super(),this.file=e.file||"",this.on(ui,()=>{(this[H]==="begin"||this[ke]===!1)&&this.warn("TAR_BAD_ARCHIVE","Unrecognized archive format")}),e.ondone?this.on(ui,e.ondone):this.on(ui,()=>{this.emit("prefinish"),this.emit("finish"),this.emit("end")}),this.strict=!!e.strict,this.maxMetaEntrySize=e.maxMetaEntrySize||La,this.filter=typeof e.filter=="function"?e.filter:Ia;let t=e.file&&(e.file.endsWith(".tar.br")||e.file.endsWith(".tbr"));this.brotli=!(e.gzip||e.zstd)&&e.brotli!==void 0?e.brotli:t?void 0:!1;let i=e.file&&(e.file.endsWith(".tar.zst")||e.file.endsWith(".tzst"));this.zstd=!(e.gzip||e.brotli)&&e.zstd!==void 0?e.zstd:i?!0:void 0,this.on("end",()=>this[tn]()),typeof e.onwarn=="function"&&this.on("warn",e.onwarn),typeof e.onReadEntry=="function"&&this.on("entry",e.onReadEntry)}warn(e,t,i={}){(0,Ma.warnMethod)(this,e,t,i)}[en](e,t){this[ke]===void 0&&(this[ke]=!1);let 
i;try{i=new Vr.Header(e,t,this[re],this[gt])}catch(r){return this.warn("TAR_ENTRY_INVALID",r)}if(i.nullBlock)this[ci]?(this[fi]=!0,this[H]==="begin"&&(this[H]="header"),this[ze]("eof")):(this[ci]=!0,this[ze]("nullBlock"));else if(this[ci]=!1,!i.cksumValid)this.warn("TAR_ENTRY_INVALID","checksum failure",{header:i});else if(!i.path)this.warn("TAR_ENTRY_INVALID","path is required",{header:i});else{let r=i.type;if(/^(Symbolic)?Link$/.test(r)&&!i.linkpath)this.warn("TAR_ENTRY_INVALID","linkpath required",{header:i});else if(!/^(Symbolic)?Link$/.test(r)&&!/^(Global)?ExtendedHeader$/.test(r)&&i.linkpath)this.warn("TAR_ENTRY_INVALID","linkpath forbidden",{header:i});else{let n=this[Be]=new Na.ReadEntry(i,this[re],this[gt]);if(!this[ke])if(n.remain){let o=()=>{n.invalid||(this[ke]=!0)};n.on("end",o)}else this[ke]=!0;n.meta?n.size>this.maxMetaEntrySize?(n.ignore=!0,this[ze]("ignoredEntry",n),this[H]="ignore",n.resume()):n.size>0&&(this[ge]="",n.on("data",o=>this[ge]+=o),this[H]="meta"):(this[re]=void 0,n.ignore=n.ignore||!this.filter(n.path,n),n.ignore?(this[ze]("ignoredEntry",n),this[H]=n.remain?"ignore":"header",n.resume()):(n.remain?this[H]="body":(this[H]="header",n.end()),this[de]?this[me].push(n):(this[me].push(n),this[Is]())))}}}[tn](){queueMicrotask(()=>this.emit("close"))}[Xr](e){let t=!0;if(!e)this[de]=void 0,t=!1;else if(Array.isArray(e)){let[i,...r]=e;this.emit(i,...r)}else this[de]=e,this.emit("entry",e),e.emittedEnd||(e.on("end",()=>this[Is]()),t=!1);return t}[Is](){do;while(this[Xr](this[me].shift()));if(!this[me].length){let e=this[de];!e||e.flowing||e.size===e.remain?this[tt]||this.emit("drain"):e.once("drain",()=>this.emit("drain"))}}[Fs](e,t){let i=this[Be];if(!i)throw new Error("attempt to consume body without entry??");let r=i.blockRemain??0,n=r>=e.length&&t===0?e:e.subarray(t,t+r);return i.write(n),i.blockRemain||(this[H]="header",this[Be]=void 0,i.end()),n.length}[Jr](e,t){let 
i=this[Be],r=this[Fs](e,t);return!this[Be]&&i&&this[Qr](i),r}[ze](e,t,i){!this[me].length&&!this[de]?this.emit(e,t,i):this[me].push([e,t,i])}[Qr](e){switch(this[ze]("meta",this[ge]),e.type){case"ExtendedHeader":case"OldExtendedHeader":this[re]=$r.Pax.parse(this[ge],this[re],!1);break;case"GlobalExtendedHeader":this[gt]=$r.Pax.parse(this[ge],this[gt],!0);break;case"NextFileHasLongPath":case"OldGnuLongPath":{let t=this[re]??Object.create(null);this[re]=t,t.path=this[ge].replace(/\0.*/,"");break}case"NextFileHasLongLinkpath":{let t=this[re]||Object.create(null);this[re]=t,t.linkpath=this[ge].replace(/\0.*/,"");break}default:throw new Error("unknown meta: "+e.type)}}abort(e){this[Oe]=!0,this.emit("abort",e),this.warn("TAR_ABORT",e,{recoverable:!1})}write(e,t,i){if(typeof t=="function"&&(i=t,t=void 0),typeof e=="string"&&(e=Buffer.from(e,typeof t=="string"?t:"utf8")),this[Oe])return i?.(),!1;if((this[S]===void 0||this.brotli===void 0&&this[S]===!1)&&e){if(this[_]&&(e=Buffer.concat([this[_],e]),this[_]=void 0),e.lengththis[ai](u)),this[S].on("error",u=>this.abort(u)),this[S].on("end",()=>{this[Re]=!0,this[ai]()}),this[tt]=!0;let l=!!this[S][h?"end":"write"](e);return this[tt]=!1,i?.(),l}}this[tt]=!0,this[S]?this[S].write(e):this[ai](e),this[tt]=!1;let n=this[me].length?!1:this[de]?this[de].flowing:!0;return!n&&!this[me].length&&this[de]?.once("drain",()=>this.emit("drain")),i?.(),n}[Bs](e){e&&!this[Oe]&&(this[_]=this[_]?Buffer.concat([this[_],e]):e)}[li](){if(this[Re]&&!this[Cs]&&!this[Oe]&&!this[Rt]){this[Cs]=!0;let e=this[Be];if(e&&e.blockRemain){let t=this[_]?this[_].length:0;this.warn("TAR_BAD_ARCHIVE",`Truncated input (needed ${e.blockRemain} more bytes, only ${t} available)`,{entry:e}),this[_]&&e.write(this[_]),e.end()}this[ze](ui)}}[ai](e){if(this[Rt]&&e)this[Bs](e);else if(!e&&!this[_])this[li]();else if(e){if(this[Rt]=!0,this[_]){this[Bs](e);let t=this[_];this[_]=void 0,this[hi](t)}else this[hi](e);for(;this[_]&&this[_]?.length>=512&&!this[Oe]&&!this[fi];){let 
t=this[_];this[_]=void 0,this[hi](t)}this[Rt]=!1}(!this[_]||this[Re])&&this[li]()}[hi](e){let t=0,i=e.length;for(;t+512<=i&&!this[Oe]&&!this[fi];)switch(this[H]){case"begin":case"header":this[en](e,t),t+=512;break;case"ignore":case"body":t+=this[Fs](e,t);break;case"meta":t+=this[Jr](e,t);break;default:throw new Error("invalid state: "+this[H])}t{"use strict";Object.defineProperty(pi,"__esModule",{value:!0});pi.stripTrailingSlashes=void 0;var Ca=s=>{let e=s.length-1,t=-1;for(;e>-1&&s.charAt(e)==="/";)t=e,e--;return t===-1?s:s.slice(0,t)};pi.stripTrailingSlashes=Ca});var st=d(F=>{"use strict";var Fa=F&&F.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 0&&(i=t),s[i]=e[t]})),Ba=F&&F.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),za=F&&F.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var t={};if(e!=null)for(var i=s(e),r=0;r{let e=s.onReadEntry;s.onReadEntry=e?t=>{e(t),t.resume()}:t=>t.resume()},qa=(s,e)=>{let t=new Map(e.map(n=>[(0,xs.stripTrailingSlashes)(n),!0])),i=s.filter,r=(n,o="")=>{let a=o||(0,sn.parse)(n).root||".",h;if(n===a)h=!1;else{let l=t.get(n);l!==void 0?h=l:h=r((0,sn.dirname)(n),a)}return t.set(n,h),h};s.filter=i?(n,o)=>i(n,o)&&r((0,xs.stripTrailingSlashes)(n)):n=>r((0,xs.stripTrailingSlashes)(n))};F.filesFilter=qa;var Wa=s=>{let e=new wi.Parser(s),t=s.file,i;try{i=it.default.openSync(t,"r");let r=it.default.fstatSync(i),n=s.maxReadSize||16*1024*1024;if(r.size{let t=new wi.Parser(s),i=s.maxReadSize||16*1024*1024,r=s.file;return new 
Promise((o,a)=>{t.on("error",a),t.on("end",o),it.default.stat(r,(h,l)=>{if(h)a(h);else{let u=new ja.ReadStream(r,{readSize:i,size:l.size});u.on("error",a),u.pipe(t)}})})};F.list=(0,xa.makeCommand)(Wa,Ha,s=>new wi.Parser(s),s=>new wi.Parser(s),(s,e)=>{e?.length&&(0,F.filesFilter)(s,e),s.noResume||Ua(s)})});var rn=d(yi=>{"use strict";Object.defineProperty(yi,"__esModule",{value:!0});yi.modeFix=void 0;var Za=(s,e,t)=>(s&=4095,t&&(s=(s|384)&-19),e&&(s&256&&(s|=64),s&32&&(s|=8),s&4&&(s|=1)),s);yi.modeFix=Za});var Us=d(Ei=>{"use strict";Object.defineProperty(Ei,"__esModule",{value:!0});Ei.stripAbsolutePath=void 0;var Ga=require("node:path"),{isAbsolute:Ya,parse:nn}=Ga.win32,Ka=s=>{let e="",t=nn(s);for(;Ya(s)||t.root;){let i=s.charAt(0)==="/"&&s.slice(0,4)!=="//?/"?"/":t.root;s=s.slice(i.length),e+=i,t=nn(s)}return[e,s]};Ei.stripAbsolutePath=Ka});var Ws=d(rt=>{"use strict";Object.defineProperty(rt,"__esModule",{value:!0});rt.decode=rt.encode=void 0;var bi=["|","<",">","?",":"],qs=bi.map(s=>String.fromCharCode(61440+s.charCodeAt(0))),Va=new Map(bi.map((s,e)=>[s,qs[e]])),$a=new Map(qs.map((s,e)=>[s,bi[e]])),Xa=s=>bi.reduce((e,t)=>e.split(t).join(Va.get(t)),s);rt.encode=Xa;var Qa=s=>qs.reduce((e,t)=>e.split(t).join($a.get(t)),s);rt.decode=Qa});var er=d(M=>{"use strict";var Ja=M&&M.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 0&&(i=t),s[i]=e[t]})),eh=M&&M.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),th=M&&M.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var 
t={};if(e!=null)for(var i=s(e),r=0;re?(s=(0,ne.normalizeWindowsPath)(s).replace(/^\.(\/|$)/,""),(0,ih.stripTrailingSlashes)(e)+"/"+s):(0,ne.normalizeWindowsPath)(s),rh=16*1024*1024,an=Symbol("process"),hn=Symbol("file"),ln=Symbol("directory"),Zs=Symbol("symlink"),un=Symbol("hardlink"),Ot=Symbol("header"),Si=Symbol("read"),Gs=Symbol("lstat"),gi=Symbol("onlstat"),Ys=Symbol("onread"),Ks=Symbol("onreadlink"),Vs=Symbol("openfile"),$s=Symbol("onopenfile"),ve=Symbol("close"),Ri=Symbol("mode"),Xs=Symbol("awaitDrain"),Hs=Symbol("ondrain"),ae=Symbol("prefix"),Oi=class extends fn.Minipass{path;portable;myuid=process.getuid&&process.getuid()||0;myuser=process.env.USER||"";maxReadSize;linkCache;statCache;preservePaths;cwd;strict;mtime;noPax;noMtime;prefix;fd;blockLen=0;blockRemain=0;buf;pos=0;remain=0;length=0;offset=0;win32;absolute;header;type;linkpath;stat;onWriteEntry;#e=!1;constructor(e,t={}){let i=(0,pn.dealias)(t);super(),this.path=(0,ne.normalizeWindowsPath)(e),this.portable=!!i.portable,this.maxReadSize=i.maxReadSize||rh,this.linkCache=i.linkCache||new Map,this.statCache=i.statCache||new Map,this.preservePaths=!!i.preservePaths,this.cwd=(0,ne.normalizeWindowsPath)(i.cwd||process.cwd()),this.strict=!!i.strict,this.noPax=!!i.noPax,this.noMtime=!!i.noMtime,this.mtime=i.mtime,this.prefix=i.prefix?(0,ne.normalizeWindowsPath)(i.prefix):void 0,this.onWriteEntry=i.onWriteEntry,typeof i.onwarn=="function"&&this.on("warn",i.onwarn);let r=!1;if(!this.preservePaths){let[o,a]=(0,wn.stripAbsolutePath)(this.path);o&&typeof a=="string"&&(this.path=a,r=o)}this.win32=!!i.win32||process.platform==="win32",this.win32&&(this.path=sh.decode(this.path.replace(/\\/g,"/")),e=e.replace(/\\/g,"/")),this.absolute=(0,ne.normalizeWindowsPath)(i.absolute||on.default.resolve(this.cwd,e)),this.path===""&&(this.path="./"),r&&this.warn("TAR_ENTRY_INFO",`stripping ${r} from absolute path`,{entry:this,path:r+this.path});let 
n=this.statCache.get(this.absolute);n?this[gi](n):this[Gs]()}warn(e,t,i={}){return(0,yn.warnMethod)(this,e,t,i)}emit(e,...t){return e==="error"&&(this.#e=!0),super.emit(e,...t)}[Gs](){oe.default.lstat(this.absolute,(e,t)=>{if(e)return this.emit("error",e);this[gi](t)})}[gi](e){this.statCache.set(this.absolute,e),this.stat=e,e.isFile()||(e.size=0),this.type=nh(e),this.emit("stat",e),this[an]()}[an](){switch(this.type){case"File":return this[hn]();case"Directory":return this[ln]();case"SymbolicLink":return this[Zs]();default:return this.end()}}[Ri](e){return(0,mn.modeFix)(e,this.type==="Directory",this.portable)}[ae](e){return En(e,this.prefix)}[Ot](){if(!this.stat)throw new Error("cannot write header before stat");this.type==="Directory"&&this.portable&&(this.noMtime=!0),this.onWriteEntry?.(this),this.header=new dn.Header({path:this[ae](this.path),linkpath:this.type==="Link"&&this.linkpath!==void 0?this[ae](this.linkpath):this.linkpath,mode:this[Ri](this.stat.mode),uid:this.portable?void 0:this.stat.uid,gid:this.portable?void 0:this.stat.gid,size:this.stat.size,mtime:this.noMtime?void 0:this.mtime||this.stat.mtime,type:this.type==="Unsupported"?void 0:this.type,uname:this.portable?void 0:this.stat.uid===this.myuid?this.myuser:"",atime:this.portable?void 0:this.stat.atime,ctime:this.portable?void 0:this.stat.ctime}),this.header.encode()&&!this.noPax&&super.write(new _n.Pax({atime:this.portable?void 0:this.header.atime,ctime:this.portable?void 0:this.header.ctime,gid:this.portable?void 0:this.header.gid,mtime:this.noMtime?void 0:this.mtime||this.header.mtime,path:this[ae](this.path),linkpath:this.type==="Link"&&this.linkpath!==void 0?this[ae](this.linkpath):this.linkpath,size:this.header.size,uid:this.portable?void 0:this.header.uid,uname:this.portable?void 0:this.header.uname,dev:this.portable?void 0:this.stat.dev,ino:this.portable?void 0:this.stat.ino,nlink:this.portable?void 0:this.stat.nlink}).encode());let e=this.header?.block;if(!e)throw new Error("failed to 
encode header");super.write(e)}[ln](){if(!this.stat)throw new Error("cannot create directory entry without stat");this.path.slice(-1)!=="/"&&(this.path+="/"),this.stat.size=0,this[Ot](),this.end()}[Zs](){oe.default.readlink(this.absolute,(e,t)=>{if(e)return this.emit("error",e);this[Ks](t)})}[Ks](e){this.linkpath=(0,ne.normalizeWindowsPath)(e),this[Ot](),this.end()}[un](e){if(!this.stat)throw new Error("cannot create link entry without stat");this.type="Link",this.linkpath=(0,ne.normalizeWindowsPath)(on.default.relative(this.cwd,e)),this.stat.size=0,this[Ot](),this.end()}[hn](){if(!this.stat)throw new Error("cannot create file entry without stat");if(this.stat.nlink>1){let e=`${this.stat.dev}:${this.stat.ino}`,t=this.linkCache.get(e);if(t?.indexOf(this.cwd)===0)return this[un](t);this.linkCache.set(e,this.absolute)}if(this[Ot](),this.stat.size===0)return this.end();this[Vs]()}[Vs](){oe.default.open(this.absolute,"r",(e,t)=>{if(e)return this.emit("error",e);this[$s](t)})}[$s](e){if(this.fd=e,this.#e)return this[ve]();if(!this.stat)throw new Error("should stat before calling onopenfile");this.blockLen=512*Math.ceil(this.stat.size/512),this.blockRemain=this.blockLen;let t=Math.min(this.blockLen,this.maxReadSize);this.buf=Buffer.allocUnsafe(t),this.offset=0,this.pos=0,this.remain=this.stat.size,this.length=this.buf.length,this[Si]()}[Si](){let{fd:e,buf:t,offset:i,length:r,pos:n}=this;if(e===void 0||t===void 0)throw new Error("cannot read file without first opening");oe.default.read(e,t,i,r,n,(o,a)=>{if(o)return this[ve](()=>this.emit("error",o));this[Ys](a)})}[ve](e=()=>{}){this.fd!==void 0&&oe.default.close(this.fd,e)}[Ys](e){if(e<=0&&this.remain>0){let r=Object.assign(new Error("encountered unexpected EOF"),{path:this.absolute,syscall:"read",code:"EOF"});return this[ve](()=>this.emit("error",r))}if(e>this.remain){let r=Object.assign(new Error("did not encounter expected EOF"),{path:this.absolute,syscall:"read",code:"EOF"});return 
this[ve](()=>this.emit("error",r))}if(!this.buf)throw new Error("should have created buffer prior to reading");if(e===this.remain)for(let r=e;rthis[Hs]())}[Xs](e){this.once("drain",e)}write(e,t,i){if(typeof t=="function"&&(i=t,t=void 0),typeof e=="string"&&(e=Buffer.from(e,typeof t=="string"?t:"utf8")),this.blockRemaine?this.emit("error",e):this.end());if(!this.buf)throw new Error("buffer lost somehow in ONDRAIN");this.offset>=this.length&&(this.buf=Buffer.allocUnsafe(Math.min(this.blockRemain,this.buf.length)),this.offset=0),this.length=this.buf.length-this.offset,this[Si]()}};M.WriteEntry=Oi;var Qs=class extends Oi{sync=!0;[Gs](){this[gi](oe.default.lstatSync(this.absolute))}[Zs](){this[Ks](oe.default.readlinkSync(this.absolute))}[Vs](){this[$s](oe.default.openSync(this.absolute,"r"))}[Si](){let e=!0;try{let{fd:t,buf:i,offset:r,length:n,pos:o}=this;if(t===void 0||i===void 0)throw new Error("fd and buf must be set in READ method");let a=oe.default.readSync(t,i,r,n,o);this[Ys](a),e=!1}finally{if(e)try{this[ve](()=>{})}catch{}}}[Xs](e){e()}[ve](e=()=>{}){this.fd!==void 0&&oe.default.closeSync(this.fd),e()}};M.WriteEntrySync=Qs;var Js=class extends fn.Minipass{blockLen=0;blockRemain=0;buf=0;pos=0;remain=0;length=0;preservePaths;portable;strict;noPax;noMtime;readEntry;type;prefix;path;mode;uid;gid;uname;gname;header;mtime;atime;ctime;linkpath;size;onWriteEntry;warn(e,t,i={}){return(0,yn.warnMethod)(this,e,t,i)}constructor(e,t={}){let i=(0,pn.dealias)(t);super(),this.preservePaths=!!i.preservePaths,this.portable=!!i.portable,this.strict=!!i.strict,this.noPax=!!i.noPax,this.noMtime=!!i.noMtime,this.onWriteEntry=i.onWriteEntry,this.readEntry=e;let{type:r}=e;if(r==="Unsupported")throw new Error("writing entry that should be ignored");this.type=r,this.type==="Directory"&&this.portable&&(this.noMtime=!0),this.prefix=i.prefix,this.path=(0,ne.normalizeWindowsPath)(e.path),this.mode=e.mode!==void 0?this[Ri](e.mode):void 0,this.uid=this.portable?void 
0:e.uid,this.gid=this.portable?void 0:e.gid,this.uname=this.portable?void 0:e.uname,this.gname=this.portable?void 0:e.gname,this.size=e.size,this.mtime=this.noMtime?void 0:i.mtime||e.mtime,this.atime=this.portable?void 0:e.atime,this.ctime=this.portable?void 0:e.ctime,this.linkpath=e.linkpath!==void 0?(0,ne.normalizeWindowsPath)(e.linkpath):void 0,typeof i.onwarn=="function"&&this.on("warn",i.onwarn);let n=!1;if(!this.preservePaths){let[a,h]=(0,wn.stripAbsolutePath)(this.path);a&&typeof h=="string"&&(this.path=h,n=a)}this.remain=e.size,this.blockRemain=e.startBlockSize,this.onWriteEntry?.(this),this.header=new dn.Header({path:this[ae](this.path),linkpath:this.type==="Link"&&this.linkpath!==void 0?this[ae](this.linkpath):this.linkpath,mode:this.mode,uid:this.portable?void 0:this.uid,gid:this.portable?void 0:this.gid,size:this.size,mtime:this.noMtime?void 0:this.mtime,type:this.type,uname:this.portable?void 0:this.uname,atime:this.portable?void 0:this.atime,ctime:this.portable?void 0:this.ctime}),n&&this.warn("TAR_ENTRY_INFO",`stripping ${n} from absolute path`,{entry:this,path:n+this.path}),this.header.encode()&&!this.noPax&&super.write(new _n.Pax({atime:this.portable?void 0:this.atime,ctime:this.portable?void 0:this.ctime,gid:this.portable?void 0:this.gid,mtime:this.noMtime?void 0:this.mtime,path:this[ae](this.path),linkpath:this.type==="Link"&&this.linkpath!==void 0?this[ae](this.linkpath):this.linkpath,size:this.size,uid:this.portable?void 0:this.uid,uname:this.portable?void 0:this.uname,dev:this.portable?void 0:this.readEntry.dev,ino:this.portable?void 0:this.readEntry.ino,nlink:this.portable?void 0:this.readEntry.nlink}).encode());let o=this.header?.block;if(!o)throw new Error("failed to encode header");super.write(o),e.pipe(this)}[ae](e){return En(e,this.prefix)}[Ri](e){return(0,mn.modeFix)(e,this.type==="Directory",this.portable)}write(e,t,i){typeof t=="function"&&(i=t,t=void 0),typeof e=="string"&&(e=Buffer.from(e,typeof t=="string"?t:"utf8"));let 
r=e.length;if(r>this.blockRemain)throw new Error("writing more to entry than is appropriate");return this.blockRemain-=r,super.write(e,i)}end(e,t,i){return this.blockRemain&&super.write(Buffer.alloc(this.blockRemain)),typeof e=="function"&&(i=e,t=void 0,e=void 0),typeof t=="function"&&(i=t,t=void 0),typeof e=="string"&&(e=Buffer.from(e,t??"utf8")),i&&this.once("finish",i),e?super.end(e,i):super.end(i),this}};M.WriteEntryTar=Js;var nh=s=>s.isFile()?"File":s.isDirectory()?"Directory":s.isSymbolicLink()?"SymbolicLink":"Unsupported"});var bn=d(ot=>{"use strict";Object.defineProperty(ot,"__esModule",{value:!0});ot.Node=ot.Yallist=void 0;var tr=class s{tail;head;length=0;static create(e=[]){return new s(e)}constructor(e=[]){for(let t of e)this.push(t)}*[Symbol.iterator](){for(let e=this.head;e;e=e.next)yield e.value}removeNode(e){if(e.list!==this)throw new Error("removing node which does not belong to this list");let t=e.next,i=e.prev;return t&&(t.prev=i),i&&(i.next=t),e===this.head&&(this.head=t),e===this.tail&&(this.tail=i),this.length--,e.next=void 0,e.prev=void 0,e.list=void 0,t}unshiftNode(e){if(e===this.head)return;e.list&&e.list.removeNode(e);let t=this.head;e.list=this,e.next=t,t&&(t.prev=e),this.head=e,this.tail||(this.tail=e),this.length++}pushNode(e){if(e===this.tail)return;e.list&&e.list.removeNode(e);let t=this.tail;e.list=this,e.prev=t,t&&(t.next=e),this.tail=e,this.head||(this.head=e),this.length++}push(...e){for(let t=0,i=e.length;t1)i=t;else if(this.head)r=this.head.next,i=this.head.value;else throw new TypeError("Reduce of empty list with no initial value");for(var n=0;r;n++)i=e(i,r.value,n),r=r.next;return i}reduceReverse(e,t){let i,r=this.tail;if(arguments.length>1)i=t;else if(this.tail)r=this.tail.prev,i=this.tail.value;else throw new TypeError("Reduce of empty list with no initial value");for(let n=this.length-1;r;n--)i=e(i,r.value,n),r=r.prev;return i}toArray(){let e=new Array(this.length);for(let t=0,i=this.head;i;t++)e[t]=i.value,i=i.next;return 
e}toArrayReverse(){let e=new Array(this.length);for(let t=0,i=this.tail;i;t++)e[t]=i.value,i=i.prev;return e}slice(e=0,t=this.length){t<0&&(t+=this.length),e<0&&(e+=this.length);let i=new s;if(tthis.length&&(t=this.length);let r=this.head,n=0;for(n=0;r&&nthis.length&&(t=this.length);let r=this.length,n=this.tail;for(;n&&r>t;r--)n=n.prev;for(;n&&r>e;r--,n=n.prev)i.push(n.value);return i}splice(e,t=0,...i){e>this.length&&(e=this.length-1),e<0&&(e=this.length+e);let r=this.head;for(let o=0;r&&o{"use strict";var lh=L&&L.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 0&&(i=t),s[i]=e[t]})),uh=L&&L.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),ch=L&&L.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var t={};if(e!=null)for(var i=s(e),r=0;r1)throw new TypeError("gzip, brotli, zstd are mutually exclusive");if(e.gzip&&(typeof e.gzip!="object"&&(e.gzip={}),this.portable&&(e.gzip.portable=!0),this.zip=new ir.Gzip(e.gzip)),e.brotli&&(typeof e.brotli!="object"&&(e.brotli={}),this.zip=new ir.BrotliCompress(e.brotli)),e.zstd&&(typeof e.zstd!="object"&&(e.zstd={}),this.zip=new ir.ZstdCompress(e.zstd)),!this.zip)throw new Error("impossible");let t=this.zip;t.on("data",i=>super.write(i)),t.on("end",()=>super.end()),t.on("drain",()=>this[or]()),this.on("resume",()=>t.resume())}else this.on("drain",this[or]);this.noDirRecurse=!!e.noDirRecurse,this.follow=!!e.follow,this.noMtime=!!e.noMtime,e.mtime&&(this.mtime=e.mtime),this.filter=typeof 
e.filter=="function"?e.filter:()=>!0,this[X]=new dh.Yallist,this[Q]=0,this.jobs=Number(e.jobs)||4,this[Tt]=!1,this[vt]=!1}[Tn](e){return super.write(e)}add(e){return this.write(e),this}end(e,t,i){return typeof e=="function"&&(i=e,e=void 0),typeof t=="function"&&(i=t,t=void 0),e&&this.add(e),this[vt]=!0,this[xe](),i&&i(),this}write(e){if(this[vt])throw new Error("write after end");return e instanceof mh.ReadEntry?this[gn](e):this[Ti](e),this.flowing}[gn](e){let t=(0,ar.normalizeWindowsPath)(On.default.resolve(this.cwd,e.path));if(!this.filter(e.path,e))e.resume();else{let i=new Dt(e.path,t);i.entry=new hr.WriteEntryTar(e,this[nr](i)),i.entry.on("end",()=>this[rr](i)),this[Q]+=1,this[X].push(i)}this[xe]()}[Ti](e){let t=(0,ar.normalizeWindowsPath)(On.default.resolve(this.cwd,e));this[X].push(new Dt(e,t)),this[xe]()}[lr](e){e.pending=!0,this[Q]+=1;let t=this.follow?"stat":"lstat";Mi.default[t](e.absolute,(i,r)=>{e.pending=!1,this[Q]-=1,i?this.emit("error",i):this[vi](e,r)})}[vi](e,t){this.statCache.set(e.absolute,t),e.stat=t,this.filter(e.path,t)?t.isFile()&&t.nlink>1&&e===this[je]&&!this.linkCache.get(`${t.dev}:${t.ino}`)&&!this.sync&&this[sr](e):e.ignore=!0,this[xe]()}[ur](e){e.pending=!0,this[Q]+=1,Mi.default.readdir(e.absolute,(t,i)=>{if(e.pending=!1,this[Q]-=1,t)return this.emit("error",t);this[Di](e,i)})}[Di](e,t){this.readdirCache.set(e.absolute,t),e.readdir=t,this[xe]()}[xe](){if(!this[Tt]){this[Tt]=!0;for(let e=this[X].head;e&&this[Q]this.warn(t,i,r),noPax:this.noPax,cwd:this.cwd,absolute:e.absolute,preservePaths:this.preservePaths,maxReadSize:this.maxReadSize,strict:this.strict,portable:this.portable,linkCache:this.linkCache,statCache:this.statCache,noMtime:this.noMtime,mtime:this.mtime,prefix:this.prefix,onWriteEntry:this.onWriteEntry}}[Rn](e){this[Q]+=1;try{return new 
this[Ni](e.path,this[nr](e)).on("end",()=>this[rr](e)).on("error",i=>this.emit("error",i))}catch(t){this.emit("error",t)}}[or](){this[je]&&this[je].entry&&this[je].entry.resume()}[Pi](e){e.piped=!0,e.readdir&&e.readdir.forEach(r=>{let n=e.path,o=n==="./"?"":n.replace(/\/*$/,"/");this[Ti](o+r)});let t=e.entry,i=this.zip;if(!t)throw new Error("cannot pipe without source");i?t.on("data",r=>{i.write(r)||t.pause()}):t.on("data",r=>{super.write(r)||t.pause()})}pause(){return this.zip&&this.zip.pause(),super.pause()}warn(e,t,i={}){(0,ph.warnMethod)(this,e,t,i)}};L.Pack=Li;var cr=class extends Li{sync=!0;constructor(e){super(e),this[Ni]=hr.WriteEntrySync}pause(){}resume(){}[lr](e){let t=this.follow?"statSync":"lstatSync";this[vi](e,Mi.default[t](e.absolute))}[ur](e){this[Di](e,Mi.default.readdirSync(e.absolute))}[Pi](e){let t=e.entry,i=this.zip;if(e.readdir&&e.readdir.forEach(r=>{let n=e.path,o=n==="./"?"":n.replace(/\/*$/,"/");this[Ti](o+r)}),!t)throw new Error("Cannot pipe without source");i?t.on("data",r=>{i.write(r)}):t.on("data",r=>{super[Tn](r)})}};L.PackSync=cr});var fr=d(at=>{"use strict";var _h=at&&at.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty(at,"__esModule",{value:!0});at.create=void 0;var Dn=Ke(),Pn=_h(require("node:path")),Nn=st(),wh=Ve(),Ii=Ai(),yh=(s,e)=>{let t=new Ii.PackSync(s),i=new Dn.WriteStreamSync(s.file,{mode:s.mode||438});t.pipe(i),Mn(t,e)},Eh=(s,e)=>{let t=new Ii.Pack(s),i=new Dn.WriteStream(s.file,{mode:s.mode||438});t.pipe(i);let r=new Promise((n,o)=>{i.on("error",o),i.on("close",n),t.on("error",o)});return Ln(t,e),r},Mn=(s,e)=>{e.forEach(t=>{t.charAt(0)==="@"?(0,Nn.list)({file:Pn.default.resolve(s.cwd,t.slice(1)),sync:!0,noResume:!0,onReadEntry:i=>s.add(i)}):s.add(t)}),s.end()},Ln=async(s,e)=>{for(let t=0;t{s.add(r)}}):s.add(i)}s.end()},bh=(s,e)=>{let t=new Ii.PackSync(s);return Mn(t,e),t},Sh=(s,e)=>{let t=new Ii.Pack(s);return 
Ln(t,e),t};at.create=(0,wh.makeCommand)(yh,Eh,bh,Sh,(s,e)=>{if(!e?.length)throw new TypeError("no paths specified to add to archive")})});var Cn=d(ht=>{"use strict";var gh=ht&&ht.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty(ht,"__esModule",{value:!0});ht.getWriteFlag=void 0;var An=gh(require("fs")),Rh=process.env.__FAKE_PLATFORM__||process.platform,Oh=Rh==="win32",{O_CREAT:vh,O_TRUNC:Th,O_WRONLY:Dh}=An.default.constants,In=Number(process.env.__FAKE_FS_O_FILENAME__)||An.default.constants.UV_FS_O_FILEMAP||0,Ph=Oh&&!!In,Nh=512*1024,Mh=In|Th|vh|Dh;ht.getWriteFlag=Ph?s=>s"w"});var Bn=d(he=>{"use strict";var Fn=he&&he.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty(he,"__esModule",{value:!0});he.chownrSync=he.chownr=void 0;var Fi=Fn(require("node:fs")),Pt=Fn(require("node:path")),dr=(s,e,t)=>{try{return Fi.default.lchownSync(s,e,t)}catch(i){if(i?.code!=="ENOENT")throw i}},Ci=(s,e,t,i)=>{Fi.default.lchown(s,e,t,r=>{i(r&&r?.code!=="ENOENT"?r:null)})},Lh=(s,e,t,i,r)=>{if(e.isDirectory())(0,he.chownr)(Pt.default.resolve(s,e.name),t,i,n=>{if(n)return r(n);let o=Pt.default.resolve(s,e.name);Ci(o,t,i,r)});else{let n=Pt.default.resolve(s,e.name);Ci(n,t,i,r)}},Ah=(s,e,t,i)=>{Fi.default.readdir(s,{withFileTypes:!0},(r,n)=>{if(r){if(r.code==="ENOENT")return i();if(r.code!=="ENOTDIR"&&r.code!=="ENOTSUP")return i(r)}if(r||!n.length)return Ci(s,e,t,i);let o=n.length,a=null,h=l=>{if(!a){if(l)return i(a=l);if(--o===0)return Ci(s,e,t,i)}};for(let l of n)Lh(s,l,e,t,h)})};he.chownr=Ah;var Ih=(s,e,t,i)=>{e.isDirectory()&&(0,he.chownrSync)(Pt.default.resolve(s,e.name),t,i),dr(Pt.default.resolve(s,e.name),t,i)},Ch=(s,e,t)=>{let i;try{i=Fi.default.readdirSync(s,{withFileTypes:!0})}catch(r){let n=r;if(n?.code==="ENOENT")return;if(n?.code==="ENOTDIR"||n?.code==="ENOTSUP")return dr(s,e,t);throw n}for(let r of i)Ih(s,r,e,t);return dr(s,e,t)};he.chownrSync=Ch});var zn=d(Bi=>{"use 
strict";Object.defineProperty(Bi,"__esModule",{value:!0});Bi.CwdError=void 0;var mr=class extends Error{path;code;syscall="chdir";constructor(e,t){super(`${t}: Cannot cd into '${e}'`),this.path=e,this.code=t}get name(){return"CwdError"}};Bi.CwdError=mr});var _r=d(zi=>{"use strict";Object.defineProperty(zi,"__esModule",{value:!0});zi.SymlinkError=void 0;var pr=class extends Error{path;symlink;syscall="symlink";code="TAR_SYMLINK_ERROR";constructor(e,t){super("TAR_SYMLINK_ERROR: Cannot extract through symbolic link"),this.symlink=e,this.path=t}get name(){return"SymlinkError"}};zi.SymlinkError=pr});var qn=d(Te=>{"use strict";var yr=Te&&Te.__importDefault||function(s){return s&&s.__esModule?s:{default:s}};Object.defineProperty(Te,"__esModule",{value:!0});Te.mkdirSync=Te.mkdir=void 0;var kn=Bn(),x=yr(require("node:fs")),Fh=yr(require("node:fs/promises")),ki=yr(require("node:path")),jn=zn(),pe=et(),xn=_r(),Bh=(s,e)=>{x.default.stat(s,(t,i)=>{(t||!i.isDirectory())&&(t=new jn.CwdError(s,t?.code||"ENOTDIR")),e(t)})},zh=(s,e,t)=>{s=(0,pe.normalizeWindowsPath)(s);let i=e.umask??18,r=e.mode|448,n=(r&i)!==0,o=e.uid,a=e.gid,h=typeof o=="number"&&typeof a=="number"&&(o!==e.processUid||a!==e.processGid),l=e.preserve,u=e.unlink,c=(0,pe.normalizeWindowsPath)(e.cwd),E=(w,P)=>{w?t(w):P&&h?(0,kn.chownr)(P,o,a,Cr=>E(Cr)):n?x.default.chmod(s,r,t):t()};if(s===c)return Bh(s,E);if(l)return Fh.default.mkdir(s,{mode:r,recursive:!0}).then(w=>E(null,w??void 0),E);let A=(0,pe.normalizeWindowsPath)(ki.default.relative(c,s)).split("/");wr(c,A,r,u,c,void 0,E)};Te.mkdir=zh;var wr=(s,e,t,i,r,n,o)=>{if(!e.length)return o(null,n);let a=e.shift(),h=(0,pe.normalizeWindowsPath)(ki.default.resolve(s+"/"+a));x.default.mkdir(h,t,Un(h,e,t,i,r,n,o))},Un=(s,e,t,i,r,n,o)=>a=>{a?x.default.lstat(s,(h,l)=>{if(h)h.path=h.path&&(0,pe.normalizeWindowsPath)(h.path),o(h);else if(l.isDirectory())wr(s,e,t,i,r,n,o);else if(i)x.default.unlink(s,u=>{if(u)return 
o(u);x.default.mkdir(s,t,Un(s,e,t,i,r,n,o))});else{if(l.isSymbolicLink())return o(new xn.SymlinkError(s,s+"/"+e.join("/")));o(a)}}):(n=n||s,wr(s,e,t,i,r,n,o))},kh=s=>{let e=!1,t;try{e=x.default.statSync(s).isDirectory()}catch(i){t=i?.code}finally{if(!e)throw new jn.CwdError(s,t??"ENOTDIR")}},jh=(s,e)=>{s=(0,pe.normalizeWindowsPath)(s);let t=e.umask??18,i=e.mode|448,r=(i&t)!==0,n=e.uid,o=e.gid,a=typeof n=="number"&&typeof o=="number"&&(n!==e.processUid||o!==e.processGid),h=e.preserve,l=e.unlink,u=(0,pe.normalizeWindowsPath)(e.cwd),c=w=>{w&&a&&(0,kn.chownrSync)(w,n,o),r&&x.default.chmodSync(s,i)};if(s===u)return kh(u),c();if(h)return c(x.default.mkdirSync(s,{mode:i,recursive:!0})??void 0);let D=(0,pe.normalizeWindowsPath)(ki.default.relative(u,s)).split("/"),A;for(let w=D.shift(),P=u;w&&(P+="/"+w);w=D.shift()){P=(0,pe.normalizeWindowsPath)(ki.default.resolve(P));try{x.default.mkdirSync(P,i),A=A||P}catch{let Fr=x.default.lstatSync(P);if(Fr.isDirectory())continue;if(l){x.default.unlinkSync(P),x.default.mkdirSync(P,i),A=A||P;continue}else if(Fr.isSymbolicLink())return new xn.SymlinkError(P,P+"/"+D.join("/"))}}return c(A)};Te.mkdirSync=jh});var Hn=d(ji=>{"use strict";Object.defineProperty(ji,"__esModule",{value:!0});ji.normalizeUnicode=void 0;var Er=Object.create(null),Wn=1e4,lt=new Set,xh=s=>{lt.has(s)?lt.delete(s):Er[s]=s.normalize("NFD").toLocaleLowerCase("en").toLocaleUpperCase("en"),lt.add(s);let e=Er[s],t=lt.size-Wn;if(t>Wn/10){for(let i of lt)if(lt.delete(i),delete Er[i],--t<=0)break}return e};ji.normalizeUnicode=xh});var Gn=d(xi=>{"use strict";Object.defineProperty(xi,"__esModule",{value:!0});xi.PathReservations=void 0;var Zn=require("node:path"),Uh=Hn(),qh=_i(),Wh=process.env.TESTING_TAR_FAKE_PLATFORM||process.platform,Hh=Wh==="win32",Zh=s=>s.split("/").slice(0,-1).reduce((t,i)=>{let r=t[t.length-1];return r!==void 0&&(i=(0,Zn.join)(r,i)),t.push(i||"/"),t},[]),br=class{#e=new Map;#i=new Map;#s=new Set;reserve(e,t){e=Hh?["win32 parallelization 
disabled"]:e.map(r=>(0,qh.stripTrailingSlashes)((0,Zn.join)((0,Uh.normalizeUnicode)(r))));let i=new Set(e.map(r=>Zh(r)).reduce((r,n)=>r.concat(n)));this.#i.set(t,{dirs:i,paths:e});for(let r of e){let n=this.#e.get(r);n?n.push(t):this.#e.set(r,[t])}for(let r of i){let n=this.#e.get(r);if(!n)this.#e.set(r,[new Set([t])]);else{let o=n[n.length-1];o instanceof Set?o.add(t):n.push(new Set([t]))}}return this.#r(t)}#n(e){let t=this.#i.get(e);if(!t)throw new Error("function does not have any path reservations");return{paths:t.paths.map(i=>this.#e.get(i)),dirs:[...t.dirs].map(i=>this.#e.get(i))}}check(e){let{paths:t,dirs:i}=this.#n(e);return t.every(r=>r&&r[0]===e)&&i.every(r=>r&&r[0]instanceof Set&&r[0].has(e))}#r(e){return this.#s.has(e)||!this.check(e)?!1:(this.#s.add(e),e(()=>this.#t(e)),!0)}#t(e){if(!this.#s.has(e))return!1;let t=this.#i.get(e);if(!t)throw new Error("invalid reservation");let{paths:i,dirs:r}=t,n=new Set;for(let o of i){let a=this.#e.get(o);if(!a||a?.[0]!==e)continue;let h=a[1];if(!h){this.#e.delete(o);continue}if(a.shift(),typeof h=="function")n.add(h);else for(let l of h)n.add(l)}for(let o of r){let a=this.#e.get(o),h=a?.[0];if(!(!a||!(h instanceof Set)))if(h.size===1&&a.length===1){this.#e.delete(o);continue}else if(h.size===1){a.shift();let l=a[0];typeof l=="function"&&n.add(l)}else h.delete(e)}return this.#s.delete(e),n.forEach(o=>this.#r(o)),!0}};xi.PathReservations=br});var Yn=d(Ui=>{"use strict";Object.defineProperty(Ui,"__esModule",{value:!0});Ui.umask=void 0;var Gh=()=>process.umask();Ui.umask=Gh});var Lr=d(z=>{"use strict";var Yh=z&&z.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 
0&&(i=t),s[i]=e[t]})),Kh=z&&z.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),so=z&&z.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var t={};if(e!=null)for(var i=s(e),r=0;r{if(!Ft)return m.default.unlink(s,e);let t=s+".DELETE."+(0,ro.randomBytes)(16).toString("hex");m.default.rename(s,t,i=>{if(i)return e(i);m.default.unlink(t,e)})},rl=s=>{if(!Ft)return m.default.unlinkSync(s);let e=s+".DELETE."+(0,ro.randomBytes)(16).toString("hex");m.default.renameSync(s,e),m.default.unlinkSync(e)},io=(s,e,t)=>s!==void 0&&s===s>>>0?s:e!==void 0&&e===e>>>0?e:t,Hi=class extends Xh.Parser{[gr]=!1;[Ct]=!1;[qi]=0;reservations=new Jh.PathReservations;transform;writable=!0;readable=!1;uid;gid;setOwner;preserveOwner;processGid;processUid;maxDepth;forceChown;win32;newer;keep;noMtime;preservePaths;unlink;cwd;strip;processUmask;umask;dmode;fmode;chmod;constructor(e={}){if(e.ondone=()=>{this[gr]=!0,this[Rr]()},super(e),this.transform=e.transform,this.chmod=!!e.chmod,typeof e.uid=="number"||typeof e.gid=="number"){if(typeof e.uid!="number"||typeof e.gid!="number")throw new TypeError("cannot set owner without number uid and gid");if(e.preserveOwner)throw new TypeError("cannot preserve owner in archive and also set owner explicitly");this.uid=e.uid,this.gid=e.gid,this.setOwner=!0}else this.uid=void 0,this.gid=void 0,this.setOwner=!1;e.preserveOwner===void 0&&typeof e.uid!="number"?this.preserveOwner=!!(process.getuid&&process.getuid()===0):this.preserveOwner=!!e.preserveOwner,this.processUid=(this.preserveOwner||this.setOwner)&&process.getuid?process.getuid():void 0,this.processGid=(this.preserveOwner||this.setOwner)&&process.getgid?process.getgid():void 0,this.maxDepth=typeof 
e.maxDepth=="number"?e.maxDepth:il,this.forceChown=e.forceChown===!0,this.win32=!!e.win32||Ft,this.newer=!!e.newer,this.keep=!!e.keep,this.noMtime=!!e.noMtime,this.preservePaths=!!e.preservePaths,this.unlink=!!e.unlink,this.cwd=(0,U.normalizeWindowsPath)(g.default.resolve(e.cwd||process.cwd())),this.strip=Number(e.strip)||0,this.processUmask=this.chmod?typeof e.processUmask=="number"?e.processUmask:(0,el.umask)():0,this.umask=typeof e.umask=="number"?e.umask:this.processUmask,this.dmode=e.dmode||511&~this.umask,this.fmode=e.fmode||438&~this.umask,this.on("entry",t=>this[Vn](t))}warn(e,t,i={}){return(e==="TAR_BAD_ARCHIVE"||e==="TAR_ABORT")&&(i.recoverable=!1),super.warn(e,t,i)}[Rr](){this[gr]&&this[qi]===0&&(this.emit("prefinish"),this.emit("finish"),this.emit("end"))}[Sr](e,t){let i=e[t],{type:r}=e;if(!i||this.preservePaths)return!0;let[n,o]=(0,Qh.stripAbsolutePath)(i),a=o.replace(/\\/g,"/").split("/");if(a.includes("..")||Ft&&/^[a-z]:\.\.$/i.test(a[0]??"")){if(t==="path"||r==="Link")return this.warn("TAR_ENTRY_ERROR",`${t} contains '..'`,{entry:e,[t]:i}),!1;{let h=g.default.posix.dirname(e.path),l=g.default.posix.normalize(g.default.posix.join(h,a.join("/")));if(l.startsWith("../")||l==="..")return this.warn("TAR_ENTRY_ERROR",`${t} escapes extraction directory`,{entry:e,[t]:i}),!1}}return n&&(e[t]=String(o),this.warn("TAR_ENTRY_INFO",`stripping ${n} from absolute ${t}`,{entry:e,[t]:i})),!0}[eo](e){let t=(0,U.normalizeWindowsPath)(e.path),i=t.split("/");if(this.strip){if(i.length=this.strip)e.linkpath=r.slice(this.strip).join("/");else return!1}i.splice(0,this.strip),e.path=i.join("/")}if(isFinite(this.maxDepth)&&i.length>this.maxDepth)return this.warn("TAR_ENTRY_ERROR","path excessively 
deep",{entry:e,path:t,depth:i.length,maxDepth:this.maxDepth}),!1;if(!this[Sr](e,"path")||!this[Sr](e,"linkpath"))return!1;if(g.default.isAbsolute(e.path)?e.absolute=(0,U.normalizeWindowsPath)(g.default.resolve(e.path)):e.absolute=(0,U.normalizeWindowsPath)(g.default.resolve(this.cwd,e.path)),!this.preservePaths&&typeof e.absolute=="string"&&e.absolute.indexOf(this.cwd+"/")!==0&&e.absolute!==this.cwd)return this.warn("TAR_ENTRY_ERROR","path escaped extraction target",{entry:e,path:(0,U.normalizeWindowsPath)(e.path),resolvedPath:e.absolute,cwd:this.cwd}),!1;if(e.absolute===this.cwd&&e.type!=="Directory"&&e.type!=="GNUDumpDir")return!1;if(this.win32){let{root:r}=g.default.win32.parse(String(e.absolute));e.absolute=r+Kn.encode(String(e.absolute).slice(r.length));let{root:n}=g.default.win32.parse(e.path);e.path=n+Kn.encode(e.path.slice(n.length))}return!0}[Vn](e){if(!this[eo](e))return e.resume();switch($h.default.equal(typeof e.absolute,"string"),e.type){case"Directory":case"GNUDumpDir":e.mode&&(e.mode=e.mode|448);case"File":case"OldFile":case"ContiguousFile":case"Link":case"SymbolicLink":return this[Or](e);default:return this[Jn](e)}}[T](e,t){e.name==="CwdError"?this.emit("error",e):(this.warn("TAR_ENTRY_ERROR",e,{entry:t}),this[ut](),t.resume())}[De](e,t,i){(0,oo.mkdir)((0,U.normalizeWindowsPath)(e),{uid:this.uid,gid:this.gid,processUid:this.processUid,processGid:this.processGid,umask:this.processUmask,preserve:this.preservePaths,unlink:this.unlink,cwd:this.cwd,mode:t},i)}[Lt](e){return this.forceChown||this.preserveOwner&&(typeof e.uid=="number"&&e.uid!==this.processUid||typeof e.gid=="number"&&e.gid!==this.processGid)||typeof this.uid=="number"&&this.uid!==this.processUid||typeof this.gid=="number"&&this.gid!==this.processGid}[At](e){return io(this.uid,e.uid,this.processUid)}[It](e){return io(this.gid,e.gid,this.processGid)}[Tr](e,t){let i=typeof e.mode=="number"?e.mode&4095:this.fmode,r=new 
Vh.WriteStream(String(e.absolute),{flags:(0,no.getWriteFlag)(e.size),mode:i,autoClose:!1});r.on("error",h=>{r.fd&&m.default.close(r.fd,()=>{}),r.write=()=>!0,this[T](h,e),t()});let n=1,o=h=>{if(h){r.fd&&m.default.close(r.fd,()=>{}),this[T](h,e),t();return}--n===0&&r.fd!==void 0&&m.default.close(r.fd,l=>{l?this[T](l,e):this[ut](),t()})};r.on("finish",()=>{let h=String(e.absolute),l=r.fd;if(typeof l=="number"&&e.mtime&&!this.noMtime){n++;let u=e.atime||new Date,c=e.mtime;m.default.futimes(l,u,c,E=>E?m.default.utimes(h,u,c,D=>o(D&&E)):o())}if(typeof l=="number"&&this[Lt](e)){n++;let u=this[At](e),c=this[It](e);typeof u=="number"&&typeof c=="number"&&m.default.fchown(l,u,c,E=>E?m.default.chown(h,u,c,D=>o(D&&E)):o())}o()});let a=this.transform&&this.transform(e)||e;a!==e&&(a.on("error",h=>{this[T](h,e),t()}),e.pipe(a)),a.pipe(r)}[Dr](e,t){let i=typeof e.mode=="number"?e.mode&4095:this.dmode;this[De](String(e.absolute),i,r=>{if(r){this[T](r,e),t();return}let n=1,o=()=>{--n===0&&(t(),this[ut](),e.resume())};e.mtime&&!this.noMtime&&(n++,m.default.utimes(String(e.absolute),e.atime||new Date,e.mtime,o)),this[Lt](e)&&(n++,m.default.chown(String(e.absolute),Number(this[At](e)),Number(this[It](e)),o)),o()})}[Jn](e){e.unsupported=!0,this.warn("TAR_ENTRY_UNSUPPORTED",`unsupported entry type: ${e.type}`,{entry:e}),e.resume()}[Xn](e,t){let i=(0,U.normalizeWindowsPath)(g.default.relative(this.cwd,g.default.resolve(g.default.dirname(String(e.absolute)),String(e.linkpath)))).split("/");this[Mt](e,this.cwd,i,()=>this[Wi](e,String(e.linkpath),"symlink",t),r=>{this[T](r,e),t()})}[Qn](e,t){let i=(0,U.normalizeWindowsPath)(g.default.resolve(this.cwd,String(e.linkpath))),r=(0,U.normalizeWindowsPath)(String(e.linkpath)).split("/");this[Mt](e,this.cwd,r,()=>this[Wi](e,i,"link",t),n=>{this[T](n,e),t()})}[Mt](e,t,i,r,n){let o=i.shift();if(this.preservePaths||o===void 0)return r();let a=g.default.resolve(t,o);m.default.lstat(a,(h,l)=>{if(h)return r();if(l?.isSymbolicLink())return n(new 
ao.SymlinkError(a,g.default.resolve(a,i.join("/"))));this[Mt](e,a,i,r,n)})}[to](){this[qi]++}[ut](){this[qi]--,this[Rr]()}[Pr](e){this[ut](),e.resume()}[vr](e,t){return e.type==="File"&&!this.unlink&&t.isFile()&&t.nlink<=1&&!Ft}[Or](e){this[to]();let t=[e.path];e.linkpath&&t.push(e.linkpath),this.reservations.reserve(t,i=>this[$n](e,i))}[$n](e,t){let i=a=>{t(a)},r=()=>{this[De](this.cwd,this.dmode,a=>{if(a){this[T](a,e),i();return}this[Ct]=!0,n()})},n=()=>{if(e.absolute!==this.cwd){let a=(0,U.normalizeWindowsPath)(g.default.dirname(String(e.absolute)));if(a!==this.cwd)return this[De](a,this.dmode,h=>{if(h){this[T](h,e),i();return}o()})}o()},o=()=>{m.default.lstat(String(e.absolute),(a,h)=>{if(h&&(this.keep||this.newer&&h.mtime>(e.mtime??h.mtime))){this[Pr](e),i();return}if(a||this[vr](e,h))return this[Z](null,e,i);if(h.isDirectory()){if(e.type==="Directory"){let l=this.chmod&&e.mode&&(h.mode&4095)!==e.mode,u=c=>this[Z](c??null,e,i);return l?m.default.chmod(String(e.absolute),Number(e.mode),u):u()}if(e.absolute!==this.cwd)return m.default.rmdir(String(e.absolute),l=>this[Z](l??null,e,i))}if(e.absolute===this.cwd)return this[Z](null,e,i);sl(String(e.absolute),l=>this[Z](l??null,e,i))})};this[Ct]?n():r()}[Z](e,t,i){if(e){this[T](e,t),i();return}switch(t.type){case"File":case"OldFile":case"ContiguousFile":return this[Tr](t,i);case"Link":return this[Qn](t,i);case"SymbolicLink":return this[Xn](t,i);case"Directory":case"GNUDumpDir":return this[Dr](t,i)}}[Wi](e,t,i,r){m.default[i](t,String(e.absolute),n=>{n?this[T](n,e):(this[ut](),e.resume()),r()})}};z.Unpack=Hi;var Nt=s=>{try{return[null,s()]}catch(e){return[e,null]}},Nr=class extends Hi{sync=!0;[Z](e,t){return super[Z](e,t,()=>{})}[Or](e){if(!this[Ct]){let n=this[De](this.cwd,this.dmode);if(n)return this[T](n,e);this[Ct]=!0}if(e.absolute!==this.cwd){let n=(0,U.normalizeWindowsPath)(g.default.dirname(String(e.absolute)));if(n!==this.cwd){let o=this[De](n,this.dmode);if(o)return 
this[T](o,e)}}let[t,i]=Nt(()=>m.default.lstatSync(String(e.absolute)));if(i&&(this.keep||this.newer&&i.mtime>(e.mtime??i.mtime)))return this[Pr](e);if(t||this[vr](e,i))return this[Z](null,e);if(i.isDirectory()){if(e.type==="Directory"){let o=this.chmod&&e.mode&&(i.mode&4095)!==e.mode,[a]=o?Nt(()=>{m.default.chmodSync(String(e.absolute),Number(e.mode))}):[];return this[Z](a,e)}let[n]=Nt(()=>m.default.rmdirSync(String(e.absolute)));this[Z](n,e)}let[r]=e.absolute===this.cwd?[]:Nt(()=>rl(String(e.absolute)));this[Z](r,e)}[Tr](e,t){let i=typeof e.mode=="number"?e.mode&4095:this.fmode,r=a=>{let h;try{m.default.closeSync(n)}catch(l){h=l}(a||h)&&this[T](a||h,e),t()},n;try{n=m.default.openSync(String(e.absolute),(0,no.getWriteFlag)(e.size),i)}catch(a){return r(a)}let o=this.transform&&this.transform(e)||e;o!==e&&(o.on("error",a=>this[T](a,e)),e.pipe(o)),o.on("data",a=>{try{m.default.writeSync(n,a,0,a.length)}catch(h){r(h)}}),o.on("end",()=>{let a=null;if(e.mtime&&!this.noMtime){let h=e.atime||new Date,l=e.mtime;try{m.default.futimesSync(n,h,l)}catch(u){try{m.default.utimesSync(String(e.absolute),h,l)}catch{a=u}}}if(this[Lt](e)){let h=this[At](e),l=this[It](e);try{m.default.fchownSync(n,Number(h),Number(l))}catch(u){try{m.default.chownSync(String(e.absolute),Number(h),Number(l))}catch{a=a||u}}}r(a)})}[Dr](e,t){let i=typeof e.mode=="number"?e.mode&4095:this.dmode,r=this[De](String(e.absolute),i);if(r){this[T](r,e),t();return}if(e.mtime&&!this.noMtime)try{m.default.utimesSync(String(e.absolute),e.atime||new Date,e.mtime)}catch{}if(this[Lt](e))try{m.default.chownSync(String(e.absolute),Number(this[At](e)),Number(this[It](e)))}catch{}t(),e.resume()}[De](e,t){try{return(0,oo.mkdirSync)((0,U.normalizeWindowsPath)(e),{uid:this.uid,gid:this.gid,processUid:this.processUid,processGid:this.processGid,umask:this.processUmask,preserve:this.preservePaths,unlink:this.unlink,cwd:this.cwd,mode:t})}catch(i){return i}}[Mt](e,t,i,r,n){if(this.preservePaths||!i.length)return r();let o=t;for(let 
a of i){o=g.default.resolve(o,a);let[h,l]=Nt(()=>m.default.lstatSync(o));if(h)return r();if(l.isSymbolicLink())return n(new ao.SymlinkError(o,g.default.resolve(t,i.join("/"))))}r()}[Wi](e,t,i,r){let n=`${i}Sync`;try{m.default[n](t,String(e.absolute)),r(),e.resume()}catch(o){return this[T](o,e)}}};z.UnpackSync=Nr});var Ar=d(G=>{"use strict";var nl=G&&G.__createBinding||(Object.create?(function(s,e,t,i){i===void 0&&(i=t);var r=Object.getOwnPropertyDescriptor(e,t);(!r||("get"in r?!e.__esModule:r.writable||r.configurable))&&(r={enumerable:!0,get:function(){return e[t]}}),Object.defineProperty(s,i,r)}):(function(s,e,t,i){i===void 0&&(i=t),s[i]=e[t]})),ol=G&&G.__setModuleDefault||(Object.create?(function(s,e){Object.defineProperty(s,"default",{enumerable:!0,value:e})}):function(s,e){s.default=e}),al=G&&G.__importStar||(function(){var s=function(e){return s=Object.getOwnPropertyNames||function(t){var i=[];for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(i[i.length]=r);return i},s(e)};return function(e){if(e&&e.__esModule)return e;var t={};if(e!=null)for(var i=s(e),r=0;r