nitro-nightly 3.0.1-20260106-130706-e20c92d9 → 3.0.1-20260106-182834-5019d347
This diff shows the changes between publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- package/dist/_build/rolldown.mjs +7 -5
- package/dist/_build/rollup.mjs +15 -5
- package/dist/_build/shared.mjs +2 -1
- package/dist/_build/shared2.mjs +1 -1
- package/dist/_build/shared3.mjs +1 -1
- package/dist/_build/vite.build.mjs +7 -6
- package/dist/_dev.mjs +2 -1
- package/dist/_libs/@hiogawa/vite-plugin-fullstack.mjs +5 -1560
- package/dist/_libs/@jridgewell/gen-mapping.mjs +2 -304
- package/dist/_libs/@jridgewell/remapping.mjs +2 -1
- package/dist/_libs/@jridgewell/resolve-uri.mjs +166 -0
- package/dist/_libs/@jridgewell/sourcemap-codec.mjs +167 -0
- package/dist/_libs/@jridgewell/trace-mapping.mjs +141 -0
- package/dist/_libs/@rolldown/pluginutils.mjs +31 -0
- package/dist/_libs/@rollup/plugin-commonjs.mjs +33 -2636
- package/dist/_libs/@rollup/plugin-inject.mjs +3 -2
- package/dist/_libs/@rollup/plugin-json.mjs +1 -1
- package/dist/_libs/@rollup/plugin-node-resolve.mjs +7 -1111
- package/dist/_libs/@rollup/plugin-replace.mjs +2 -2
- package/dist/_libs/@rollup/pluginutils.mjs +241 -0
- package/dist/_libs/c12.mjs +63 -2513
- package/dist/_libs/chokidar.mjs +1 -234
- package/dist/_libs/commondir.mjs +22 -0
- package/dist/_libs/confbox.mjs +1102 -502
- package/dist/_libs/deepmerge.mjs +86 -0
- package/dist/_libs/dotenv.mjs +345 -0
- package/dist/_libs/estree-walker.mjs +144 -1
- package/dist/_libs/exsolve.mjs +1007 -0
- package/dist/_libs/fdir.mjs +514 -0
- package/dist/_libs/function-bind.mjs +63 -0
- package/dist/_libs/giget.mjs +1376 -2234
- package/dist/_libs/hasown.mjs +14 -0
- package/dist/_libs/is-core-module.mjs +220 -0
- package/dist/_libs/is-module.mjs +13 -0
- package/dist/_libs/is-reference.mjs +33 -0
- package/dist/_libs/js-tokens.mjs +382 -0
- package/dist/_libs/local-pkg.mjs +6 -1560
- package/dist/_libs/magic-string.mjs +939 -0
- package/dist/_libs/mlly.mjs +1415 -0
- package/dist/_libs/node-fetch-native.mjs +7 -0
- package/dist/_libs/nypm.mjs +239 -0
- package/dist/_libs/path-parse.mjs +47 -0
- package/dist/_libs/pathe.mjs +163 -2
- package/dist/_libs/perfect-debounce.mjs +89 -0
- package/dist/_libs/picomatch.mjs +1673 -0
- package/dist/_libs/pkg-types.mjs +197 -0
- package/dist/_libs/quansync.mjs +90 -0
- package/dist/_libs/rc9.mjs +136 -0
- package/dist/_libs/readdirp.mjs +237 -0
- package/dist/_libs/resolve.mjs +689 -0
- package/dist/_libs/strip-literal.mjs +51 -0
- package/dist/_libs/tinyexec.mjs +627 -0
- package/dist/_libs/tinyglobby.mjs +2 -1
- package/dist/_libs/unimport.mjs +42 -1575
- package/dist/_libs/unplugin-utils.mjs +61 -0
- package/dist/_libs/unplugin.mjs +1225 -0
- package/dist/_libs/unwasm.mjs +3 -2
- package/dist/_libs/webpack-virtual-modules.mjs +272 -0
- package/dist/_nitro.mjs +5 -2
- package/dist/_nitro2.mjs +3 -3
- package/dist/_presets.mjs +5 -3
- package/dist/builder.mjs +4 -4
- package/dist/cli/_chunks/build.mjs +1 -1
- package/dist/cli/_chunks/dev.mjs +1 -1
- package/dist/cli/_chunks/list.mjs +1 -1
- package/dist/cli/_chunks/prepare.mjs +1 -1
- package/dist/cli/_chunks/run.mjs +1 -1
- package/dist/vite.mjs +1824 -19
- package/package.json +2 -2
- package/dist/_build/shared4.mjs +0 -1114
- package/dist/_build/vite.plugin.mjs +0 -712
package/dist/_libs/pkg-types.mjs

@@ -0,0 +1,197 @@
+import { a as isAbsolute, o as join, r as dirname, s as normalize, u as resolve } from "./pathe.mjs";
+import { t as resolveModulePath } from "./exsolve.mjs";
+import { d as x, i as S, n as h$1, r as x$1, u as h } from "./confbox.mjs";
+import { u as resolvePath } from "./mlly.mjs";
+import { promises, statSync } from "node:fs";
+import { readFile, writeFile } from "node:fs/promises";
+import { fileURLToPath } from "node:url";
+
+//#region node_modules/.pnpm/pkg-types@2.3.0/node_modules/pkg-types/dist/index.mjs
+const defaultFindOptions$1 = {
+  startingFrom: ".",
+  rootPattern: /^node_modules$/,
+  reverse: false,
+  test: (filePath) => {
+    try {
+      if (statSync(filePath).isFile()) return true;
+    } catch {}
+  }
+};
+async function findFile$1(filename, _options = {}) {
+  const filenames = Array.isArray(filename) ? filename : [filename];
+  const options = {
+    ...defaultFindOptions$1,
+    ..._options
+  };
+  const basePath = resolve(options.startingFrom);
+  const leadingSlash = basePath[0] === "/";
+  const segments = basePath.split("/").filter(Boolean);
+  if (filenames.includes(segments.at(-1)) && await options.test(basePath)) return basePath;
+  if (leadingSlash) segments[0] = "/" + segments[0];
+  let root = segments.findIndex((r) => r.match(options.rootPattern));
+  if (root === -1) root = 0;
+  if (options.reverse) for (let index = root + 1; index <= segments.length; index++) for (const filename2 of filenames) {
+    const filePath = join(...segments.slice(0, index), filename2);
+    if (await options.test(filePath)) return filePath;
+  }
+  else for (let index = segments.length; index > root; index--) for (const filename2 of filenames) {
+    const filePath = join(...segments.slice(0, index), filename2);
+    if (await options.test(filePath)) return filePath;
+  }
+  throw new Error(`Cannot find matching ${filename} in ${options.startingFrom} or parent directories`);
+}
+function findNearestFile$1(filename, options = {}) {
+  return findFile$1(filename, options);
+}
+function _resolvePath(id, opts = {}) {
+  if (id instanceof URL || id.startsWith("file://")) return normalize(fileURLToPath(id));
+  if (isAbsolute(id)) return normalize(id);
+  return resolveModulePath(id, {
+    ...opts,
+    from: opts.from || opts.parent || opts.url
+  });
+}
+const lockFiles = [
+  "yarn.lock",
+  "package-lock.json",
+  "pnpm-lock.yaml",
+  "npm-shrinkwrap.json",
+  "bun.lockb",
+  "bun.lock",
+  "deno.lock"
+];
+const packageFiles = [
+  "package.json",
+  "package.json5",
+  "package.yaml"
+];
+const workspaceFiles = [
+  "pnpm-workspace.yaml",
+  "lerna.json",
+  "turbo.json",
+  "rush.json",
+  "deno.json",
+  "deno.jsonc"
+];
+const FileCache$1 = /* @__PURE__ */ new Map();
+async function readPackageJSON$1(id, options = {}) {
+  const resolvedPath = await resolvePackageJSON$1(id, options);
+  const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : FileCache$1;
+  if (options.cache && cache.has(resolvedPath)) return cache.get(resolvedPath);
+  const blob = await promises.readFile(resolvedPath, "utf8");
+  let parsed;
+  try {
+    parsed = x(blob);
+  } catch {
+    parsed = h(blob);
+  }
+  cache.set(resolvedPath, parsed);
+  return parsed;
+}
+async function resolvePackageJSON$1(id = process.cwd(), options = {}) {
+  return findNearestFile$1("package.json", {
+    ...options,
+    startingFrom: _resolvePath(id, options)
+  });
+}
+const workspaceTests = {
+  workspaceFile: (opts) => findFile$1(workspaceFiles, opts).then((r) => dirname(r)),
+  gitConfig: (opts) => findFile$1(".git/config", opts).then((r) => resolve(r, "../..")),
+  lockFile: (opts) => findFile$1(lockFiles, opts).then((r) => dirname(r)),
+  packageJson: (opts) => findFile$1(packageFiles, opts).then((r) => dirname(r))
+};
+async function findWorkspaceDir(id = process.cwd(), options = {}) {
+  const startingFrom = _resolvePath(id, options);
+  const tests = options.tests || [
+    "workspaceFile",
+    "gitConfig",
+    "lockFile",
+    "packageJson"
+  ];
+  for (const testName of tests) {
+    const test = workspaceTests[testName];
+    if (options[testName] === false || !test) continue;
+    const direction = options[testName] || (testName === "gitConfig" ? "closest" : "furthest");
+    const detected = await test({
+      ...options,
+      startingFrom,
+      reverse: direction === "furthest"
+    }).catch(() => {});
+    if (detected) return detected;
+  }
+  throw new Error(`Cannot detect workspace root from ${id}`);
+}
+async function resolveGitConfig(dir, opts) {
+  return findNearestFile$1(".git/config", {
+    ...opts,
+    startingFrom: dir
+  });
+}
+async function readGitConfig(dir, opts) {
+  return parseGitConfig(await readFile(await resolveGitConfig(dir, opts), "utf8"));
+}
+function parseGitConfig(ini) {
+  return S(ini.replaceAll(/^\[(\w+) "(.+)"\]$/gm, "[$1.$2]"));
+}
+
+//#endregion
+//#region node_modules/.pnpm/pkg-types@1.3.1/node_modules/pkg-types/dist/index.mjs
+const defaultFindOptions = {
+  startingFrom: ".",
+  rootPattern: /^node_modules$/,
+  reverse: false,
+  test: (filePath) => {
+    try {
+      if (statSync(filePath).isFile()) return true;
+    } catch {}
+  }
+};
+async function findFile(filename, _options = {}) {
+  const filenames = Array.isArray(filename) ? filename : [filename];
+  const options = {
+    ...defaultFindOptions,
+    ..._options
+  };
+  const basePath = resolve(options.startingFrom);
+  const leadingSlash = basePath[0] === "/";
+  const segments = basePath.split("/").filter(Boolean);
+  if (leadingSlash) segments[0] = "/" + segments[0];
+  let root = segments.findIndex((r) => r.match(options.rootPattern));
+  if (root === -1) root = 0;
+  if (options.reverse) for (let index = root + 1; index <= segments.length; index++) for (const filename2 of filenames) {
+    const filePath = join(...segments.slice(0, index), filename2);
+    if (await options.test(filePath)) return filePath;
+  }
+  else for (let index = segments.length; index > root; index--) for (const filename2 of filenames) {
+    const filePath = join(...segments.slice(0, index), filename2);
+    if (await options.test(filePath)) return filePath;
+  }
+  throw new Error(`Cannot find matching ${filename} in ${options.startingFrom} or parent directories`);
+}
+function findNearestFile(filename, _options = {}) {
+  return findFile(filename, _options);
+}
+const FileCache = /* @__PURE__ */ new Map();
+async function readPackageJSON(id, options = {}) {
+  const resolvedPath = await resolvePackageJSON(id, options);
+  const cache = options.cache && typeof options.cache !== "boolean" ? options.cache : FileCache;
+  if (options.cache && cache.has(resolvedPath)) return cache.get(resolvedPath);
+  const blob = await promises.readFile(resolvedPath, "utf8");
+  let parsed;
+  try {
+    parsed = x$1(blob);
+  } catch {
+    parsed = h$1(blob);
+  }
+  cache.set(resolvedPath, parsed);
+  return parsed;
+}
+async function resolvePackageJSON(id = process.cwd(), options = {}) {
+  return findNearestFile("package.json", {
+    startingFrom: isAbsolute(id) ? id : await resolvePath(id, options),
+    ...options
+  });
+}
+
+//#endregion
+export { parseGitConfig as a, resolveGitConfig as c, findWorkspaceDir as i, resolvePackageJSON$1 as l, findFile$1 as n, readGitConfig as o, findNearestFile$1 as r, readPackageJSON$1 as s, readPackageJSON as t };
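For orientation only (this sketch is not part of the diff): the hunk above bundles two pkg-types versions side by side under mangled export names. The sketch below uses the public pkg-types API rather than the internal `s`/`i` exports.

```js
// Illustrative sketch, not part of the diff: the public pkg-types API that
// the bundled readPackageJSON$1 / findWorkspaceDir helpers correspond to.
import { readPackageJSON, findWorkspaceDir } from "pkg-types";

const pkg = await readPackageJSON(); // nearest package.json, parsed
console.log(pkg.name, pkg.version);

// Walks up from cwd and detects the workspace root via a workspace file,
// .git/config, a lockfile, or the outermost package.json.
const workspaceDir = await findWorkspaceDir();
console.log(workspaceDir);
```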
package/dist/_libs/quansync.mjs

@@ -0,0 +1,90 @@
+//#region node_modules/.pnpm/quansync@0.2.11/node_modules/quansync/dist/index.mjs
+const GET_IS_ASYNC = Symbol.for("quansync.getIsAsync");
+var QuansyncError = class extends Error {
+  constructor(message = "Unexpected promise in sync context") {
+    super(message);
+    this.name = "QuansyncError";
+  }
+};
+function isThenable(value) {
+  return value && typeof value === "object" && typeof value.then === "function";
+}
+function isQuansyncGenerator(value) {
+  return value && typeof value === "object" && typeof value[Symbol.iterator] === "function" && "__quansync" in value;
+}
+function fromObject(options) {
+  const generator = function* (...args) {
+    if (yield GET_IS_ASYNC) return yield options.async.apply(this, args);
+    return options.sync.apply(this, args);
+  };
+  function fn(...args) {
+    const iter = generator.apply(this, args);
+    iter.then = (...thenArgs) => options.async.apply(this, args).then(...thenArgs);
+    iter.__quansync = true;
+    return iter;
+  }
+  fn.sync = options.sync;
+  fn.async = options.async;
+  return fn;
+}
+function fromPromise(promise) {
+  return fromObject({
+    async: () => Promise.resolve(promise),
+    sync: () => {
+      if (isThenable(promise)) throw new QuansyncError();
+      return promise;
+    }
+  });
+}
+function unwrapYield(value, isAsync) {
+  if (value === GET_IS_ASYNC) return isAsync;
+  if (isQuansyncGenerator(value)) return isAsync ? iterateAsync(value) : iterateSync(value);
+  if (!isAsync && isThenable(value)) throw new QuansyncError();
+  return value;
+}
+const DEFAULT_ON_YIELD = (value) => value;
+function iterateSync(generator, onYield = DEFAULT_ON_YIELD) {
+  let current = generator.next();
+  while (!current.done) try {
+    current = generator.next(unwrapYield(onYield(current.value, false)));
+  } catch (err) {
+    current = generator.throw(err);
+  }
+  return unwrapYield(current.value);
+}
+async function iterateAsync(generator, onYield = DEFAULT_ON_YIELD) {
+  let current = generator.next();
+  while (!current.done) try {
+    current = generator.next(await unwrapYield(onYield(current.value, true), true));
+  } catch (err) {
+    current = generator.throw(err);
+  }
+  return current.value;
+}
+function fromGeneratorFn(generatorFn, options) {
+  return fromObject({
+    name: generatorFn.name,
+    async(...args) {
+      return iterateAsync(generatorFn.apply(this, args), options?.onYield);
+    },
+    sync(...args) {
+      return iterateSync(generatorFn.apply(this, args), options?.onYield);
+    }
+  });
+}
+function quansync$1(input, options) {
+  if (isThenable(input)) return fromPromise(input);
+  if (typeof input === "function") return fromGeneratorFn(input, options);
+  else return fromObject(input);
+}
+const getIsAsync = quansync$1({
+  async: () => Promise.resolve(true),
+  sync: () => false
+});
+
+//#endregion
+//#region node_modules/.pnpm/quansync@0.2.11/node_modules/quansync/dist/macro.mjs
+const quansync = quansync$1;
+
+//#endregion
+export { quansync as t };
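For orientation only (not part of the diff): quansync wraps a sync/async pair behind a single callable and picks the branch from the calling context. A minimal sketch against the public `quansync` API:

```js
// Illustrative sketch, not part of the diff: one callable that can run
// either the sync or the async implementation.
import { readFileSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { quansync } from "quansync";

const loadText = quansync({
  sync: (path) => readFileSync(path, "utf8"),
  async: (path) => readFile(path, "utf8"),
});

const a = loadText.sync("package.json");  // runs the synchronous branch
const b = await loadText("package.json"); // awaiting takes the async branch
```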
package/dist/_libs/rc9.mjs

@@ -0,0 +1,136 @@
+import { existsSync, readFileSync } from "node:fs";
+import { homedir } from "node:os";
+import { resolve } from "node:path";
+import destr from "destr";
+import { defu } from "defu";
+
+//#region node_modules/.pnpm/rc9@2.1.2/node_modules/rc9/dist/index.mjs
+function isBuffer(obj) {
+  return obj && obj.constructor && typeof obj.constructor.isBuffer === "function" && obj.constructor.isBuffer(obj);
+}
+function keyIdentity(key) {
+  return key;
+}
+function flatten(target, opts) {
+  opts = opts || {};
+  const delimiter$1 = opts.delimiter || ".";
+  const maxDepth = opts.maxDepth;
+  const transformKey = opts.transformKey || keyIdentity;
+  const output = {};
+  function step(object, prev, currentDepth) {
+    currentDepth = currentDepth || 1;
+    Object.keys(object).forEach(function(key) {
+      const value = object[key];
+      const isarray = opts.safe && Array.isArray(value);
+      const type$1 = Object.prototype.toString.call(value);
+      const isbuffer = isBuffer(value);
+      const isobject = type$1 === "[object Object]" || type$1 === "[object Array]";
+      const newKey = prev ? prev + delimiter$1 + transformKey(key) : transformKey(key);
+      if (!isarray && !isbuffer && isobject && Object.keys(value).length && (!opts.maxDepth || currentDepth < maxDepth)) return step(value, newKey, currentDepth + 1);
+      output[newKey] = value;
+    });
+  }
+  step(target);
+  return output;
+}
+function unflatten(target, opts) {
+  opts = opts || {};
+  const delimiter$1 = opts.delimiter || ".";
+  const overwrite = opts.overwrite || false;
+  const transformKey = opts.transformKey || keyIdentity;
+  const result = {};
+  if (isBuffer(target) || Object.prototype.toString.call(target) !== "[object Object]") return target;
+  function getkey(key) {
+    const parsedKey = Number(key);
+    return isNaN(parsedKey) || key.indexOf(".") !== -1 || opts.object ? key : parsedKey;
+  }
+  function addKeys(keyPrefix, recipient, target$1) {
+    return Object.keys(target$1).reduce(function(result$1, key) {
+      result$1[keyPrefix + delimiter$1 + key] = target$1[key];
+      return result$1;
+    }, recipient);
+  }
+  function isEmpty(val) {
+    const type$1 = Object.prototype.toString.call(val);
+    const isArray = type$1 === "[object Array]";
+    const isObject = type$1 === "[object Object]";
+    if (!val) return true;
+    else if (isArray) return !val.length;
+    else if (isObject) return !Object.keys(val).length;
+  }
+  target = Object.keys(target).reduce(function(result$1, key) {
+    const type$1 = Object.prototype.toString.call(target[key]);
+    if (!(type$1 === "[object Object]" || type$1 === "[object Array]") || isEmpty(target[key])) {
+      result$1[key] = target[key];
+      return result$1;
+    } else return addKeys(key, result$1, flatten(target[key], opts));
+  }, {});
+  Object.keys(target).forEach(function(key) {
+    const split = key.split(delimiter$1).map(transformKey);
+    let key1 = getkey(split.shift());
+    let key2 = getkey(split[0]);
+    let recipient = result;
+    while (key2 !== void 0) {
+      if (key1 === "__proto__") return;
+      const type$1 = Object.prototype.toString.call(recipient[key1]);
+      const isobject = type$1 === "[object Object]" || type$1 === "[object Array]";
+      if (!overwrite && !isobject && typeof recipient[key1] !== "undefined") return;
+      if (overwrite && !isobject || !overwrite && recipient[key1] == null) recipient[key1] = typeof key2 === "number" && !opts.object ? [] : {};
+      recipient = recipient[key1];
+      if (split.length > 0) {
+        key1 = getkey(split.shift());
+        key2 = getkey(split[0]);
+      }
+    }
+    recipient[key1] = unflatten(target[key], opts);
+  });
+  return result;
+}
+const RE_KEY_VAL = /^\s*([^\s=]+)\s*=\s*(.*)?\s*$/;
+const RE_LINES = /\n|\r|\r\n/;
+const defaults = {
+  name: ".conf",
+  dir: process.cwd(),
+  flat: false
+};
+function withDefaults(options) {
+  if (typeof options === "string") options = { name: options };
+  return {
+    ...defaults,
+    ...options
+  };
+}
+function parse(contents, options = {}) {
+  const config = {};
+  const lines = contents.split(RE_LINES);
+  for (const line of lines) {
+    const match = line.match(RE_KEY_VAL);
+    if (!match) continue;
+    const key = match[1];
+    if (!key || key === "__proto__" || key === "constructor") continue;
+    const value = destr((match[2] || "").trim());
+    if (key.endsWith("[]")) {
+      const nkey = key.slice(0, Math.max(0, key.length - 2));
+      config[nkey] = (config[nkey] || []).concat(value);
+      continue;
+    }
+    config[key] = value;
+  }
+  return options.flat ? config : unflatten(config, { overwrite: true });
+}
+function parseFile(path$1, options) {
+  if (!existsSync(path$1)) return {};
+  return parse(readFileSync(path$1, "utf8"), options);
+}
+function read(options) {
+  options = withDefaults(options);
+  return parseFile(resolve(options.dir, options.name), options);
+}
+function readUser(options) {
+  options = withDefaults(options);
+  options.dir = process.env.XDG_CONFIG_HOME || homedir();
+  return read(options);
+}
+
+//#endregion
+export { readUser as n, read as t };
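For orientation only (not part of the diff): rc9 parses flat `key=value` rc files, expands dotted keys into nested objects, and runs values through destr. A minimal sketch against the public `rc9` API:

```js
// Illustrative sketch, not part of the diff: reading rc-style config files.
// A line such as `db.port=5432` becomes { db: { port: 5432 } }.
import { read, readUser } from "rc9";

const local = read(".conf");    // ./.conf relative to process.cwd()
const user = readUser(".conf"); // resolved from $XDG_CONFIG_HOME or the home directory
```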
package/dist/_libs/readdirp.mjs

@@ -0,0 +1,237 @@
+import { lstat, readdir, realpath, stat } from "node:fs/promises";
+import { join, relative, resolve, sep } from "node:path";
+import { Readable } from "node:stream";
+
+//#region node_modules/.pnpm/readdirp@5.0.0/node_modules/readdirp/index.js
+const EntryTypes = {
+  FILE_TYPE: "files",
+  DIR_TYPE: "directories",
+  FILE_DIR_TYPE: "files_directories",
+  EVERYTHING_TYPE: "all"
+};
+const defaultOptions = {
+  root: ".",
+  fileFilter: (_entryInfo) => true,
+  directoryFilter: (_entryInfo) => true,
+  type: EntryTypes.FILE_TYPE,
+  lstat: false,
+  depth: 2147483648,
+  alwaysStat: false,
+  highWaterMark: 4096
+};
+Object.freeze(defaultOptions);
+const RECURSIVE_ERROR_CODE = "READDIRP_RECURSIVE_ERROR";
+const NORMAL_FLOW_ERRORS = new Set([
+  "ENOENT",
+  "EPERM",
+  "EACCES",
+  "ELOOP",
+  RECURSIVE_ERROR_CODE
+]);
+const ALL_TYPES = [
+  EntryTypes.DIR_TYPE,
+  EntryTypes.EVERYTHING_TYPE,
+  EntryTypes.FILE_DIR_TYPE,
+  EntryTypes.FILE_TYPE
+];
+const DIR_TYPES = new Set([
+  EntryTypes.DIR_TYPE,
+  EntryTypes.EVERYTHING_TYPE,
+  EntryTypes.FILE_DIR_TYPE
+]);
+const FILE_TYPES = new Set([
+  EntryTypes.EVERYTHING_TYPE,
+  EntryTypes.FILE_DIR_TYPE,
+  EntryTypes.FILE_TYPE
+]);
+const isNormalFlowError = (error) => NORMAL_FLOW_ERRORS.has(error.code);
+const wantBigintFsStats = process.platform === "win32";
+const emptyFn = (_entryInfo) => true;
+const normalizeFilter = (filter) => {
+  if (filter === void 0) return emptyFn;
+  if (typeof filter === "function") return filter;
+  if (typeof filter === "string") {
+    const fl = filter.trim();
+    return (entry) => entry.basename === fl;
+  }
+  if (Array.isArray(filter)) {
+    const trItems = filter.map((item) => item.trim());
+    return (entry) => trItems.some((f) => entry.basename === f);
+  }
+  return emptyFn;
+};
+/** Readable readdir stream, emitting new files as they're being listed. */
+var ReaddirpStream = class extends Readable {
+  parents;
+  reading;
+  parent;
+  _stat;
+  _maxDepth;
+  _wantsDir;
+  _wantsFile;
+  _wantsEverything;
+  _root;
+  _isDirent;
+  _statsProp;
+  _rdOptions;
+  _fileFilter;
+  _directoryFilter;
+  constructor(options = {}) {
+    super({
+      objectMode: true,
+      autoDestroy: true,
+      highWaterMark: options.highWaterMark
+    });
+    const opts = {
+      ...defaultOptions,
+      ...options
+    };
+    const { root, type } = opts;
+    this._fileFilter = normalizeFilter(opts.fileFilter);
+    this._directoryFilter = normalizeFilter(opts.directoryFilter);
+    const statMethod = opts.lstat ? lstat : stat;
+    if (wantBigintFsStats) this._stat = (path$1) => statMethod(path$1, { bigint: true });
+    else this._stat = statMethod;
+    this._maxDepth = opts.depth != null && Number.isSafeInteger(opts.depth) ? opts.depth : defaultOptions.depth;
+    this._wantsDir = type ? DIR_TYPES.has(type) : false;
+    this._wantsFile = type ? FILE_TYPES.has(type) : false;
+    this._wantsEverything = type === EntryTypes.EVERYTHING_TYPE;
+    this._root = resolve(root);
+    this._isDirent = !opts.alwaysStat;
+    this._statsProp = this._isDirent ? "dirent" : "stats";
+    this._rdOptions = {
+      encoding: "utf8",
+      withFileTypes: this._isDirent
+    };
+    this.parents = [this._exploreDir(root, 1)];
+    this.reading = false;
+    this.parent = void 0;
+  }
+  async _read(batch) {
+    if (this.reading) return;
+    this.reading = true;
+    try {
+      while (!this.destroyed && batch > 0) {
+        const par = this.parent;
+        const fil = par && par.files;
+        if (fil && fil.length > 0) {
+          const { path: path$1, depth } = par;
+          const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path$1));
+          const awaited = await Promise.all(slice);
+          for (const entry of awaited) {
+            if (!entry) continue;
+            if (this.destroyed) return;
+            const entryType = await this._getEntryType(entry);
+            if (entryType === "directory" && this._directoryFilter(entry)) {
+              if (depth <= this._maxDepth) this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
+              if (this._wantsDir) {
+                this.push(entry);
+                batch--;
+              }
+            } else if ((entryType === "file" || this._includeAsFile(entry)) && this._fileFilter(entry)) {
+              if (this._wantsFile) {
+                this.push(entry);
+                batch--;
+              }
+            }
+          }
+        } else {
+          const parent = this.parents.pop();
+          if (!parent) {
+            this.push(null);
+            break;
+          }
+          this.parent = await parent;
+          if (this.destroyed) return;
+        }
+      }
+    } catch (error) {
+      this.destroy(error);
+    } finally {
+      this.reading = false;
+    }
+  }
+  async _exploreDir(path$1, depth) {
+    let files;
+    try {
+      files = await readdir(path$1, this._rdOptions);
+    } catch (error) {
+      this._onError(error);
+    }
+    return {
+      files,
+      depth,
+      path: path$1
+    };
+  }
+  async _formatEntry(dirent, path$1) {
+    let entry;
+    const basename$1 = this._isDirent ? dirent.name : dirent;
+    try {
+      const fullPath = resolve(join(path$1, basename$1));
+      entry = {
+        path: relative(this._root, fullPath),
+        fullPath,
+        basename: basename$1
+      };
+      entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
+    } catch (err) {
+      this._onError(err);
+      return;
+    }
+    return entry;
+  }
+  _onError(err) {
+    if (isNormalFlowError(err) && !this.destroyed) this.emit("warn", err);
+    else this.destroy(err);
+  }
+  async _getEntryType(entry) {
+    if (!entry && this._statsProp in entry) return "";
+    const stats = entry[this._statsProp];
+    if (stats.isFile()) return "file";
+    if (stats.isDirectory()) return "directory";
+    if (stats && stats.isSymbolicLink()) {
+      const full = entry.fullPath;
+      try {
+        const entryRealPath = await realpath(full);
+        const entryRealPathStats = await lstat(entryRealPath);
+        if (entryRealPathStats.isFile()) return "file";
+        if (entryRealPathStats.isDirectory()) {
+          const len = entryRealPath.length;
+          if (full.startsWith(entryRealPath) && full.substr(len, 1) === sep) {
+            const recursiveError = /* @__PURE__ */ new Error(`Circular symlink detected: "${full}" points to "${entryRealPath}"`);
+            recursiveError.code = RECURSIVE_ERROR_CODE;
+            return this._onError(recursiveError);
+          }
+          return "directory";
+        }
+      } catch (error) {
+        this._onError(error);
+        return "";
+      }
+    }
+  }
+  _includeAsFile(entry) {
+    const stats = entry && entry[this._statsProp];
+    return stats && this._wantsEverything && !stats.isDirectory();
+  }
+};
+/**
+ * Streaming version: Reads all files and directories in given root recursively.
+ * Consumes ~constant small amount of RAM.
+ * @param root Root directory
+ * @param options Options to specify root (start directory), filters and recursion depth
+ */
+function readdirp(root, options = {}) {
+  let type = options.entryType || options.type;
+  if (type === "both") type = EntryTypes.FILE_DIR_TYPE;
+  if (type) options.type = type;
+  if (!root) throw new Error("readdirp: root argument is required. Usage: readdirp(root, options)");
+  else if (typeof root !== "string") throw new TypeError("readdirp: root argument must be a string. Usage: readdirp(root, options)");
+  else if (type && !ALL_TYPES.includes(type)) throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(", ")}`);
+  options.root = root;
+  return new ReaddirpStream(options);
+}
+
+//#endregion
+export { readdirp as t };
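For orientation only (not part of the diff): ReaddirpStream is an object-mode Readable, so the bundled readdirp can be consumed with async iteration. A minimal sketch against the public `readdirp` API; note that in this major version filters are functions or exact basenames, not globs:

```js
// Illustrative sketch, not part of the diff: streaming a recursive directory
// listing; each entry carries path (relative to root), fullPath and basename.
import { readdirp } from "readdirp";

for await (const entry of readdirp("src", {
  type: "files",
  fileFilter: (e) => e.basename.endsWith(".mjs"),
})) {
  console.log(entry.path);
}
```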