@vibgrate/cli 1.0.40 → 1.0.42
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/baseline-FDWMBM2O.js +10 -0
- package/dist/{chunk-LBZR6G43.js → chunk-LO66M6OC.js} +4 -2
- package/dist/chunk-RNVZIZNL.js +569 -0
- package/dist/{chunk-NU7IIHIG.js → chunk-YFJC5JSQ.js} +1146 -650
- package/dist/cli.js +8 -6
- package/dist/fs-Q63DRR7L.js +30 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -1
- package/package.json +2 -2
- package/dist/baseline-BTB34JBE.js +0 -9
|
@@ -0,0 +1,569 @@
|
|
|
1
|
+
// src/utils/fs.ts
|
|
2
|
+
import * as fs from "fs/promises";
|
|
3
|
+
import * as os from "os";
|
|
4
|
+
import * as path2 from "path";
|
|
5
|
+
|
|
6
|
+
// src/utils/semaphore.ts
/**
 * Counting semaphore that bounds the number of concurrently running tasks.
 * Waiters are woken in FIFO order.
 */
var Semaphore = class {
  /** Remaining permits. */
  available;
  /** Resolvers of callers waiting for a permit, oldest first. */
  queue = [];
  constructor(max) {
    this.available = max;
  }
  /**
   * Acquire a permit, run `fn`, and release the permit when `fn` settles
   * (whether it resolved or threw). Returns/propagates `fn`'s result.
   */
  async run(fn) {
    await this.acquire();
    try {
      return await fn();
    } finally {
      this.release();
    }
  }
  /** Resolves once a permit is available; takes the permit. */
  acquire() {
    if (this.available <= 0) {
      // No permit free: park the caller until release() wakes it.
      return new Promise((wake) => {
        this.queue.push(wake);
      });
    }
    this.available -= 1;
    return Promise.resolve();
  }
  /** Hand the permit to the oldest waiter, or return it to the pool. */
  release() {
    const waiter = this.queue.shift();
    if (waiter === undefined) {
      this.available += 1;
    } else {
      waiter();
    }
  }
};
|
|
34
|
+
|
|
35
|
+
// src/utils/glob.ts
import * as path from "path";

/**
 * Compile a list of glob patterns into one predicate over relative paths.
 * Returns null when no patterns are supplied (nothing matches).
 *
 * @param {string[]} patterns - glob patterns (supports *, **, ?, [...], {a,b})
 * @returns {((relPath: string) => boolean) | null}
 */
function compileGlobs(patterns) {
  if (patterns.length === 0) return null;
  const tests = patterns.map((pattern) => compileOne(normalise(pattern)));
  return (relPath) => {
    const candidate = normalise(relPath);
    return tests.some((test) => test(candidate));
  };
}

/** Convert platform separators to "/" and strip any trailing slashes. */
function normalise(p) {
  return p.split(path.sep).join("/").replace(/\/+$/, "");
}

/** Compile a single normalised pattern into a matcher function. */
function compileOne(pattern) {
  // A bare name with no slash and no wildcards matches itself and
  // everything beneath it (directory-prefix semantics).
  if (!pattern.includes("/") && !hasGlobChars(pattern)) {
    const dirPrefix = pattern + "/";
    return (p) => p === pattern || p.startsWith(dirPrefix);
  }
  const regex = globToRegex(pattern);
  return (p) => regex.test(p);
}

/** True when `s` contains any glob metacharacter. */
function hasGlobChars(s) {
  return /[*?[\]{}]/.test(s);
}

/**
 * Translate a glob pattern into an anchored RegExp.
 * Handles `**` (any number of segments), `*`/`?` (within one segment),
 * `[...]` classes (copied verbatim), and non-nested `{a,b}` alternation.
 */
function globToRegex(pattern) {
  const len = pattern.length;
  let out = "^";
  let pos = 0;
  while (pos < len) {
    const ch = pattern[pos];
    if (ch === "*") {
      if (pattern[pos + 1] === "*") {
        pos += 2;
        if (pattern[pos] === "/") {
          // "**/" — zero or more whole path segments.
          pos += 1;
          out += "(?:.+/)?";
        } else {
          out += ".*";
        }
      } else {
        // "*" — anything except a separator.
        pos += 1;
        out += "[^/]*";
      }
    } else if (ch === "?") {
      // Exactly one non-separator character.
      out += "[^/]";
      pos += 1;
    } else if (ch === "[") {
      // Character class: copied through the closing "]" untouched.
      const classStart = pos;
      pos += 1;
      while (pos < len && pattern[pos] !== "]") pos += 1;
      pos += 1;
      out += pattern.slice(classStart, pos);
    } else if (ch === "{") {
      // Brace alternation {a,b,c} -> (?:a|b|c); branches are literal.
      pos += 1;
      const branches = [];
      let piece = "";
      while (pos < len && pattern[pos] !== "}") {
        if (pattern[pos] === ",") {
          branches.push(piece);
          piece = "";
        } else {
          piece += pattern[pos];
        }
        pos += 1;
      }
      branches.push(piece);
      pos += 1;
      out += "(?:" + branches.map(escapeRegex).join("|") + ")";
    } else {
      out += escapeRegex(ch);
      pos += 1;
    }
  }
  return new RegExp(out + "$");
}

/** Escape regex metacharacters in `s` so it matches literally. */
function escapeRegex(s) {
  return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
|
|
114
|
+
|
|
115
|
+
// src/utils/fs.ts
// Directory names that every walk in this module refuses to descend into
// (dependency trees, VCS metadata, build output, caches).
// NOTE(review): "packages" also skips a monorepo's ./packages workspace
// directory — confirm that is intended.
var SKIP_DIRS = /* @__PURE__ */ new Set([
  "node_modules",
  ".git",
  ".vibgrate",
  ".wrangler",
  ".next",
  "dist",
  "build",
  "out",
  ".turbo",
  ".cache",
  "coverage",
  "bin",
  "obj",
  ".vs",
  "packages",
  "TestResults"
]);
// File extensions (lowercased, leading dot) that are never scanned:
// binary or generated assets with no analyzable text content.
var SKIP_EXTENSIONS = /* @__PURE__ */ new Set([
  // Fonts
  ".woff",
  ".woff2",
  ".ttf",
  ".otf",
  ".eot",
  // Images & vector
  ".png",
  ".jpg",
  ".jpeg",
  ".gif",
  ".ico",
  ".bmp",
  ".tiff",
  ".tif",
  ".webp",
  ".avif",
  ".svg",
  ".heic",
  ".heif",
  ".jfif",
  ".psd",
  ".ai",
  ".eps",
  ".raw",
  ".cr2",
  ".nef",
  ".dng",
  // Video
  ".mp4",
  ".webm",
  ".avi",
  ".mov",
  ".mkv",
  ".wmv",
  ".flv",
  ".m4v",
  ".mpg",
  ".mpeg",
  ".3gp",
  ".ogv",
  // Audio
  ".mp3",
  ".wav",
  ".ogg",
  ".flac",
  ".aac",
  ".wma",
  ".m4a",
  ".opus",
  ".aiff",
  ".mid",
  ".midi",
  // Archives
  ".zip",
  ".tar",
  ".gz",
  ".bz2",
  ".7z",
  ".rar",
  // Compiled / binary
  ".exe",
  ".dll",
  ".so",
  ".dylib",
  ".o",
  ".a",
  ".class",
  ".pyc",
  ".pdb",
  // Source maps & lockfiles (large, not useful for drift analysis)
  // NOTE(review): only ".map" is listed here; lockfiles have no common
  // extension and are presumably handled elsewhere — verify.
  ".map"
]);
// Upper bound (1 MiB) on file contents retained in FileCache's text cache;
// larger files are read but never cached.
var TEXT_CACHE_MAX_BYTES = 1048576;
|
|
209
|
+
/**
 * Memoising filesystem facade shared by all scanners: one directory walk,
 * one read per file, one JSON parse per manifest. All cache values are
 * Promises (cached at call time, before resolution) so concurrent callers
 * share the same in-flight operation.
 */
var FileCache = class _FileCache {
  /** Directory walk results keyed by rootDir */
  walkCache = /* @__PURE__ */ new Map();
  /** File content keyed by absolute path (only files ≤ TEXT_CACHE_MAX_BYTES) */
  textCache = /* @__PURE__ */ new Map();
  /** Parsed JSON keyed by absolute path */
  jsonCache = /* @__PURE__ */ new Map();
  /** pathExists keyed by absolute path */
  existsCache = /* @__PURE__ */ new Map();
  /** User-configured exclude predicate (compiled from glob patterns) */
  excludePredicate = null;
  /** Directories that were auto-skipped because they were stuck (>60s) */
  _stuckPaths = [];
  /** Files skipped because they exceed maxFileSizeToScan */
  _skippedLargeFiles = [];
  /** Maximum file size (bytes) we will read. 0 = unlimited. */
  _maxFileSize = 0;
  /** Root dir for relative-path computation (set by the first walkDir call) */
  _rootDir = null;
  /** Set exclude patterns from config (call once before the walk) */
  setExcludePatterns(patterns) {
    this.excludePredicate = compileGlobs(patterns);
  }
  /** Set the maximum file size in bytes that readTextFile / readJsonFile will process */
  setMaxFileSize(bytes) {
    this._maxFileSize = bytes;
  }
  /** Record a path that timed out or was stuck during scanning */
  addStuckPath(relPath) {
    this._stuckPaths.push(relPath);
  }
  /** Get all paths that were auto-skipped due to being stuck (dirs + scanner files) */
  get stuckPaths() {
    return this._stuckPaths;
  }
  /** @deprecated Use stuckPaths instead */
  get stuckDirs() {
    return this._stuckPaths;
  }
  /** Get files that were skipped because they exceeded maxFileSizeToScan */
  get skippedLargeFiles() {
    return this._skippedLargeFiles;
  }
  // ── Directory walking ──
  /**
   * Walk the directory tree from `rootDir` once, skipping SKIP_DIRS plus
   * common framework output dirs (.nuxt, .output, .svelte-kit).
   *
   * The result is memoised so every scanner filters the same array.
   * Consumers that need additional filtering (e.g. SOURCE_EXTENSIONS,
   * SKIP_EXTENSIONS) do so on the returned entries — no separate walk.
   */
  walkDir(rootDir, onProgress) {
    // NOTE(review): _rootDir is overwritten on every call; with multiple
    // roots, readTextFile's relative paths use the most recent one — confirm
    // single-root usage.
    this._rootDir = rootDir;
    const cached = this.walkCache.get(rootDir);
    if (cached) return cached;
    // Cache the promise itself so concurrent callers share one walk.
    const promise = this._doWalk(rootDir, onProgress);
    this.walkCache.set(rootDir, promise);
    return promise;
  }
  /** Additional dirs skipped only by the cached walk (framework outputs) */
  static EXTRA_SKIP = /* @__PURE__ */ new Set([".nuxt", ".output", ".svelte-kit"]);
  /**
   * Recursive concurrent walk. readdir calls are bounded by a semaphore
   * sized from CPU count; a directory whose readdir takes >60s is recorded
   * in _stuckPaths and skipped instead of hanging the scan.
   */
  async _doWalk(rootDir, onProgress) {
    const results = [];
    const cores = typeof os.availableParallelism === "function" ? os.availableParallelism() : os.cpus().length || 4;
    const maxConcurrentReads = Math.max(8, Math.min(64, cores * 4));
    let foundCount = 0;
    let lastReported = 0;
    const REPORT_INTERVAL = 50;
    const sem = new Semaphore(maxConcurrentReads);
    const STUCK_TIMEOUT_MS = 6e4;
    const extraSkip = _FileCache.EXTRA_SKIP;
    const isExcluded = this.excludePredicate;
    const stuckDirs = this._stuckPaths;
    async function walk(dir) {
      const relDir = path2.relative(rootDir, dir);
      if (onProgress) {
        onProgress(foundCount, relDir || ".");
      }
      let entries;
      try {
        entries = await sem.run(async () => {
          const readPromise = fs.readdir(dir, { withFileTypes: true });
          // Race the readdir against a 60s timeout; a loss marks the dir
          // as stuck and yields null.
          // NOTE(review): the timer is never cleared, so a slow readdir can
          // keep the process alive up to 60s — presumably acceptable for a CLI.
          const result = await Promise.race([
            readPromise.then((e) => ({ ok: true, entries: e })),
            new Promise(
              (resolve2) => setTimeout(() => resolve2({ ok: false }), STUCK_TIMEOUT_MS)
            )
          ]);
          if (!result.ok) {
            stuckDirs.push(relDir || dir);
            return null;
          }
          return result.entries;
        });
      } catch {
        // Unreadable directory (permissions, races): silently skipped.
        return;
      }
      if (!entries) return;
      const subWalks = [];
      for (const e of entries) {
        const absPath = path2.join(dir, e.name);
        const relPath = path2.relative(rootDir, absPath);
        if (isExcluded && isExcluded(relPath)) continue;
        if (e.isDirectory()) {
          if (SKIP_DIRS.has(e.name) || extraSkip.has(e.name)) continue;
          results.push({ absPath, relPath, name: e.name, isFile: false, isDirectory: true });
          subWalks.push(walk(absPath));
        } else if (e.isFile()) {
          const ext = path2.extname(e.name).toLowerCase();
          if (SKIP_EXTENSIONS.has(ext)) continue;
          results.push({ absPath, relPath, name: e.name, isFile: true, isDirectory: false });
          foundCount++;
          // Throttle progress callbacks to one per 50 discovered files.
          if (onProgress && foundCount - lastReported >= REPORT_INTERVAL) {
            lastReported = foundCount;
            onProgress(foundCount, relPath);
          }
        }
        // Entries that are neither file nor directory (e.g. symlinks) are ignored.
      }
      await Promise.all(subWalks);
    }
    await walk(rootDir);
    // Final progress report so the consumer sees the exact total.
    if (onProgress && foundCount !== lastReported) {
      onProgress(foundCount, "");
    }
    return results;
  }
  /**
   * Find files matching a predicate from the cached walk.
   * Returns absolute paths (same contract as the standalone `findFiles`).
   */
  async findFiles(rootDir, predicate) {
    const entries = await this.walkDir(rootDir);
    return entries.filter((e) => e.isFile && predicate(e.name)).map((e) => e.absPath);
  }
  /** All package.json manifests under rootDir (from the cached walk). */
  async findPackageJsonFiles(rootDir) {
    return this.findFiles(rootDir, (name) => name === "package.json");
  }
  /** All .csproj project files under rootDir (from the cached walk). */
  async findCsprojFiles(rootDir) {
    return this.findFiles(rootDir, (name) => name.endsWith(".csproj"));
  }
  /** All .sln solution files under rootDir (from the cached walk). */
  async findSolutionFiles(rootDir) {
    return this.findFiles(rootDir, (name) => name.endsWith(".sln"));
  }
  // ── File content reading ──
  /**
   * Read a text file. Files ≤ 1 MB are cached so subsequent calls from
   * different scanners return the same string. Files > 1 MB (lockfiles,
   * large generated files) are read directly and never retained.
   *
   * If maxFileSizeToScan is set and the file exceeds it, the file is
   * recorded as skipped and an empty string is returned.
   */
  readTextFile(filePath) {
    const abs = path2.resolve(filePath);
    const cached = this.textCache.get(abs);
    if (cached) return cached;
    const maxSize = this._maxFileSize;
    const skippedLarge = this._skippedLargeFiles;
    const rootDir = this._rootDir;
    // The promise is cached synchronously below (before any await inside
    // runs), so concurrent callers share one read; the async body then
    // evicts itself for oversized files.
    const promise = (async () => {
      if (maxSize > 0) {
        try {
          const stat2 = await fs.stat(abs);
          if (stat2.size > maxSize) {
            const rel = rootDir ? path2.relative(rootDir, abs) : abs;
            // NOTE(review): repeated reads of the same oversized file push
            // duplicate entries here (the cache entry is deleted) — confirm
            // consumers dedupe or tolerate this.
            skippedLarge.push(rel);
            this.textCache.delete(abs);
            return "";
          }
        } catch {
          // stat failed — fall through and let readFile surface the error.
        }
      }
      const content = await fs.readFile(abs, "utf8");
      if (content.length > TEXT_CACHE_MAX_BYTES) {
        // Too large to retain: evict so the string can be GC'd after use.
        this.textCache.delete(abs);
      }
      return content;
    })();
    this.textCache.set(abs, promise);
    return promise;
  }
  /**
   * Read and parse a JSON file. The parsed object is cached; the raw
   * text is evicted immediately so we never hold both representations.
   */
  readJsonFile(filePath) {
    const abs = path2.resolve(filePath);
    const cached = this.jsonCache.get(abs);
    if (cached) return cached;
    const promise = this.readTextFile(abs).then((txt) => {
      this.textCache.delete(abs);
      return JSON.parse(txt);
    });
    this.jsonCache.set(abs, promise);
    return promise;
  }
  // ── Existence checks ──
  /** Memoised fs.access check; the cached promise never rejects. */
  pathExists(p) {
    const abs = path2.resolve(p);
    const cached = this.existsCache.get(abs);
    if (cached) return cached;
    const promise = fs.access(abs).then(() => true, () => false);
    this.existsCache.set(abs, promise);
    return promise;
  }
  // ── Lifecycle ──
  /** Release all cached data. Call after the scan completes. */
  clear() {
    this.walkCache.clear();
    this.textCache.clear();
    this.jsonCache.clear();
    this.existsCache.clear();
  }
  /** Number of file content entries currently held */
  get textCacheSize() {
    return this.textCache.size;
  }
  /** Number of parsed JSON entries currently held */
  get jsonCacheSize() {
    return this.jsonCache.size;
  }
};
|
|
432
|
+
/**
 * Fast pre-scan that counts files and directories under `rootDir` without
 * collecting entries. Applies the same skip rules as the cached walk:
 * SKIP_DIRS, framework output dirs, SKIP_EXTENSIONS, plus optional
 * user exclude globs.
 *
 * @param {string} rootDir - directory to count from
 * @param {string[]} [excludePatterns] - optional glob patterns to exclude
 * @returns {Promise<{totalFiles: number, totalDirs: number}>}
 */
async function quickTreeCount(rootDir, excludePatterns) {
  let totalFiles = 0;
  let totalDirs = 0;
  // Bound concurrent readdir calls to avoid exhausting file descriptors.
  const parallelism = typeof os.availableParallelism === "function" ? os.availableParallelism() : os.cpus().length || 4;
  const gate = new Semaphore(Math.max(8, Math.min(128, parallelism * 8)));
  const frameworkOutputs = /* @__PURE__ */ new Set([".nuxt", ".output", ".svelte-kit"]);
  const excluded = excludePatterns ? compileGlobs(excludePatterns) : null;
  async function tally(dir) {
    let children;
    try {
      children = await gate.run(() => fs.readdir(dir, { withFileTypes: true }));
    } catch {
      // Unreadable directory counts as empty.
      return;
    }
    const pending = [];
    for (const child of children) {
      const relPath = path2.relative(rootDir, path2.join(dir, child.name));
      if (excluded && excluded(relPath)) continue;
      if (child.isDirectory()) {
        if (SKIP_DIRS.has(child.name) || frameworkOutputs.has(child.name)) continue;
        totalDirs++;
        pending.push(tally(path2.join(dir, child.name)));
      } else if (child.isFile()) {
        const ext = path2.extname(child.name).toLowerCase();
        if (!SKIP_EXTENSIONS.has(ext)) totalFiles++;
      }
    }
    await Promise.all(pending);
  }
  await tally(rootDir);
  return { totalFiles, totalDirs };
}
|
|
465
|
+
/**
 * Count scannable files in `dir`, skipping SKIP_DIRS, common .NET build
 * output dirs, and files with SKIP_EXTENSIONS.
 *
 * @param {string} dir - directory to count
 * @param {boolean} [recursive=true] - descend into subdirectories
 * @returns {Promise<number>} number of countable files
 */
async function countFilesInDir(dir, recursive = true) {
  let total = 0;
  const dotnetOutputs = /* @__PURE__ */ new Set(["obj", "bin", "Debug", "Release", "TestResults"]);
  async function visit(current) {
    let children;
    try {
      children = await fs.readdir(current, { withFileTypes: true });
    } catch {
      // Unreadable directory counts as empty.
      return;
    }
    const pending = [];
    for (const child of children) {
      if (child.isFile()) {
        const ext = path2.extname(child.name).toLowerCase();
        if (!SKIP_EXTENSIONS.has(ext)) total++;
      } else if (child.isDirectory()) {
        if (!recursive) continue;
        if (SKIP_DIRS.has(child.name) || dotnetOutputs.has(child.name)) continue;
        pending.push(visit(path2.join(current, child.name)));
      }
    }
    await Promise.all(pending);
  }
  await visit(dir);
  return total;
}
|
|
491
|
+
/**
 * Walk `rootDir` and return absolute paths of files whose basename satisfies
 * `predicate`, skipping SKIP_DIRS and SKIP_EXTENSIONS. Concurrent readdir
 * calls are bounded by a semaphore sized from the CPU count.
 *
 * @param {string} rootDir
 * @param {(name: string) => boolean} predicate - tested against the basename
 * @returns {Promise<string[]>} absolute paths of matching files
 */
async function findFiles(rootDir, predicate) {
  const matches = [];
  const parallelism = typeof os.availableParallelism === "function" ? os.availableParallelism() : os.cpus().length || 4;
  const gate = new Semaphore(Math.max(8, Math.min(64, parallelism * 4)));
  async function visit(dir) {
    let children;
    try {
      children = await gate.run(() => fs.readdir(dir, { withFileTypes: true }));
    } catch {
      // Unreadable directory: skip silently.
      return;
    }
    const pending = [];
    for (const child of children) {
      if (child.isDirectory()) {
        if (!SKIP_DIRS.has(child.name)) pending.push(visit(path2.join(dir, child.name)));
      } else if (child.isFile() && predicate(child.name)) {
        const ext = path2.extname(child.name).toLowerCase();
        if (!SKIP_EXTENSIONS.has(ext)) matches.push(path2.join(dir, child.name));
      }
    }
    await Promise.all(pending);
  }
  await visit(rootDir);
  return matches;
}
|
|
518
|
+
/** All package.json manifests under `rootDir` (absolute paths). */
async function findPackageJsonFiles(rootDir) {
  const isManifest = (name) => name === "package.json";
  return findFiles(rootDir, isManifest);
}
|
|
521
|
+
/** All Visual Studio solution (.sln) files under `rootDir` (absolute paths). */
async function findSolutionFiles(rootDir) {
  const isSolution = (name) => name.endsWith(".sln");
  return findFiles(rootDir, isSolution);
}
|
|
524
|
+
/** All .csproj project files under `rootDir` (absolute paths). */
async function findCsprojFiles(rootDir) {
  const isProject = (name) => name.endsWith(".csproj");
  return findFiles(rootDir, isProject);
}
|
|
527
|
+
/**
 * Read `filePath` as UTF-8 and parse it as JSON.
 * Rejects if the file is unreadable or the content is not valid JSON.
 */
async function readJsonFile(filePath) {
  const raw = await fs.readFile(filePath, "utf8");
  return JSON.parse(raw);
}
|
|
531
|
+
/** Read `filePath` as a UTF-8 string. Rejects if the file is unreadable. */
async function readTextFile(filePath) {
  const content = await fs.readFile(filePath, "utf8");
  return content;
}
|
|
534
|
+
/** True when `p` is accessible on disk; false otherwise. Never rejects. */
async function pathExists(p) {
  return fs.access(p).then(
    () => true,
    () => false
  );
}
|
|
542
|
+
/** Create `dir` and any missing parents; a no-op when it already exists. */
async function ensureDir(dir) {
  const opts = { recursive: true };
  await fs.mkdir(dir, opts);
}
|
|
545
|
+
/**
 * Serialize `data` as pretty-printed JSON (2-space indent, trailing newline)
 * and write it to `filePath`, creating parent directories as needed.
 */
async function writeJsonFile(filePath, data) {
  await ensureDir(path2.dirname(filePath));
  const serialized = JSON.stringify(data, null, 2) + "\n";
  await fs.writeFile(filePath, serialized, "utf8");
}
|
|
549
|
+
/** Write `content` to `filePath` as UTF-8, creating parent directories as needed. */
async function writeTextFile(filePath, content) {
  await ensureDir(path2.dirname(filePath));
  await fs.writeFile(filePath, content, "utf8");
}
|
|
553
|
+
|
|
554
|
+
// Public surface of this bundled chunk (export list emitted by the bundler).
export {
  Semaphore,
  FileCache,
  quickTreeCount,
  countFilesInDir,
  findFiles,
  findPackageJsonFiles,
  findSolutionFiles,
  findCsprojFiles,
  readJsonFile,
  readTextFile,
  pathExists,
  ensureDir,
  writeJsonFile,
  writeTextFile
};
|