@reliverse/dler 2.0.6 → 2.0.8
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/package.json +16 -15
- package/src/cli.ts +0 -8
- package/src/cmds/build/cmd.ts +0 -568
- package/src/cmds/clean/cmd.ts +0 -166
- package/src/cmds/clean/impl.ts +0 -900
- package/src/cmds/clean/presets.ts +0 -158
- package/src/cmds/clean/types.ts +0 -71
- package/src/cmds/init/cmd.ts +0 -68
- package/src/cmds/init/impl/config.ts +0 -105
- package/src/cmds/init/impl/generators.ts +0 -220
- package/src/cmds/init/impl/prompts.ts +0 -137
- package/src/cmds/init/impl/types.ts +0 -25
- package/src/cmds/init/impl/utils.ts +0 -17
- package/src/cmds/init/impl/validators.ts +0 -55
- package/src/cmds/integrate/cmd.ts +0 -82
- package/src/cmds/integrate/impl.ts +0 -204
- package/src/cmds/integrate/integrations/base.ts +0 -69
- package/src/cmds/integrate/integrations/nextjs.ts +0 -227
- package/src/cmds/integrate/integrations/registry.ts +0 -45
- package/src/cmds/integrate/integrations/ultracite.ts +0 -53
- package/src/cmds/integrate/types.ts +0 -48
- package/src/cmds/integrate/utils/biome.ts +0 -173
- package/src/cmds/integrate/utils/context.ts +0 -148
- package/src/cmds/integrate/utils/temp.ts +0 -47
- package/src/cmds/perf/analysis/bundle.ts +0 -311
- package/src/cmds/perf/analysis/filesystem.ts +0 -324
- package/src/cmds/perf/analysis/monorepo.ts +0 -439
- package/src/cmds/perf/benchmarks/command.ts +0 -230
- package/src/cmds/perf/benchmarks/memory.ts +0 -249
- package/src/cmds/perf/benchmarks/runner.ts +0 -220
- package/src/cmds/perf/cmd.ts +0 -285
- package/src/cmds/perf/impl.ts +0 -411
- package/src/cmds/perf/reporters/console.ts +0 -331
- package/src/cmds/perf/reporters/html.ts +0 -984
- package/src/cmds/perf/reporters/json.ts +0 -42
- package/src/cmds/perf/types.ts +0 -220
- package/src/cmds/perf/utils/cache.ts +0 -234
- package/src/cmds/perf/utils/formatter.ts +0 -190
- package/src/cmds/perf/utils/stats.ts +0 -153
- package/src/cmds/publish/cmd.ts +0 -213
- package/src/cmds/shell/cmd.ts +0 -61
- package/src/cmds/tsc/cache.ts +0 -237
- package/src/cmds/tsc/cmd.ts +0 -139
- package/src/cmds/tsc/impl.ts +0 -855
- package/src/cmds/tsc/types.ts +0 -66
- package/tsconfig.json +0 -9
package/src/cmds/clean/impl.ts
DELETED
@@ -1,900 +0,0 @@
// apps/dler/src/cmds/clean/impl.ts

import { existsSync, rmSync, statSync } from "node:fs";
import { join, resolve } from "node:path";
import { logger } from "@reliverse/dler-logger";
import pMap from "@reliverse/dler-mapper";
import { createIgnoreFilter, normalizePatterns } from "@reliverse/dler-matcher";
import {
  getWorkspacePatterns,
  hasWorkspaces,
  readPackageJSON,
} from "@reliverse/dler-pkg-tsc";
import { confirmPrompt } from "@reliverse/dler-prompt";
import {
  LOCK_FILE_PATTERNS,
  mergePatterns,
  parseCustomPatterns,
  parsePresets,
  validatePatterns,
} from "./presets";
import type {
  CleanError,
  CleanOptions,
  CleanSummary,
  FileMatch,
  PackageCleanResult,
  PackageInfo,
} from "./types";

const DEFAULT_CONCURRENCY = 5;

// ============================================================================
// Package Discovery
// ============================================================================

const findMonorepoRoot = async (
  startDir?: string,
  useCwd = false,
): Promise<string | null> => {
  let currentDir = resolve(startDir ?? process.cwd());

  // If useCwd is true, only check the specified directory, don't search up
  if (useCwd) {
    const pkgPath = join(currentDir, "package.json");

    if (existsSync(pkgPath)) {
      const pkg = await readPackageJSON(currentDir);

      if (pkg && hasWorkspaces(pkg)) {
        return currentDir;
      }
    }

    return null;
  }

  // Original behavior: search up the directory tree
  while (currentDir !== "/") {
    const pkgPath = join(currentDir, "package.json");

    if (existsSync(pkgPath)) {
      const pkg = await readPackageJSON(currentDir);

      if (pkg && hasWorkspaces(pkg)) {
        return currentDir;
      }
    }

    const parentDir = resolve(currentDir, "..");
    if (parentDir === currentDir) break;
    currentDir = parentDir;
  }

  return null;
};

const resolvePackageInfo = async (
  packagePath: string,
  isRoot = false,
): Promise<PackageInfo | null> => {
  const pkgJsonPath = join(packagePath, "package.json");

  if (!existsSync(pkgJsonPath)) {
    return null;
  }

  try {
    const pkg = await readPackageJSON(packagePath);

    if (!pkg?.name) {
      return null;
    }

    return {
      name: pkg.name,
      path: packagePath,
      isRoot,
    };
  } catch {
    return null;
  }
};

const getWorkspacePackages = async (
  cwd?: string,
  useCwd = false,
): Promise<PackageInfo[]> => {
  const monorepoRoot = await findMonorepoRoot(cwd, useCwd);

  if (!monorepoRoot) {
    throw new Error(
      "❌ No monorepo found. Ensure package.json has 'workspaces' field.",
    );
  }

  const rootPkg = await readPackageJSON(monorepoRoot);
  if (!rootPkg) {
    throw new Error("❌ Could not read root package.json");
  }

  const patterns = getWorkspacePatterns(rootPkg);

  if (!patterns.length) {
    throw new Error("❌ No workspace patterns found in package.json");
  }

  const packages: PackageInfo[] = [];
  const seenPaths = new Set<string>();

  for (const pattern of patterns) {
    // Check if pattern contains wildcards
    if (pattern.includes('*')) {
      // Pattern with wildcards - use glob
      const glob = new Bun.Glob(pattern);
      const matches = glob.scanSync({ cwd: monorepoRoot, onlyFiles: false });

      for (const match of matches) {
        const packagePath = resolve(monorepoRoot, match);

        if (seenPaths.has(packagePath)) continue;
        seenPaths.add(packagePath);

        const pkgInfo = await resolvePackageInfo(packagePath, false);

        if (pkgInfo) {
          packages.push(pkgInfo);
        }
      }
    } else {
      // Direct package path (no wildcards)
      const packagePath = resolve(monorepoRoot, pattern);

      if (seenPaths.has(packagePath)) continue;
      seenPaths.add(packagePath);

      const pkgInfo = await resolvePackageInfo(packagePath, false);

      if (pkgInfo) {
        packages.push(pkgInfo);
      }
    }
  }

  return packages;
};

const getSingleRepoPackages = async (cwd?: string): Promise<PackageInfo[]> => {
  const currentDir = resolve(cwd ?? process.cwd());
  const pkgInfo = await resolvePackageInfo(currentDir, true);

  if (!pkgInfo) {
    throw new Error("❌ No package.json found in current directory");
  }

  return [pkgInfo];
};

// ============================================================================
// File Discovery
// ============================================================================

const calculateSize = (path: string): number => {
  try {
    const stats = statSync(path);
    if (stats.isDirectory()) {
      // For directories, we'll calculate recursively
      return calculateDirectorySize(path);
    }
    return stats.size;
  } catch {
    return 0;
  }
};

const calculateDirectorySize = (dirPath: string): number => {
  try {
    let totalSize = 0;
    const entries =
      existsSync(dirPath) && statSync(dirPath).isDirectory()
        ? Array.from(
            new Bun.Glob("**/*").scanSync({ cwd: dirPath, onlyFiles: true }),
          )
        : [];

    for (const entry of entries) {
      try {
        const fullPath = join(dirPath, entry);
        const stats = statSync(fullPath);
        totalSize += stats.size;
      } catch {
        // Ignore files we can't access
      }
    }

    return totalSize;
  } catch {
    return 0;
  }
};

const isAbsolutePath = (path: string): boolean =>
  path.startsWith("/") || (path.length > 1 && path[1] === ":");

const findMatchingFiles = (
  targetDir: string,
  patterns: string[],
  subdirs = false,
): FileMatch[] => {
  const matches: FileMatch[] = [];
  const searchDirs = subdirs ? [targetDir] : [targetDir];

  for (const pattern of patterns) {
    // Handle absolute paths directly
    if (isAbsolutePath(pattern)) {
      if (existsSync(pattern)) {
        const stats = statSync(pattern);
        const size = calculateSize(pattern);

        matches.push({
          path: pattern,
          size,
          isDirectory: stats.isDirectory(),
          category: getCategoryForPattern(pattern),
        });
      }
      continue;
    }

    // Handle relative patterns with glob
    for (const searchDir of searchDirs) {
      try {
        const glob = new Bun.Glob(pattern);
        const globMatches = glob.scanSync({
          cwd: searchDir,
          onlyFiles: false,
          dot: true, // Include hidden files/directories
        });

        for (const match of globMatches) {
          const fullPath = join(searchDir, match);

          if (existsSync(fullPath)) {
            const stats = statSync(fullPath);
            const size = calculateSize(fullPath);

            matches.push({
              path: fullPath,
              size,
              isDirectory: stats.isDirectory(),
              category: getCategoryForPattern(pattern),
            });
          }
        }
      } catch (error) {
        // Ignore glob errors
        if (process.env.DEBUG) {
          console.warn(`Glob error for pattern ${pattern}:`, error);
        }
      }
    }
  }

  return matches;
};

const getCategoryForPattern = (pattern: string): string => {
  // Map patterns to categories for better organization
  if (pattern.includes("dist")) return "build";
  if (pattern.includes("_generated")) return "db";
  if (pattern.includes(".basehub")) return "cms";
  if (
    pattern.includes(".next") ||
    pattern.includes(".nuxt") ||
    pattern.includes(".expo")
  )
    return "frontend";
  if (pattern.includes(".source")) return "docs";
  if (pattern.includes(".react-email")) return "email";
  if (
    pattern.includes(".turbo") ||
    pattern.includes(".vercel") ||
    pattern.includes(".wrangler")
  )
    return "build-tools";
  if (pattern.includes("node_modules")) return "deps";
  return "other";
};

// ============================================================================
// File Deletion
// ============================================================================

const deleteFile = (filePath: string): CleanError | null => {
  try {
    rmSync(filePath, { recursive: true, force: true });
    return null;
  } catch (error) {
    return {
      path: filePath,
      error: error instanceof Error ? error.message : String(error),
    };
  }
};

const deleteFiles = async (
  files: FileMatch[],
  dryRun = false,
): Promise<{
  deletedCount: number;
  deletedSize: number;
  errors: CleanError[];
}> => {
  if (dryRun) {
    return {
      deletedCount: files.length,
      deletedSize: files.reduce((sum, file) => sum + file.size, 0),
      errors: [],
    };
  }

  let deletedCount = 0;
  let deletedSize = 0;
  const errors: CleanError[] = [];

  for (const file of files) {
    const error = deleteFile(file.path);
    if (error) {
      errors.push(error);
    } else {
      deletedCount++;
      deletedSize += file.size;
    }
  }

  return { deletedCount, deletedSize, errors };
};

// ============================================================================
// Package Processing
// ============================================================================

const discoverPackageFiles = async (
  pkg: PackageInfo,
  patterns: string[],
  options: CleanOptions,
): Promise<PackageCleanResult> => {
  const { subdirs = false, verbose = false } = options;

  if (verbose) {
    logger.info(`🔍 Scanning ${pkg.name}...`);
  }

  const files = findMatchingFiles(pkg.path, patterns, subdirs);

  if (verbose && files.length > 0) {
    logger.info(`  Found ${files.length} files/directories to clean`);
  }

  // During discovery phase, we don't delete files yet
  return {
    package: pkg,
    files,
    deletedCount: 0,
    deletedSize: 0,
    errors: [],
  };
};

const cleanPackage = async (
  pkg: PackageInfo,
  patterns: string[],
  options: CleanOptions,
): Promise<PackageCleanResult> => {
  const { subdirs = false, dryRun = false, verbose = false } = options;

  if (verbose) {
    logger.info(`🔍 Scanning ${pkg.name}...`);
  }

  const files = findMatchingFiles(pkg.path, patterns, subdirs);

  if (verbose && files.length > 0) {
    logger.info(`  Found ${files.length} files/directories to clean`);
  }

  const { deletedCount, deletedSize, errors } = await deleteFiles(
    files,
    dryRun,
  );

  if (verbose) {
    const status = errors.length === 0 ? "✅" : "⚠️";
    logger.log(
      `${status} ${pkg.name}: ${deletedCount} deleted, ${errors.length} errors`,
    );
  }

  return {
    package: pkg,
    files,
    deletedCount,
    deletedSize,
    errors,
  };
};

// ============================================================================
// Lock Files Processing
// ============================================================================

const cleanLockFiles = async (
  rootDir: string,
  deleteLockFiles: boolean,
  dryRun = false,
): Promise<{
  deletedCount: number;
  deletedSize: number;
  errors: CleanError[];
}> => {
  if (!deleteLockFiles) {
    return { deletedCount: 0, deletedSize: 0, errors: [] };
  }

  const lockFiles: FileMatch[] = [];

  for (const pattern of LOCK_FILE_PATTERNS) {
    const fullPath = join(rootDir, pattern);
    if (existsSync(fullPath)) {
      const stats = statSync(fullPath);
      lockFiles.push({
        path: fullPath,
        size: stats.size,
        isDirectory: false,
        category: "deps",
      });
    }
  }

  return deleteFiles(lockFiles, dryRun);
};

// ============================================================================
// Confirmation and Display
// ============================================================================

const formatBytes = (bytes: number): string => {
  if (bytes === 0) return "0 B";

  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB"];
  const i = Math.floor(Math.log(bytes) / Math.log(k));

  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))} ${sizes[i]}`;
};

const displayPreview = (
  results: PackageCleanResult[],
  lockFilesResult: { deletedCount: number; deletedSize: number },
): void => {
  logger.log("━".repeat(60));
  logger.log("🧹 Clean Preview:");
  logger.log("━".repeat(60));

  let totalFiles = 0;
  let totalSize = 0;

  for (const result of results) {
    if (result.files.length === 0) continue;

    logger.log(
      `\n📦 ${result.package.name}${result.package.isRoot ? " (root)" : ""}:`,
    );

    // Group by category
    const byCategory = result.files.reduce(
      (acc, file) => {
        if (!acc[file.category]) acc[file.category] = [];
        acc[file.category]!.push(file);
        return acc;
      },
      {} as Record<string, FileMatch[]>,
    );

    for (const [category, files] of Object.entries(byCategory)) {
      const categorySize = files.reduce((sum, file) => sum + file.size, 0);
      logger.log(
        `  ${category}: ${files.length} files (${formatBytes(categorySize)})`,
      );

      if (files.length <= 5) {
        for (const file of files) {
          const relativePath = file.path.replace(result.package.path + "/", "");
          logger.log(`    • ${relativePath}`);
        }
      } else {
        logger.log(`    • ... and ${files.length - 5} more files`);
      }
    }

    totalFiles += result.files.length;
    totalSize += result.files.reduce((sum, file) => sum + file.size, 0);
  }

  if (lockFilesResult.deletedCount > 0) {
    logger.log(
      `\n🔒 Lock files: ${lockFilesResult.deletedCount} files (${formatBytes(lockFilesResult.deletedSize)})`,
    );
    totalFiles += lockFilesResult.deletedCount;
    totalSize += lockFilesResult.deletedSize;
  }

  logger.log(`\n📊 Total: ${totalFiles} files (${formatBytes(totalSize)})`);
  logger.log("━".repeat(60));
};

const askConfirmation = async (force: boolean): Promise<boolean> => {
  if (force) {
    return true;
  }

  try {
    return await confirmPrompt("Proceed with deletion?", false);
  } catch {
    // If prompt fails, default to false for safety
    return false;
  }
};

// ============================================================================
// Main Orchestration
// ============================================================================

export const runCleanOnAllPackages = async (
  ignore?: string | string[],
  cwd?: string,
  options: CleanOptions = {},
): Promise<CleanSummary> => {
  const {
    presets: presetsString,
    custom: customString,
    dryRun = false,
    force = false,
    verbose = false,
    deleteLockFiles = false,
  } = options;

  const presets = parsePresets(presetsString);
  const customPatterns = parseCustomPatterns(customString);

  // Validate that at least one pattern source is provided
  validatePatterns(presets, customPatterns);

  const patterns = mergePatterns(presets, customPatterns);

  // Check if we have any absolute paths in custom patterns
  const hasAbsolutePaths = customPatterns.some((pattern) =>
    isAbsolutePath(pattern),
  );

  // Log discovery start
  if (verbose) {
    logger.info("🔍 Discovering files to clean...");
  }

  // Execute the main logic
  const result = await (async () => {
    // If we have only absolute paths and no presets, just clean those directly
    if (
      hasAbsolutePaths &&
      presets.length === 0 &&
      customPatterns.every((pattern) => isAbsolutePath(pattern))
    ) {
      if (verbose) {
        logger.info(`  Processing absolute paths directly`);
      }

      // For absolute paths, we don't need to search in directories
      const files: FileMatch[] = [];
      for (const pattern of patterns) {
        if (isAbsolutePath(pattern) && existsSync(pattern)) {
          const stats = statSync(pattern);
          const size = calculateSize(pattern);
          files.push({
            path: pattern,
            size,
            isDirectory: stats.isDirectory(),
            category: getCategoryForPattern(pattern),
          });
        }
      }

      // Display preview
      displayPreview(
        [
          {
            package: { name: "Absolute paths", path: "", isRoot: true },
            files,
            deletedCount: 0,
            deletedSize: 0,
            errors: [],
          },
        ],
        { deletedCount: 0, deletedSize: 0 },
      );

      if (!dryRun) {
        const shouldProceed = await askConfirmation(force);

        if (!shouldProceed) {
          logger.info("❌ Clean cancelled by user");
          process.exit(0);
        }

        // Delete files
        const { deletedCount, deletedSize, errors } = await deleteFiles(
          files,
          dryRun,
        );

        const summary: CleanSummary = {
          totalPackages: 1,
          processedPackages: 1,
          totalFiles: files.length,
          totalSize: files.reduce((sum, file) => sum + file.size, 0),
          deletedFiles: deletedCount,
          deletedSize: deletedSize,
          errors,
          hasErrors: errors.length > 0,
          results: [
            {
              package: { name: "Absolute paths", path: "", isRoot: true },
              files,
              deletedCount,
              deletedSize,
              errors,
            },
          ],
        };

        // Display final summary
        logger.log("\n" + "━".repeat(60));
        logger.log("📊 Clean Summary:");
        logger.log(
          `  Files ${dryRun ? "would be" : ""} deleted: ${summary.deletedFiles}`,
        );
        logger.log(
          `  Size ${dryRun ? "would be" : ""} freed: ${formatBytes(summary.deletedSize)}`,
        );

        if (summary.hasErrors) {
          logger.log(`  Errors: ${summary.errors.length}`);
          logger.error("\n❌ Errors occurred during cleanup:");
          for (const error of summary.errors) {
            logger.error(`  • ${error.path}: ${error.error}`);
          }
        }

        logger.log("━".repeat(60));

        return summary;
      }

      // If dry run, return early
      if (dryRun) {
        const summary: CleanSummary = {
          totalPackages: 1,
          processedPackages: 1,
          totalFiles: files.length,
          totalSize: files.reduce((sum, file) => sum + file.size, 0),
          deletedFiles: files.length,
          deletedSize: files.reduce((sum, file) => sum + file.size, 0),
          errors: [],
          hasErrors: false,
          results: [
            {
              package: { name: "Absolute paths", path: "", isRoot: true },
              files,
              deletedCount: files.length,
              deletedSize: files.reduce((sum, file) => sum + file.size, 0),
              errors: [],
            },
          ],
        };

        logger.log("\n" + "━".repeat(60));
        logger.log("📊 Clean Summary:");
        logger.log(`  Files would be deleted: ${summary.deletedFiles}`);
        logger.log(
          `  Size would be freed: ${formatBytes(summary.deletedSize)}`,
        );
        logger.log("━".repeat(60));

        return summary;
      }
    }

    // Detect if we're in a monorepo or single repo
    let packages: PackageInfo[];
    let isMonorepo = false;
    const useCwd = !!cwd; // If cwd is provided, don't search up for monorepo root

    try {
      packages = await getWorkspacePackages(cwd, useCwd);
      isMonorepo = true;

      if (verbose) {
        logger.info(`  Found ${packages.length} packages in monorepo`);
      }
    } catch {
      packages = await getSingleRepoPackages(cwd);

      if (verbose) {
        logger.info(`  Single repo mode: ${packages[0]?.name}`);
      }
    }

    // Filter packages if ignore patterns provided
    if (isMonorepo && ignore) {
      const ignoreFilter = createIgnoreFilter(ignore);
      const filteredPackages = ignoreFilter(packages);
      const ignoredCount = packages.length - filteredPackages.length;

      if (ignoredCount > 0) {
        const patterns = normalizePatterns(ignore);
        logger.info(
          `  Ignoring ${ignoredCount} packages matching: ${patterns.join(", ")}`,
        );
      }

      packages = filteredPackages;
    }

    // Discover files
    const results = await pMap(
      packages,
      async (pkg) => discoverPackageFiles(pkg, patterns, options),
      {
        concurrency: DEFAULT_CONCURRENCY,
        stopOnError: false,
      },
    );

    // Clean lock files if requested
    const rootDir = cwd ?? process.cwd();
    const lockFilesResult = await cleanLockFiles(
      rootDir,
      deleteLockFiles,
      dryRun,
    );

    // Display preview
    displayPreview(results, lockFilesResult);

    if (!dryRun) {
      const shouldProceed = await askConfirmation(force);

      if (!shouldProceed) {
        logger.info("❌ Clean cancelled by user");
        process.exit(0);
      }

      // Actually delete files
      const cleanedResults = await pMap(
        packages,
        async (pkg) => cleanPackage(pkg, patterns, options),
        {
          concurrency: DEFAULT_CONCURRENCY,
          stopOnError: false,
        },
      );

      // Clean lock files if requested
      if (deleteLockFiles) {
        await cleanLockFiles(rootDir, true, false);
      }

      // Calculate totals
      const totalFiles =
        cleanedResults.reduce((sum, r) => sum + r.files.length, 0) +
        lockFilesResult.deletedCount;
      const totalSize =
        cleanedResults.reduce(
          (sum, r) => sum + r.files.reduce((s, f) => s + f.size, 0),
          0,
        ) + lockFilesResult.deletedSize;
      const deletedFiles =
        cleanedResults.reduce((sum, r) => sum + r.deletedCount, 0) +
        lockFilesResult.deletedCount;
      const deletedSize =
        cleanedResults.reduce((sum, r) => sum + r.deletedSize, 0) +
        lockFilesResult.deletedSize;
      const allErrors = cleanedResults.flatMap((r) => r.errors);

      const summary: CleanSummary = {
        totalPackages: packages.length,
        processedPackages: cleanedResults.length,
        totalFiles,
        totalSize,
        deletedFiles,
        deletedSize,
        errors: allErrors,
        hasErrors: allErrors.length > 0,
        results: cleanedResults,
      };

      // Display final summary
      logger.log("\n" + "━".repeat(60));
      logger.log("📊 Clean Summary:");
      logger.log(`  Packages processed: ${summary.processedPackages}`);
      logger.log(
        `  Files ${dryRun ? "would be" : ""} deleted: ${summary.deletedFiles}`,
      );
      logger.log(
        `  Size ${dryRun ? "would be" : ""} freed: ${formatBytes(summary.deletedSize)}`,
      );

      if (summary.hasErrors) {
        logger.log(`  Errors: ${summary.errors.length}`);
        logger.error("\n❌ Errors occurred during cleanup:");
        for (const error of summary.errors) {
          logger.error(`  • ${error.path}: ${error.error}`);
        }
      }

      logger.log("━".repeat(60));

      return summary;
    }

    // If dry run, return early
    const totalFiles =
      results.reduce((sum, r) => sum + r.files.length, 0) +
      lockFilesResult.deletedCount;
    const totalSize =
      results.reduce(
        (sum, r) => sum + r.files.reduce((s, f) => s + f.size, 0),
        0,
      ) + lockFilesResult.deletedSize;
    const deletedFiles =
      results.reduce((sum, r) => sum + r.deletedCount, 0) +
      lockFilesResult.deletedCount;
    const deletedSize =
      results.reduce((sum, r) => sum + r.deletedSize, 0) +
      lockFilesResult.deletedSize;
    const allErrors = results.flatMap((r) => r.errors);

    const summary: CleanSummary = {
      totalPackages: packages.length,
      processedPackages: results.length,
      totalFiles,
      totalSize,
      deletedFiles,
      deletedSize,
      errors: allErrors,
      hasErrors: allErrors.length > 0,
      results,
    };

    // Display final summary
    logger.log("\n" + "━".repeat(60));
    logger.log("📊 Clean Summary:");
    logger.log(`  Packages processed: ${summary.processedPackages}`);
    logger.log(`  Files would be deleted: ${summary.deletedFiles}`);
    logger.log(`  Size would be freed: ${formatBytes(summary.deletedSize)}`);

    if (summary.hasErrors) {
      logger.log(`  Errors: ${summary.errors.length}`);
      logger.error("\n❌ Errors occurred during cleanup:");
      for (const error of summary.errors) {
        logger.error(`  • ${error.path}: ${error.error}`);
      }
    }

    logger.log("━".repeat(60));

    return summary;
  })();

  return result;
};
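For reference, the removed module's only export is runCleanOnAllPackages(ignore?, cwd?, options?). A minimal caller sketch follows; the option names mirror the CleanOptions fields destructured at the top of that function, while the concrete values and the surrounding wiring (presumably what the also-removed clean/cmd.ts provided) are assumptions, not taken from the package.

// Hypothetical caller sketch for the removed API. Option names come from the
// destructuring in runCleanOnAllPackages above; the values are illustrative.
import { runCleanOnAllPackages } from "./impl";

const summary = await runCleanOnAllPackages(
  ["docs-*"],      // ignore: skip workspace packages matching this pattern
  process.cwd(),   // cwd: treat the current directory as the starting point
  {
    presets: "build",    // parsed via parsePresets(); exact string format assumed
    custom: "coverage",  // parsed via parseCustomPatterns(); format assumed
    dryRun: true,        // preview only; deleteFiles() short-circuits
    force: false,        // a real run would prompt before deleting
    verbose: true,
    deleteLockFiles: false,
  },
);

if (summary.hasErrors) {
  process.exitCode = 1;
}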