@tukuyomil032/broom 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +554 -0
  3. package/dist/commands/analyze.js +371 -0
  4. package/dist/commands/backup.js +257 -0
  5. package/dist/commands/clean.js +255 -0
  6. package/dist/commands/completion.js +714 -0
  7. package/dist/commands/config.js +474 -0
  8. package/dist/commands/doctor.js +280 -0
  9. package/dist/commands/duplicates.js +325 -0
  10. package/dist/commands/help.js +34 -0
  11. package/dist/commands/index.js +22 -0
  12. package/dist/commands/installer.js +266 -0
  13. package/dist/commands/optimize.js +270 -0
  14. package/dist/commands/purge.js +271 -0
  15. package/dist/commands/remove.js +184 -0
  16. package/dist/commands/reports.js +173 -0
  17. package/dist/commands/schedule.js +249 -0
  18. package/dist/commands/status.js +468 -0
  19. package/dist/commands/touchid.js +230 -0
  20. package/dist/commands/uninstall.js +336 -0
  21. package/dist/commands/update.js +182 -0
  22. package/dist/commands/watch.js +258 -0
  23. package/dist/index.js +131 -0
  24. package/dist/scanners/base.js +21 -0
  25. package/dist/scanners/browser-cache.js +111 -0
  26. package/dist/scanners/dev-cache.js +64 -0
  27. package/dist/scanners/docker.js +96 -0
  28. package/dist/scanners/downloads.js +66 -0
  29. package/dist/scanners/homebrew.js +82 -0
  30. package/dist/scanners/index.js +126 -0
  31. package/dist/scanners/installer.js +87 -0
  32. package/dist/scanners/ios-backups.js +82 -0
  33. package/dist/scanners/node-modules.js +75 -0
  34. package/dist/scanners/temp-files.js +65 -0
  35. package/dist/scanners/trash.js +90 -0
  36. package/dist/scanners/user-cache.js +62 -0
  37. package/dist/scanners/user-logs.js +53 -0
  38. package/dist/scanners/xcode.js +124 -0
  39. package/dist/types/index.js +23 -0
  40. package/dist/ui/index.js +5 -0
  41. package/dist/ui/monitors.js +345 -0
  42. package/dist/ui/output.js +304 -0
  43. package/dist/ui/prompts.js +270 -0
  44. package/dist/utils/config.js +133 -0
  45. package/dist/utils/debug.js +119 -0
  46. package/dist/utils/fs.js +283 -0
  47. package/dist/utils/help.js +265 -0
  48. package/dist/utils/index.js +6 -0
  49. package/dist/utils/paths.js +142 -0
  50. package/dist/utils/report.js +404 -0
  51. package/package.json +87 -0
@@ -0,0 +1,371 @@
1
+ /**
2
+ * Analyze command - Disk space analysis with drill-down
3
+ */
4
+ import chalk from 'chalk';
5
+ import { Command } from 'commander';
6
+ import { enhanceCommandHelp } from '../utils/help.js';
7
+ import { readdir, stat } from 'fs/promises';
8
+ import { join, basename } from 'path';
9
+ import { exists, getSize, formatSize, expandPath } from '../utils/fs.js';
10
+ import { printHeader, warning, info, separator, createSpinner, succeedSpinner, failSpinner, } from '../ui/output.js';
11
+ // Fixed column widths for aligned display
12
+ const NAME_WIDTH = 25;
13
+ const BAR_WIDTH = 30;
14
/**
 * Scan a file or directory and collect size information, optionally one
 * level of children per unit of `depth`.
 *
 * @param {string} dirPath - Path to scan.
 * @param {number} depth - Maximum depth at which children are enumerated.
 * @param {number} [currentDepth=0] - Recursion level of this call.
 * @returns {Promise<object|null>} `{ path, name, size, isDirectory, children? }`
 *   or null when the path cannot be stat'ed.
 */
async function scanDirectory(dirPath, depth, currentDepth = 0) {
    try {
        const stats = await stat(dirPath);
        const name = basename(dirPath) || dirPath;
        if (!stats.isDirectory()) {
            return {
                path: dirPath,
                name,
                size: stats.size,
                isDirectory: false,
            };
        }
        // Total size of the directory tree.
        const size = await getSize(dirPath);
        // Named `result` (not `info`) to avoid shadowing the `info` helper
        // imported from ../ui/output.js.
        const result = {
            path: dirPath,
            name,
            size,
            isDirectory: true,
        };
        // Only enumerate children while within the requested depth.
        if (currentDepth < depth) {
            try {
                // Hoisted out of the per-entry loop: previously this dynamic
                // import ran once for every directory entry.
                const { isExcludedPath } = await import('../utils/fs.js');
                const entries = await readdir(dirPath);
                const children = [];
                for (const entry of entries) {
                    // Skip hidden files at top level
                    if (entry.startsWith('.') && currentDepth === 0) {
                        continue;
                    }
                    const childPath = join(dirPath, entry);
                    // Skip excluded paths (iCloud Drive, etc.)
                    if (isExcludedPath(childPath)) {
                        continue;
                    }
                    try {
                        const childStats = await stat(childPath);
                        const childSize = childStats.isDirectory() ? await getSize(childPath) : childStats.size;
                        children.push({
                            path: childPath,
                            name: entry,
                            size: childSize,
                            isDirectory: childStats.isDirectory(),
                        });
                    }
                    catch {
                        // Skip if cannot access
                    }
                }
                // Sort by size descending
                children.sort((a, b) => b.size - a.size);
                result.children = children;
            }
            catch {
                // Cannot read directory
            }
        }
        return result;
    }
    catch {
        return null;
    }
}
81
/**
 * Render a bordered horizontal bar for the Quick Analysis table, with
 * vertical gridlines every 20% of the width.
 *
 * @param {number} size - Value to display.
 * @param {number} maxSize - Value corresponding to a full bar.
 * @param {number} [width=20] - Bar width in characters (excluding borders).
 * @returns {string} The colored bar string.
 */
function generateQuickAnalysisBar(size, maxSize, width = 20) {
    const ratio = maxSize > 0 ? size / maxSize : 0;
    const filled = Math.round(ratio * width);
    // Bar color reflects relative size: red above 70%, orange above 30%,
    // gray otherwise.
    const color = ratio > 0.7 ? chalk.red : ratio > 0.3 ? chalk.hex('#FFA500') : chalk.gray;
    const cells = [];
    for (let i = 0; i < width; i++) {
        const onGridline = i > 0 && i % (width / 5) === 0;
        if (i < filled) {
            cells.push(onGridline ? chalk.white('│') : color('█'));
        }
        else {
            cells.push(onGridline ? chalk.gray('│') : chalk.gray('░'));
        }
    }
    // Wrap in vertical borders.
    return chalk.gray('│') + cells.join('') + chalk.gray('│');
}
112
/**
 * Render a bordered size bar for the tree display: a green→yellow→red
 * gradient by position, with vertical gridlines every 20% of the width.
 *
 * @param {number} size - Value to display.
 * @param {number} maxSize - Value corresponding to a full bar.
 * @param {number} [width=BAR_WIDTH] - Bar width in characters.
 * @returns {string} The colored bar string.
 */
function generateTreeBar(size, maxSize, width = BAR_WIDTH) {
    const fillRatio = maxSize > 0 ? size / maxSize : 0;
    const filled = Math.round(fillRatio * width);
    const segments = [];
    for (let i = 0; i < width; i++) {
        const onGridline = i > 0 && i % (width / 5) === 0;
        if (i >= filled) {
            // Empty portion.
            segments.push(onGridline ? chalk.gray('│') : chalk.gray('░'));
            continue;
        }
        if (onGridline) {
            // White gridlines take precedence over the fill.
            segments.push(chalk.white('│'));
            continue;
        }
        // Filled portion: color chosen by position along the bar.
        const pos = i / width;
        if (pos < 0.4) {
            segments.push(chalk.bgGreen.green('█'));
        }
        else if (pos < 0.7) {
            segments.push(chalk.bgYellow.yellow('█'));
        }
        else {
            segments.push(chalk.bgRed.red('█'));
        }
    }
    // Wrap in vertical borders.
    return chalk.gray('│') + segments.join('') + chalk.gray('│');
}
155
/**
 * Render a multi-line disk usage gauge: a scale header, then a
 * double-bordered bar with a green→yellow→red RGB gradient and vertical
 * gridlines every 20% of the width.
 *
 * @param {number} used - Used bytes.
 * @param {number} total - Total bytes.
 * @param {number} [width=40] - Bar width in characters.
 * @returns {string} Scale + bordered bar as one newline-joined string.
 */
function generateDiskBar(used, total, width = 40) {
    const usedRatio = total > 0 ? used / total : 0;
    const filled = Math.round(usedRatio * width);
    const gridlineAt = (i) => i > 0 && i % (width / 5) === 0;
    // Scale header row.
    const scale = chalk.gray('0% 20% 40% 60% 80% 100%');
    // Top border, marking gridline columns with ┬.
    let topBorder = chalk.gray('╔');
    for (let i = 0; i < width; i++) {
        topBorder += gridlineAt(i) ? chalk.white('┬') : chalk.gray('═');
    }
    topBorder += chalk.gray('╗') + '\n';
    // Main bar with a position-based color gradient.
    let bar = chalk.gray('║');
    for (let i = 0; i < width; i++) {
        if (i >= filled) {
            bar += gridlineAt(i) ? chalk.gray('│') : chalk.gray('░');
            continue;
        }
        if (gridlineAt(i)) {
            bar += chalk.white('│');
            continue;
        }
        const pos = i / width;
        let color;
        if (pos < 0.5) {
            // Green → yellow over the first half.
            const t = pos / 0.5;
            const r = Math.round(16 + (245 - 16) * t);
            const g = Math.round(185 + (158 - 185) * t);
            const b = Math.round(129 + (11 - 129) * t);
            color = chalk.rgb(r, g, b);
        }
        else {
            // Yellow → red over the second half.
            const t = (pos - 0.5) / 0.5;
            const r = 239;
            const g = Math.round(158 - (158 - 68) * t);
            const b = Math.round(11 - 11 * t);
            color = chalk.rgb(r, g, b);
        }
        bar += color('█');
    }
    bar += chalk.gray('║') + '\n';
    // Bottom border, marking gridline columns with ┴.
    let bottomBorder = chalk.gray('╚');
    for (let i = 0; i < width; i++) {
        bottomBorder += gridlineAt(i) ? chalk.white('┴') : chalk.gray('═');
    }
    bottomBorder += chalk.gray('╝');
    return scale + '\n' + topBorder + bar + bottomBorder;
}
210
/**
 * Print a one-level tree of items with aligned name / bar / size / percent
 * columns, truncating long names around an ellipsis.
 *
 * @param {Array<object>} items - Items with { name, size, isDirectory }.
 * @param {number} maxSize - Size corresponding to a full bar.
 * @param {number} limit - Maximum number of rows to print.
 * @param {string} [indent=''] - Prefix prepended to every line.
 */
function printTree(items, maxSize, limit, indent = '') {
    const displayed = items.slice(0, limit);
    const remaining = items.length - limit;
    // Name column width is clamped between NAME_WIDTH and MAX_NAME_WIDTH.
    const MAX_NAME_WIDTH = 40; // Maximum width to prevent excessive spacing
    const maxNameLength = Math.min(
        Math.max(...displayed.map((item) => item.name.length), NAME_WIDTH),
        MAX_NAME_WIDTH
    );
    displayed.forEach((item, i) => {
        const isLast = i === displayed.length - 1 && remaining <= 0;
        const prefix = isLast ? '└── ' : '├── ';
        const icon = item.isDirectory ? '📁' : '📄';
        // Names longer than the column keep their start and end around '...'.
        let displayName = item.name;
        if (displayName.length > maxNameLength) {
            const keepLength = Math.floor((maxNameLength - 3) / 2);
            displayName =
                displayName.substring(0, keepLength) +
                '...' +
                displayName.substring(displayName.length - keepLength);
        }
        displayName = displayName.padEnd(maxNameLength);
        const bar = generateTreeBar(item.size, maxSize);
        const sizeStr = formatSize(item.size).padStart(10);
        const percentage = maxSize > 0 ? ((item.size / maxSize) * 100).toFixed(1).padStart(5) + '%' : ' 0.0%';
        console.log(`${indent}${prefix}${icon} ${chalk.bold(displayName)} ${bar} ${chalk.cyan(sizeStr)} ${chalk.dim(percentage)}`);
    });
    if (remaining > 0) {
        console.log(chalk.dim(`${indent} ... and ${remaining} more items`));
    }
}
245
/**
 * Read total/used/free bytes for the root filesystem via `df -k`.
 *
 * @returns {Promise<{total: number, used: number, free: number}|null>}
 *   Byte counts, or null when `df` fails or its output cannot be parsed.
 */
async function getDiskUsage() {
    try {
        const { execSync } = await import('child_process');
        const output = execSync("df -k / | tail -1 | awk '{print $2, $3, $4}'").toString().trim();
        // df reports 1K blocks; convert to bytes. Explicit radix avoids the
        // bare-parseInt footguns (hex/octal interpretation).
        const [total, used, free] = output.split(' ').map((n) => Number.parseInt(n, 10) * 1024);
        // Guard against unexpected df output (missing or non-numeric columns),
        // which previously produced NaN fields.
        if (!Number.isFinite(total) || !Number.isFinite(used) || !Number.isFinite(free)) {
            return null;
        }
        return { total, used, free };
    }
    catch {
        return null;
    }
}
259
/**
 * Execute the analyze command: show overall disk usage, break down the
 * target directory by size, and summarize common cleanup locations with
 * a recommendation.
 *
 * @param {{path?: string, depth?: number, limit?: number}} options
 */
export async function analyzeCommand(options) {
    const targetPath = expandPath(options.path || '~');
    const depth = options.depth ?? 1;
    const limit = options.limit ?? 15;
    printHeader(`📊 Disk Space Analysis`);
    // Overall disk gauge (skipped when df is unavailable).
    const disk = await getDiskUsage();
    if (disk) {
        console.log(chalk.bold('💾 Disk Usage:'));
        const usedPercent = (disk.used / disk.total) * 100;
        console.log(generateDiskBar(disk.used, disk.total));
        console.log();
        console.log(` Used: ${chalk.yellow(formatSize(disk.used))} / ${formatSize(disk.total)} (${usedPercent.toFixed(1)}%)`);
        console.log(` Free: ${chalk.green(formatSize(disk.free))}`);
        console.log();
    }
    // Scan the target directory.
    info(`Analyzing system disk...`);
    console.log(chalk.bold(`Target: ${chalk.cyan(targetPath)}`));
    console.log();
    const spinner = createSpinner('Scanning directory sizes...');
    const tree = await scanDirectory(targetPath, depth);
    if (!tree) {
        failSpinner(spinner, 'Failed to scan directory');
        return;
    }
    succeedSpinner(spinner, `Scanned ${tree.children?.length ?? 0} items`);
    // Directory breakdown.
    console.log();
    console.log(chalk.bold(`📁 ${tree.name}`));
    console.log(` Total size: ${chalk.yellow(formatSize(tree.size))}`);
    console.log();
    // Name-column width clamped between NAME_WIDTH and MAX_NAME_WIDTH.
    const MAX_NAME_WIDTH = 40;
    const hasChildren = Boolean(tree.children && tree.children.length > 0);
    const maxNameLength = hasChildren
        ? Math.min(Math.max(...tree.children.slice(0, limit).map((item) => item.name.length), NAME_WIDTH), MAX_NAME_WIDTH)
        : NAME_WIDTH;
    // Column header for the tree listing.
    const headerName = 'Name'.padEnd(maxNameLength);
    console.log(chalk.dim(` ${headerName} ${'0% 20% 40% 60% 80% 100%'.padStart(BAR_WIDTH + 2)} Size Ratio`));
    console.log();
    if (hasChildren) {
        const maxSize = tree.children[0]?.size ?? 1;
        printTree(tree.children, maxSize, limit);
    }
    else {
        warning('No items found in directory');
    }
    // Quick summary of well-known space hogs.
    console.log();
    separator();
    console.log();
    console.log(chalk.bold('💡 Quick Analysis:'));
    console.log();
    console.log(chalk.dim(' Location 0% 20% 40% 60% 80% 100% Size'));
    console.log();
    const quickPaths = [
        { path: '~/Library/Caches', label: 'User Caches' },
        { path: '~/Library/Application Support', label: 'App Support' },
        { path: '~/.Trash', label: 'Trash' },
        { path: '~/Downloads', label: 'Downloads' },
        { path: '~/Library/Developer', label: 'Developer Data' },
    ];
    const quickResults = [];
    for (const { path, label } of quickPaths) {
        const fullPath = expandPath(path);
        if (exists(fullPath)) {
            quickResults.push({ label, size: await getSize(fullPath) });
        }
    }
    // Largest first.
    quickResults.sort((a, b) => b.size - a.size);
    const maxQuickSize = quickResults[0]?.size ?? 1;
    for (const result of quickResults) {
        const bar = generateQuickAnalysisBar(result.size, maxQuickSize, 30);
        const sizeStr = formatSize(result.size).padStart(10);
        console.log(` ${result.label.padEnd(15)} ${bar} ${chalk.cyan(sizeStr)}`);
    }
    // Recommendations based on the combined size of the quick locations.
    console.log();
    console.log(chalk.bold('📋 Recommendations:'));
    const totalQuickSize = quickResults.reduce((sum, r) => sum + r.size, 0);
    if (totalQuickSize > 5 * 1024 * 1024 * 1024) {
        // > 5GB
        console.log(chalk.yellow(` ⚠️ You have ${formatSize(totalQuickSize)} in common cleanup locations`));
        console.log(chalk.dim(' Run "broom clean" to free up space'));
    }
    else {
        console.log(chalk.green(' ✓ Your disk looks reasonably clean'));
    }
    console.log();
    console.log(chalk.dim('Tip: Use "broom analyze --path /path/to/dir" to analyze a specific directory'));
    console.log(chalk.dim(' Use "broom analyze --depth 2" to scan deeper'));
}
358
/**
 * Build the `analyze` Commander sub-command.
 *
 * Fix: numeric options use an explicit base-10 parser. Passing `parseInt`
 * directly to Commander is a footgun because Commander invokes coercion
 * callbacks as fn(value, previousValue), so previousValue would be passed
 * to parseInt as the radix (and bare parseInt also accepts hex strings).
 *
 * @returns {Command} The configured command with enhanced help output.
 */
export function createAnalyzeCommand() {
    const toInt = (value) => Number.parseInt(value, 10);
    const cmd = new Command('analyze')
        .description('Analyze disk space usage')
        .option('-p, --path <path>', 'Path to analyze (default: home directory)')
        .option('-d, --depth <number>', 'Scan depth (default: 1)', toInt)
        .option('-l, --limit <number>', 'Max items to show (default: 15)', toInt)
        .action(async (options) => {
        await analyzeCommand(options);
    });
    return enhanceCommandHelp(cmd);
}
@@ -0,0 +1,257 @@
1
+ /**
2
+ * Backup/Restore command - Safe file deletion with backup
3
+ */
4
+ import chalk from 'chalk';
5
+ import { Command } from 'commander';
6
+ import { enhanceCommandHelp } from '../utils/help.js';
7
+ import { expandPath, formatSize, exists } from '../utils/fs.js';
8
+ import { printHeader, separator, success, error, warning, info } from '../ui/output.js';
9
+ import { mkdir, readFile, writeFile, unlink, stat } from 'fs/promises';
10
+ import { join } from 'path';
11
+ import { execSync } from 'child_process';
12
+ const BACKUP_DIR = expandPath('~/.config/broom/backups');
13
+ const METADATA_FILE = expandPath('~/.config/broom/backups/metadata.json');
14
+ const DEFAULT_RETENTION_DAYS = 7;
15
/**
 * Load backup metadata from METADATA_FILE into a Map keyed by backup id.
 * Date fields (timestamp, expiresAt) are revived from their JSON string
 * form. Returns an empty Map when the file is missing or unreadable.
 *
 * @returns {Promise<Map<string, object>>}
 */
async function loadMetadata() {
    const metadataMap = new Map();
    try {
        if (exists(METADATA_FILE)) {
            const raw = await readFile(METADATA_FILE, 'utf-8');
            const parsed = JSON.parse(raw);
            Object.entries(parsed).forEach(([id, meta]) => {
                metadataMap.set(id, {
                    ...meta,
                    timestamp: new Date(meta.timestamp),
                    expiresAt: new Date(meta.expiresAt),
                });
            });
        }
    }
    catch {
        // Unreadable/corrupt metadata: fall through with whatever was loaded.
    }
    return metadataMap;
}
38
/**
 * Persist backup metadata to METADATA_FILE as pretty-printed JSON.
 *
 * Fix: the directory that must exist is BACKUP_DIR itself — the metadata
 * file lives inside it. The previous code created join(BACKUP_DIR, '..')
 * (the parent), so on a fresh install the write could fail because
 * ~/.config/broom/backups did not exist.
 *
 * @param {Map<string, object>} metadataMap - Backup id → metadata record.
 */
async function saveMetadata(metadataMap) {
    // METADATA_FILE lives inside BACKUP_DIR, so ensure that directory exists.
    await mkdir(BACKUP_DIR, { recursive: true });
    const data = Object.fromEntries(metadataMap);
    await writeFile(METADATA_FILE, JSON.stringify(data, null, 2), 'utf-8');
}
49
/**
 * Create a compressed tar archive of the given paths and record its
 * metadata with a default retention period.
 *
 * Security fix: the archive is built with execFileSync and an argument
 * vector instead of a shell-interpolated execSync string; previously a
 * path containing `"` or `$` could break out of the quoted command.
 *
 * @param {string[]} filePaths - Paths to include in the archive.
 * @param {string} [description='Manual backup'] - Human-readable label.
 * @returns {Promise<string>} The generated backup id.
 * @throws {Error} When the tar invocation fails.
 */
export async function createBackup(filePaths, description = 'Manual backup') {
    const backupId = `backup-${Date.now()}`;
    const backupPath = join(BACKUP_DIR, `${backupId}.tar.gz`);
    // Ensure backup directory exists
    await mkdir(BACKUP_DIR, { recursive: true });
    // Create the tar.gz archive without going through a shell.
    try {
        const { execFileSync } = await import('child_process');
        execFileSync('tar', ['-czf', backupPath, ...filePaths], {
            cwd: expandPath('~'),
            // Matches the old `2>/dev/null` behavior (tar warnings suppressed).
            stdio: ['ignore', 'ignore', 'ignore'],
        });
    }
    catch (err) {
        throw new Error(`Failed to create backup: ${err}`);
    }
    // Record size and retention metadata for the new archive.
    const stats = await stat(backupPath);
    const totalSize = stats.size;
    const metadata = {
        id: backupId,
        timestamp: new Date(),
        files: filePaths,
        totalSize,
        expiresAt: new Date(Date.now() + DEFAULT_RETENTION_DAYS * 24 * 60 * 60 * 1000),
        description,
    };
    const metadataMap = await loadMetadata();
    metadataMap.set(backupId, metadata);
    await saveMetadata(metadataMap);
    return backupId;
}
84
/**
 * Print every recorded backup, newest first, with its age and whether the
 * retention period has expired.
 */
async function listBackups() {
    printHeader('📦 Backup List');
    const metadataMap = await loadMetadata();
    if (metadataMap.size === 0) {
        warning('No backups found');
        return;
    }
    const DAY_MS = 1000 * 60 * 60 * 24;
    // Newest first.
    const backups = [...metadataMap.values()].sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
    console.log();
    for (const backup of backups) {
        const expired = new Date() > backup.expiresAt;
        const age = Math.floor((Date.now() - backup.timestamp.getTime()) / DAY_MS);
        console.log(chalk.bold(`ID: ${backup.id}`));
        console.log(` Description: ${backup.description}`);
        console.log(` Created: ${backup.timestamp.toLocaleString()} (${age} days ago)`);
        console.log(` Size: ${formatSize(backup.totalSize)}`);
        console.log(` Files: ${backup.files.length}`);
        if (expired) {
            console.log(chalk.red(' Status: Expired'));
        }
        else {
            const daysLeft = Math.floor((backup.expiresAt.getTime() - Date.now()) / DAY_MS);
            console.log(chalk.green(` Status: Active (${daysLeft} days left)`));
        }
        console.log();
    }
    separator();
    console.log();
    console.log(chalk.dim('Use "broom restore <backup-id>" to restore a backup'));
    console.log(chalk.dim('Use "broom backup --clean" to remove expired backups'));
}
119
/**
 * Delete expired backup archives and their metadata entries, reporting
 * how much space was freed.
 *
 * Fix: expired metadata entries whose archive file has already disappeared
 * are now pruned too. Previously the entry was only deleted inside the
 * `exists(backupPath)` branch, so stale records accumulated in
 * metadata.json forever.
 */
async function cleanExpiredBackups() {
    printHeader('🧹 Cleaning Expired Backups');
    const metadataMap = await loadMetadata();
    const now = new Date();
    let removedCount = 0;
    let freedSpace = 0;
    for (const [id, meta] of metadataMap.entries()) {
        if (now <= meta.expiresAt) {
            continue;
        }
        const backupPath = join(BACKUP_DIR, `${id}.tar.gz`);
        if (exists(backupPath)) {
            const stats = await stat(backupPath);
            freedSpace += stats.size;
            await unlink(backupPath);
            removedCount++;
            console.log(chalk.dim(`Removed: ${id} (${formatSize(stats.size)})`));
        }
        // Drop the metadata entry whether or not the archive still existed.
        // (Deleting during Map iteration is safe in JS.)
        metadataMap.delete(id);
    }
    await saveMetadata(metadataMap);
    console.log();
    if (removedCount > 0) {
        success(`Removed ${removedCount} expired backup(s), freed ${formatSize(freedSpace)}`);
    }
    else {
        info('No expired backups to remove');
    }
}
150
/**
 * Restore a backup archive into the home directory after showing its
 * details and asking for confirmation.
 *
 * @param {string} backupId - Id of the backup to restore.
 */
async function restoreBackup(backupId) {
    printHeader(`📦 Restoring Backup: ${backupId}`);
    const metadataMap = await loadMetadata();
    const metadata = metadataMap.get(backupId);
    if (!metadata) {
        error(`Backup not found: ${backupId}`);
        return;
    }
    const backupPath = join(BACKUP_DIR, `${backupId}.tar.gz`);
    if (!exists(backupPath)) {
        error(`Backup file not found: ${backupPath}`);
        return;
    }
    console.log();
    console.log(chalk.bold('Backup Information:'));
    console.log(` Created: ${metadata.timestamp.toLocaleString()}`);
    console.log(` Files: ${metadata.files.length}`);
    console.log(` Size: ${formatSize(metadata.totalSize)}`);
    console.log();
    // Confirm before overwriting anything in the home directory.
    const { confirm } = await import('../ui/prompts.js');
    const shouldRestore = await confirm({ message: 'Restore this backup?' });
    if (!shouldRestore) {
        warning('Restore cancelled');
        return;
    }
    console.log();
    info('Restoring files...');
    try {
        // Extract the archive relative to the home directory.
        execSync(`tar -xzf "${backupPath}" -C "${expandPath('~')}"`, {
            stdio: 'inherit',
        });
        success('Backup restored successfully!');
        console.log();
        console.log(chalk.dim('Restored files:'));
        metadata.files.forEach((file) => {
            console.log(chalk.dim(` - ${file}`));
        });
    }
    catch (err) {
        error(`Failed to restore backup: ${err}`);
    }
}
197
/**
 * Entry point for `broom backup`: dispatch to list/clean based on flags,
 * or print usage help when neither is given.
 *
 * @param {{list?: boolean, clean?: boolean}} options
 */
export async function backupCommand(options) {
    if (options.list) {
        await listBackups();
        return;
    }
    if (options.clean) {
        await cleanExpiredBackups();
        return;
    }
    // No flag given: show usage help.
    printHeader('📦 Backup Management');
    console.log();
    console.log(chalk.bold('Usage:'));
    console.log(' broom backup --list List all backups');
    console.log(' broom backup --clean Remove expired backups');
    console.log(' broom restore <backup-id> Restore a specific backup');
    console.log(' broom restore --list List restorable backups');
    console.log();
    console.log(chalk.dim('Backups are automatically created when using --backup flag with clean'));
    console.log(chalk.dim(`Default retention period: ${DEFAULT_RETENTION_DAYS} days`));
}
221
/**
 * Entry point for `broom restore`: list backups when --list is given or no
 * backup id was supplied; otherwise restore the named backup.
 *
 * @param {{list?: boolean}} options
 * @param {string} [backupId] - Backup id to restore.
 */
export async function restoreCommand(options, backupId) {
    const wantsList = options.list || !backupId;
    if (wantsList) {
        await listBackups();
        return;
    }
    await restoreBackup(backupId);
}
231
/**
 * Build the `backup` Commander sub-command.
 *
 * Fix: --retention uses an explicit base-10 parser instead of passing
 * parseInt directly — Commander invokes coercion callbacks as
 * fn(value, previousValue), which would feed previousValue to parseInt as
 * the radix.
 *
 * @returns {Command} The configured command with enhanced help output.
 */
export function createBackupCommand() {
    const cmd = new Command('backup')
        .description('Manage file backups')
        .option('-l, --list', 'List all backups')
        .option('-c, --clean', 'Remove expired backups')
        // NOTE(review): --retention is parsed but not read by backupCommand — confirm intent.
        .option('-r, --retention <days>', 'Set retention period in days', (value) => Number.parseInt(value, 10))
        .action(async (options) => {
        await backupCommand(options);
    });
    return enhanceCommandHelp(cmd);
}
245
/**
 * Build the `restore` Commander sub-command.
 *
 * @returns {Command} Command wired to restoreCommand, with enhanced help.
 */
export function createRestoreCommand() {
    const cmd = new Command('restore');
    cmd
        .description('Restore files from backup')
        .argument('[backup-id]', 'Backup ID to restore')
        .option('-l, --list', 'List restorable backups')
        .action(async (backupId, options) => {
        await restoreCommand(options, backupId);
    });
    return enhanceCommandHelp(cmd);
}