filemayor 2.0.5 → 3.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,163 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * ═══════════════════════════════════════════════════════════════════
5
+ * FILEMAYOR CORE — ANALYZER
6
+ * Deep filesystem insights: duplicate detection, bloat mapping,
7
+ * and potential savings calculation.
8
+ * ═══════════════════════════════════════════════════════════════════
9
+ */
10
+
11
+ 'use strict';
12
+
13
+ const fs = require('fs');
14
+ const path = require('path');
15
+ const crypto = require('crypto');
16
+ const { scan } = require('./scanner');
17
+ const { findJunk } = require('./cleaner');
18
+ const { formatBytes } = require('./scanner');
19
+
20
/**
 * Analyze a directory for duplicates, bloat, and junk.
 *
 * @param {string} dirPath - Root directory to analyze
 * @param {Object} [options] - Analysis options
 * @param {number} [options.maxDepth=10] - Maximum recursion depth passed to scan()
 * @param {number} [options.minSize=1024] - Files smaller than this (bytes) are
 *   excluded from duplicate analysis
 * @param {Function} [options.onProgress] - Called once per file during bloat
 *   mapping with { current, total, percent, file }
 * @returns {Object} Deep analysis results: { root, timestamp, summary,
 *   duplicates, largestDirs, junk }
 */
function analyzeDirectory(dirPath, options = {}) {
  const {
    maxDepth = 10,
    minSize = 1024, // Ignore files smaller than 1KB for duplicate analysis
    onProgress = null
  } = options;

  // 1. Initial Scan
  const scanResult = scan(dirPath, { maxDepth, includeDirectories: true });
  const { files, stats } = scanResult;

  // 2. Duplicate Detection (size-first hashing): group by size first (cheap),
  //    then hash only the size-collision candidates (expensive).
  const sizeMap = new Map();
  const duplicates = [];

  for (const file of files) {
    if (file.size < minSize) continue;
    if (!sizeMap.has(file.size)) {
      sizeMap.set(file.size, []);
    }
    sizeMap.get(file.size).push(file);
  }

  // Hash only those with identical sizes
  for (const [size, candidateFiles] of sizeMap.entries()) {
    if (candidateFiles.length < 2) continue;

    const hashMap = new Map();
    for (const file of candidateFiles) {
      try {
        // Partial hash (first 16KB) for efficiency.
        // NOTE(review): files that share size AND leading 16KB but differ
        // later are still reported as duplicates — fine for a report,
        // confirm with a full hash before any automated deletion.
        const hash = getFileHash(file.path, 16384);
        if (!hashMap.has(hash)) {
          hashMap.set(hash, []);
        }
        hashMap.get(hash).push(file);
      } catch (err) {
        // Best-effort: skip files we can't read (permissions, races).
      }
    }

    // Iterate values only — the hash key itself is not needed here.
    for (const dupeFiles of hashMap.values()) {
      if (dupeFiles.length > 1) {
        duplicates.push({
          size,
          sizeHuman: formatBytes(size),
          files: dupeFiles.map(f => ({ name: f.name, path: f.path, relativePath: f.relativePath })),
          // Every copy beyond the first is "wasted" space.
          wastedSpace: size * (dupeFiles.length - 1),
          wastedSpaceHuman: formatBytes(size * (dupeFiles.length - 1))
        });
      }
    }
  }

  // 3. Bloat Mapping (Top Directories) & Progress Reporting
  const dirMap = new Map();
  for (let i = 0; i < files.length; i++) {
    const file = files[i];

    // Progress callback (optional)
    onProgress?.({
      current: i + 1,
      total: files.length,
      percent: Math.round(((i + 1) / files.length) * 100),
      file: file.name
    });

    const parts = file.relativePath.split(path.sep);
    let currentPath = '';
    // Only track top-level and second-level folders for bloat mapping
    for (let j = 0; j < Math.min(parts.length - 1, 2); j++) {
      currentPath = currentPath ? path.join(currentPath, parts[j]) : parts[j];
      if (!dirMap.has(currentPath)) {
        dirMap.set(currentPath, { size: 0, count: 0 });
      }
      const info = dirMap.get(currentPath);
      info.size += file.size;
      info.count += 1;
    }
  }

  const largestDirs = Array.from(dirMap.entries())
    .map(([name, info]) => ({ name, ...info, sizeHuman: formatBytes(info.size) }))
    .sort((a, b) => b.size - a.size)
    .slice(0, 5);

  // 4. Junk Detection Integration
  const junkResult = findJunk(dirPath, { maxDepth });
  const totalJunkSize = junkResult.stats.totalSize;
  const totalDuplicateWasted = duplicates.reduce((sum, d) => sum + d.wastedSpace, 0);

  return {
    root: dirPath,
    timestamp: Date.now(),
    summary: {
      totalFiles: stats.filesFound,
      totalSize: stats.totalSize,
      totalSizeHuman: formatBytes(stats.totalSize),
      potentialSavings: totalJunkSize + totalDuplicateWasted,
      potentialSavingsHuman: formatBytes(totalJunkSize + totalDuplicateWasted)
    },
    duplicates: {
      sets: duplicates.length,
      totalWasted: totalDuplicateWasted,
      totalWastedHuman: formatBytes(totalDuplicateWasted),
      details: duplicates.sort((a, b) => b.wastedSpace - a.wastedSpace).slice(0, 10)
    },
    largestDirs,
    junk: {
      count: junkResult.junk.length,
      totalSize: totalJunkSize,
      totalSizeHuman: formatBytes(totalJunkSize),
      categories: junkResult.stats.byCategory
    }
  };
}
143
+
144
/**
 * Get the MD5 hex digest of a file's content (partial or full).
 * MD5 is used for duplicate grouping only, not for security.
 *
 * @param {string} filePath - File to hash
 * @param {?number} [limit=null] - When set, hash at most the first `limit`
 *   bytes; when null, hash the whole file.
 * @returns {string} MD5 hex digest
 * @throws If the file cannot be opened or read
 */
function getFileHash(filePath, limit = null) {
  let buffer;

  if (limit) {
    const fd = fs.openSync(filePath, 'r');
    try {
      const chunk = Buffer.alloc(limit);
      const bytesRead = fs.readSync(fd, chunk, 0, limit, 0);
      // Hash only the bytes actually read, not the zero-filled remainder
      // of the buffer (matters for files smaller than `limit`).
      buffer = chunk.subarray(0, bytesRead);
    } finally {
      // Always release the descriptor, even if readSync throws.
      fs.closeSync(fd);
    }
  } else {
    buffer = fs.readFileSync(filePath);
  }

  return crypto.createHash('md5').update(buffer).digest('hex');
}
160
+
161
+ module.exports = {
162
+ analyzeDirectory
163
+ };
@@ -0,0 +1,104 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * ═══════════════════════════════════════════════════════════════════
5
+ * FILEMAYOR CORE — EMERGENCY HALT HANDLER ("The Black Box")
6
+ *
7
+ * Intercepts Ctrl+C, SIGTERM, and uncaught exceptions to ensure
8
+ * the Master Journal is flushed to disk before the process dies.
9
+ * Without this, a partial batch leaves an un-recoverable mess.
10
+ *
11
+ * Uses writeFileSync to BLOCK the event loop during flush —
12
+ * guaranteeing the JSON data reaches the SSD even during panic.
13
+ * ═══════════════════════════════════════════════════════════════════
14
+ */
15
+
16
+ 'use strict';
17
+
18
+ const fs = require('fs');
19
+ const path = require('path');
20
+
21
let _isInitialized = false;
let _journalRef = null;
let _journalPath = null;
// Process listeners must only ever be attached once per process.
// clearEmergencyHalt() resets the journal state but NOT this flag, so a
// re-initialization after a completed batch reuses the existing listeners
// instead of stacking duplicates (which would leak listeners and eventually
// trigger MaxListenersExceededWarning across many batches).
let _handlersRegistered = false;

/**
 * Initialize the Emergency Halt handler.
 * Call this at the start of any batch operation. Redundant calls while a
 * batch is active are ignored; calling again after clearEmergencyHalt()
 * refreshes the journal state without re-registering process listeners.
 *
 * @param {Array} journalRef - Reference to the live journal array
 * @param {string} journalPath - Absolute path to the journal file
 */
function initEmergencyHalt(journalRef, journalPath) {
  if (_isInitialized) return; // Prevent double-registration

  _journalRef = journalRef;
  _journalPath = journalPath;
  _isInitialized = true;

  if (_handlersRegistered) return; // Listeners from a prior batch still apply
  _handlersRegistered = true;

  // Synchronously persist the journal. Reads module-level state so that
  // updateJournalRef() can swap in fresh data mid-batch.
  const flushJournal = (reason) => {
    if (!_journalRef || _journalRef.length === 0) return;

    console.log(`\n\n🛑 [EMERGENCY HALT] ${reason}`);
    console.log(' Saving progress to Master Journal...');

    try {
      const data = JSON.stringify(_journalRef, null, 2);

      // CRITICAL: writeFileSync blocks the event loop to guarantee persistence
      // Permission 0o600 = owner-only read/write (anti-journal-poisoning)
      fs.writeFileSync(_journalPath, data, { mode: 0o600 });

      console.log(` ✅ Progress saved (${_journalRef.length} entries).`);
      console.log(` Run 'filemayor undo' to revert.`);
    } catch (err) {
      console.error(` ❌ CRITICAL: Could not save Journal: ${err.message}`);
    }
  };

  // Intercept Ctrl+C (SIGINT), Terminal Closure (SIGTERM), HUP.
  // SIGHUP is not available on Windows, so add it conditionally.
  const signals = ['SIGINT', 'SIGTERM'];
  if (process.platform !== 'win32') signals.push('SIGHUP');

  // Conventional exit code is 128 + signal number (SIGHUP=1, SIGINT=2,
  // SIGTERM=15) so shells/supervisors can tell which signal killed us.
  const exitCodes = { SIGHUP: 129, SIGINT: 130, SIGTERM: 143 };

  for (const signal of signals) {
    process.on(signal, () => {
      flushJournal(`Received ${signal}`);
      process.exit(exitCodes[signal] || 130);
    });
  }

  // Catch unexpected crashes (the "Vibe-Coding Guard")
  process.on('uncaughtException', (err) => {
    console.error(`\n💥 [INTERNAL CRASH]: ${err.message}`);
    console.error(err.stack);
    flushJournal('Uncaught Exception');
    process.exit(1);
  });

  process.on('unhandledRejection', (reason) => {
    console.error(`\n💥 [UNHANDLED REJECTION]: ${reason}`);
    flushJournal('Unhandled Promise Rejection');
    process.exit(1);
  });
}
85
+
86
/**
 * Repoint the halt handler at the most recent journal array.
 * Call whenever a batch swaps in a new journal reference so that an
 * emergency flush always persists the latest data.
 * @param {Array} ref - Updated journal reference
 */
function updateJournalRef(ref) {
  _journalRef = ref;
}
94
+
95
/**
 * Clean shutdown — call after a batch completes successfully.
 * Drops the journal reference and re-arms initEmergencyHalt for the
 * next batch.
 */
function clearEmergencyHalt() {
  _isInitialized = false;
  _journalRef = null;
}
103
+
104
+ module.exports = { initEmergencyHalt, updateJournalRef, clearEmergencyHalt };
@@ -0,0 +1,69 @@
1
+ /**
2
+ * ═══════════════════════════════════════════════════════════════════
3
+ * FILEMAYOR v3.5 — APPLY ENGINE
4
+ * The Executioner: Reifies the Curative Plan into reality.
5
+ * ═══════════════════════════════════════════════════════════════════
6
+ */
7
+
8
+ 'use strict';
9
+
10
+ const FileMayorFS = require('../fs-abstraction');
11
+
12
class ApplyEngine {
  /**
   * @param {Object} [options] - Passed through to the FileMayorFS abstraction
   */
  constructor(options = {}) {
    this.fs = new FileMayorFS(options);
  }

  /**
   * Execute a validated Curative Plan, one move at a time.
   * Failed steps are recorded and skipped; execution continues so a single
   * bad path does not abort the whole batch.
   *
   * @param {Object} plan - The plan from CureEngine ({ plan: [{ source, destination }] })
   * @param {?Function} [progressCb] - Optional per-step callback; receives
   *   { index, total, current, status, [error] }
   * @returns {Promise<Object>} { status: 'completed' | 'partial', stats, journalCount }
   * @throws {Error} If the plan is missing or has no `plan` array
   */
  async apply(plan, progressCb = null) {
    if (!plan || !Array.isArray(plan.plan)) {
      throw new Error('Invalid plan: Nothing to apply.');
    }

    const stats = {
      total: plan.plan.length,
      success: 0,
      failed: 0
    };

    // Moves are intentionally sequential: later steps may depend on the
    // filesystem state produced by earlier ones.
    for (let i = 0; i < plan.plan.length; i++) {
      const step = plan.plan[i];
      try {
        await this.fs.move(step.source, step.destination);
        stats.success++;
        progressCb?.({
          index: i + 1,
          total: stats.total,
          current: step.source,
          status: 'success'
        });
      } catch (err) {
        stats.failed++;
        progressCb?.({
          index: i + 1,
          total: stats.total,
          current: step.source,
          status: 'error',
          error: err.message
        });
      }
    }

    return {
      status: stats.failed === 0 ? 'completed' : 'partial',
      stats,
      journalCount: this.fs.getJournal().length
    };
  }

  /**
   * Revert all journaled moves via the filesystem abstraction.
   * @returns {Promise<*>} Rollback result from FileMayorFS
   */
  async rollback() {
    return await this.fs.rollback();
  }
}
68
+
69
+ module.exports = ApplyEngine;
@@ -0,0 +1,70 @@
1
+ /**
2
+ * ═══════════════════════════════════════════════════════════════════
3
+ * FILEMAYOR v3.5 — CURE ENGINE
4
+ * Orchestrates the Agent Crew to fix the clutter.
5
+ * ═══════════════════════════════════════════════════════════════════
6
+ */
7
+
8
+ 'use strict';
9
+
10
+ const IntentStrategist = require('../ai/strategist');
11
+ const MetadataSentry = require('../ai/sentry');
12
+ const CurativePlanner = require('../ai/planner');
13
+ const SecurityArchitect = require('../ai/validator');
14
+ const { scan } = require('../scanner');
15
+
16
class CureEngine {
  /**
   * @param {?string} dirPath - Directory to curate (may also be set later via curate())
   * @param {?string} [apiKey] - Gemini API key; falls back to GEMINI_API_KEY env var
   */
  constructor(dirPath, apiKey) {
    // Smart detection: if dirPath looks like an API key (no path separators,
    // long opaque string) and no apiKey was given, treat it as the key.
    if (dirPath && !apiKey && !dirPath.includes('/') && !dirPath.includes('\\') && dirPath.length > 20) {
      this.apiKey = dirPath;
      this.dirPath = null;
    } else {
      this.dirPath = dirPath;
      this.apiKey = apiKey || process.env.GEMINI_API_KEY;
    }

    this.strategist = new IntentStrategist();
    this.sentry = new MetadataSentry();
    this.planner = new CurativePlanner(this.apiKey);
    this.architect = new SecurityArchitect();

    this.plan = null; // Last validated plan, cached by generatePlan()
  }

  /**
   * Perform curative operation (Brain phase) - UI Aligned
   * @param {string} prompt - User intent
   * @param {Function} [progressCallback] - Optional progress reporter (receives a status string)
   * @returns {Promise<Object>} Safe Curative Plan (also cached on this.plan)
   * @throws {Error} If no target directory has been set
   */
  async generatePlan(prompt, progressCallback) {
    if (!this.dirPath) {
      // Fail fast with a clear message instead of letting scan() crash on
      // null (the constructor's key-detection path leaves dirPath unset).
      throw new Error('No target directory set. Pass a path to the constructor or use curate(dirPath, prompt).');
    }

    progressCallback?.("Scanning filesystem context...");
    const scanResult = scan(this.dirPath, { maxDepth: 10 });

    progressCallback?.("Identifying functional domains & ancestry...");
    const sentryData = this.sentry.analyze(scanResult.files);

    progressCallback?.("Consulting Intuition Engine...");
    const intent = this.strategist.classify(prompt);

    progressCallback?.("Synthesizing structural plan...");
    const curativePlan = await this.planner.plan(intent, sentryData, prompt);

    progressCallback?.("Validating structural integrity...");
    const safePlan = this.architect.validate(curativePlan, sentryData);

    this.plan = safePlan;
    return safePlan;
  }

  /**
   * Legacy entry point for CLI: set the directory, then generate a plan.
   * @param {string} dirPath - Directory to curate
   * @param {string} prompt - User intent
   * @returns {Promise<Object>} Safe Curative Plan
   */
  async curate(dirPath, prompt) {
    this.dirPath = dirPath;
    return await this.generatePlan(prompt);
  }
}
69
+
70
+ module.exports = CureEngine;
@@ -0,0 +1,77 @@
1
+ 'use strict';
2
+
3
+ const { scan, formatBytes } = require('../scanner');
4
+ const crypto = require('crypto');
5
+ const fs = require('fs');
6
+
7
+ /**
8
+ * DedupeEngine — specialized engine for finding and removing duplicates
9
+ */
10
+ class DedupeEngine {
11
+ constructor() {}
12
+
13
+ /**
14
+ * Find duplicates in a directory
15
+ * @param {string} dirPath
16
+ * @returns {Object} Duplicate report
17
+ */
18
+ find(dirPath) {
19
+ const result = scan(dirPath, { maxDepth: 10 });
20
+ const map = new Map();
21
+ const duplicates = [];
22
+ let totalWasted = 0;
23
+
24
+ for (const file of result.files) {
25
+ // Heuristic-based match (size + name)
26
+ // In a real pro version, we might do checksums here,
27
+ // but for a fast CLI, this is the "Viral" speed way.
28
+ const key = `${file.size}-${file.name}`;
29
+ if (map.has(key)) {
30
+ const original = map.get(key);
31
+ duplicates.push({
32
+ original,
33
+ duplicate: file
34
+ });
35
+ totalWasted += file.size;
36
+ } else {
37
+ map.set(key, file);
38
+ }
39
+ }
40
+
41
+ return {
42
+ path: dirPath,
43
+ sets: duplicates.length,
44
+ totalWasted,
45
+ totalWastedHuman: formatBytes(totalWasted),
46
+ duplicates
47
+ };
48
+ }
49
+
50
+ /**
51
+ * Remove duplicates based on a report
52
+ */
53
+ async clean(report) {
54
+ let deleted = 0;
55
+ let freed = 0;
56
+ const errors = [];
57
+
58
+ for (const set of report.duplicates) {
59
+ try {
60
+ fs.unlinkSync(set.duplicate.path);
61
+ deleted++;
62
+ freed += set.duplicate.size;
63
+ } catch (err) {
64
+ errors.push(`${set.duplicate.name}: ${err.message}`);
65
+ }
66
+ }
67
+
68
+ return {
69
+ deleted,
70
+ freed,
71
+ freedHuman: formatBytes(freed),
72
+ errors
73
+ };
74
+ }
75
+ }
76
+
77
+ module.exports = DedupeEngine;
@@ -0,0 +1,114 @@
1
+ /**
2
+ * ═══════════════════════════════════════════════════════════════════
3
+ * FILEMAYOR v3.5 — EXPLAIN ENGINE
4
+ * The Diagnostic UX: "The doctor for your filesystem."
5
+ * ═══════════════════════════════════════════════════════════════════
6
+ */
7
+
8
+ 'use strict';
9
+
10
+ const { scan } = require('../scanner');
11
+ const MetadataSentry = require('../ai/sentry');
12
+
13
class ExplainEngine {
  constructor() {
    this.sentry = new MetadataSentry();
  }

  /**
   * Run a diagnostic scan and return structured insights
   * @param {string} dirPath - Directory to analyze
   * @returns {Object} Diagnostic report { title, path, health, summary,
   *   clusters, duplicates, duplicateSize, insights }
   */
  run(dirPath) {
    // 1. Scan the directory (bounded depth)
    const scanResult = scan(dirPath, { maxDepth: 10 });

    // 2. Use MetadataSentry to get clusters
    const sentryData = this.sentry.analyze(scanResult.files);

    // 3. Find duplicates (by name and size as a simple heuristic —
    //    distinct files sharing both will be false positives)
    const duplicates = this._findDuplicates(scanResult.files);

    // 4. Generate insights and a health score from the analysis
    const insights = this._generateInsights(sentryData, duplicates);
    const health = this._calculateHealth(sentryData, duplicates);

    return {
      title: 'FILEMAYOR DIAGNOSIS',
      path: dirPath,
      health,
      summary: sentryData.summary,
      clusters: sentryData.clusters,
      duplicates: duplicates.length,
      duplicateSize: duplicates.reduce((acc, f) => acc + f.size, 0),
      insights
    };
  }

  /**
   * Collect every file whose (name, size) pair was already seen.
   * The first occurrence counts as the original and is not returned.
   * @param {Array} files
   * @returns {Array} Duplicate entries only
   */
  _findDuplicates(files) {
    const seen = new Map();
    const duplicates = [];

    for (const f of files) {
      const key = `${f.name}-${f.size}`;
      if (seen.has(key)) {
        duplicates.push(f);
      } else {
        seen.set(key, f);
      }
    }
    return duplicates;
  }

  /**
   * Translate cluster stats and duplicates into plain-English findings.
   * @param {Object} data - Sentry analysis (reads data.clusters)
   * @param {Array} duplicates
   * @returns {string[]} Insight sentences
   */
  _generateInsights(data, duplicates) {
    const insights = [];
    const { clusters } = data;

    if (clusters.screenshot && clusters.screenshot.count > 10) {
      insights.push(`${clusters.screenshot.count} screenshots scattered in folder.`);
    }

    if (clusters.executable && clusters.executable.count > 5) {
      insights.push(`${clusters.executable.count} installers consuming space.`);
    }

    if (duplicates.length > 0) {
      insights.push(`${duplicates.length} duplicate files detected.`);
    }

    if (clusters.archive && clusters.archive.count > 3) {
      insights.push(`Multiple old ZIP/RAR archives identified.`);
    }

    return insights;
  }

  /**
   * Score filesystem health 0-100 with a label and display color.
   * Deductions: up to 30 points for the duplicate ratio, plus a flat 20
   * when one folder mixes many (>8) file categories.
   * @param {Object} data - Sentry analysis (reads summary.totalFiles, summary.categories)
   * @param {Array} duplicates
   * @returns {{score: number, label: string, color: string}}
   */
  _calculateHealth(data, duplicates) {
    const totalFiles = data.summary.totalFiles;
    if (totalFiles === 0) return { score: 100, label: 'EXCELLENT', color: '#10b981' };

    // Deduction logic
    let deductions = 0;
    deductions += Math.min(30, (duplicates.length / totalFiles) * 100);
    // Too many categories in one place suggests a junk-drawer folder.
    // (The flat 20 can never exceed 30, so no extra clamp is needed.)
    deductions += data.summary.categories.length > 8 ? 20 : 0;

    const score = Math.max(0, 100 - Math.round(deductions));

    if (score < 40) return { score, label: 'POOR', color: '#ef4444' };
    if (score < 75) return { score, label: 'FAIR', color: '#f59e0b' };
    return { score, label: 'GOOD', color: '#10b981' };
  }
}
113
+
114
+ module.exports = ExplainEngine;
@@ -0,0 +1,49 @@
1
+ /**
2
+ * ═══════════════════════════════════════════════════════════════════
3
+ * FILEMAYOR v3.5 — PREVIEW ENGINE
4
+ * Formats Curative Plans for visualization.
5
+ * ═══════════════════════════════════════════════════════════════════
6
+ */
7
+
8
+ 'use strict';
9
+
10
class PreviewEngine {
  /**
   * Print a human-readable preview of a Curative Plan to the console.
   * @param {Object} plan - Plan with { narrative, confidence, plan: [steps] }
   */
  showInConsole(plan) {
    console.log('\n--- CURATIVE PLAN PREVIEW ---');
    console.log(`\nNARRATIVE: ${plan.narrative || 'No explanation provided.'}`);
    console.log(`CONFIDENCE: ${plan.confidence || 0}%`);

    console.log('\nPROPOSED MOVES:');
    if (plan.plan.length === 0) {
      console.log(' (No changes suggested)');
    } else {
      let position = 0;
      for (const step of plan.plan) {
        position += 1;
        console.log(` [${position}] ${step.source}`);
        console.log(` → ${step.destination}`);
        console.log(` Reason: ${step.reason || 'Organizing'}`);
      }
    }

    console.log(`\nTotal actions: ${plan.plan.length}`);
    console.log('Run with `apply` to execute these changes.');
    console.log('-----------------------------\n');
  }

  /**
   * Shape a plan for UI digestion: the original plan plus a timestamp
   * and a one-line summary string.
   * @param {Object} plan
   * @returns {Object} Enriched copy of the plan
   */
  formatForUI(plan) {
    const enriched = {
      ...plan,
      timestamp: Date.now(),
      summary: `${plan.plan.length} files to move`
    };
    return enriched;
  }
}
48
+
49
+ module.exports = PreviewEngine;