filemayor 2.1.0 → 3.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/core/ai/planner.js +42 -0
- package/core/ai/sentry.js +92 -0
- package/core/ai/strategist.js +186 -0
- package/core/ai/validator.js +116 -0
- package/core/analyzer.js +16 -5
- package/core/emergency-halt.js +104 -0
- package/core/engine/apply-engine.js +69 -0
- package/core/engine/cure-engine.js +70 -0
- package/core/engine/dedupe-engine.js +77 -0
- package/core/engine/explain-engine.js +114 -0
- package/core/engine/preview-engine.js +49 -0
- package/core/fs-abstraction.js +199 -0
- package/core/guardrail.js +115 -0
- package/core/index.js +56 -0
- package/core/intent-interpreter.js +158 -0
- package/core/jailer.js +151 -0
- package/core/license.js +6 -4
- package/core/metadata-store.js +104 -0
- package/core/organizer.js +28 -142
- package/core/reporter.js +132 -2
- package/core/scanner.js +92 -62
- package/core/security.js +65 -12
- package/core/vault.js +165 -0
- package/index.js +193 -1
- package/package.json +2 -2
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
3
|
+
* FILEMAYOR v3.5 — CURE ENGINE
|
|
4
|
+
* Orchestrates the Agent Crew to fix the clutter.
|
|
5
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
'use strict';
|
|
9
|
+
|
|
10
|
+
const IntentStrategist = require('../ai/strategist');
|
|
11
|
+
const MetadataSentry = require('../ai/sentry');
|
|
12
|
+
const CurativePlanner = require('../ai/planner');
|
|
13
|
+
const SecurityArchitect = require('../ai/validator');
|
|
14
|
+
const { scan } = require('../scanner');
|
|
15
|
+
|
|
16
|
+
class CureEngine {
  /**
   * @param {string} [dirPath] - Target directory to curate. May be omitted when
   *   only an API key is supplied (legacy single-argument form; see swap below).
   * @param {string} [apiKey] - Gemini API key; falls back to GEMINI_API_KEY env var.
   */
  constructor(dirPath, apiKey) {
    // Smart detection: if dirPath looks like an API key (no path seps), swap args
    if (dirPath && !apiKey && !dirPath.includes('/') && !dirPath.includes('\\') && dirPath.length > 20) {
      this.apiKey = dirPath;
      this.dirPath = null;
    } else {
      this.dirPath = dirPath;
      this.apiKey = apiKey || process.env.GEMINI_API_KEY;
    }

    // Agent crew: intent classification, metadata clustering, AI planning, validation.
    this.strategist = new IntentStrategist();
    this.sentry = new MetadataSentry();
    this.planner = new CurativePlanner(this.apiKey);
    this.architect = new SecurityArchitect();

    // Last plan produced by generatePlan(); null until one succeeds.
    this.plan = null;
  }

  /**
   * Perform curative operation (Brain phase) - UI Aligned
   * @param {string} prompt - User intent
   * @param {Function} [progressCallback] - Optional progress reporter
   * @returns {Promise<Object>} Safe Curative Plan
   * @throws {Error} If no target directory has been set.
   */
  async generatePlan(prompt, progressCallback) {
    // FIX: the constructor's arg-swap branch leaves dirPath null; without this
    // guard, scan(null) would fail deep inside the scanner with an opaque error.
    if (!this.dirPath) {
      throw new Error('CureEngine: no target directory set. Use curate(dirPath, prompt) or pass a directory to the constructor.');
    }

    if (progressCallback) progressCallback("Scanning filesystem context...");
    const scanResult = scan(this.dirPath, { maxDepth: 10 });

    if (progressCallback) progressCallback("Identifying functional domains & ancestry...");
    const sentryData = this.sentry.analyze(scanResult.files);

    if (progressCallback) progressCallback("Consulting Intuition Engine...");
    const intent = this.strategist.classify(prompt);

    if (progressCallback) progressCallback("Synthesizing structural plan...");
    const curativePlan = await this.planner.plan(intent, sentryData, prompt);

    if (progressCallback) progressCallback("Validating structural integrity...");
    const safePlan = this.architect.validate(curativePlan, sentryData);

    this.plan = safePlan;
    return safePlan;
  }

  /**
   * Legacy entry point for CLI.
   * @param {string} dirPath - Directory to organize.
   * @param {string} prompt - User intent.
   * @returns {Promise<Object>} Safe Curative Plan.
   */
  async curate(dirPath, prompt) {
    this.dirPath = dirPath;
    return await this.generatePlan(prompt);
  }
}
|
|
69
|
+
|
|
70
|
+
module.exports = CureEngine;
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const { scan, formatBytes } = require('../scanner');
|
|
4
|
+
const crypto = require('crypto');
|
|
5
|
+
const fs = require('fs');
|
|
6
|
+
|
|
7
|
+
/**
|
|
8
|
+
* DedupeEngine — specialized engine for finding and removing duplicates
|
|
9
|
+
*/
|
|
10
|
+
class DedupeEngine {
  constructor() {}

  /**
   * Find duplicates in a directory.
   * @param {string} dirPath - Directory to scan (up to 10 levels deep).
   * @returns {Object} Duplicate report { path, sets, totalWasted, totalWastedHuman, duplicates }
   */
  find(dirPath) {
    const result = scan(dirPath, { maxDepth: 10 });
    const map = new Map();
    const duplicates = [];
    let totalWasted = 0;

    for (const file of result.files) {
      // Heuristic-based match (size + name). Content checksums would be exact
      // but slow; for a fast CLI this heuristic is the pragmatic trade-off.
      const key = `${file.size}-${file.name}`;
      if (map.has(key)) {
        const original = map.get(key);
        duplicates.push({
          original,
          duplicate: file
        });
        totalWasted += file.size;
      } else {
        map.set(key, file);
      }
    }

    return {
      path: dirPath,
      sets: duplicates.length,
      totalWasted,
      totalWastedHuman: formatBytes(totalWasted),
      duplicates
    };
  }

  /**
   * Remove duplicates based on a report produced by find().
   * Errors on individual files are collected, not thrown, so one locked file
   * does not abort the whole cleanup.
   * @param {Object} report - Report with a `duplicates` array of { original, duplicate }.
   * @returns {Promise<Object>} Summary { deleted, freed, freedHuman, errors }.
   */
  async clean(report) {
    let deleted = 0;
    let freed = 0;
    const errors = [];

    for (const set of report.duplicates) {
      try {
        // FIX: use the promise-based unlink instead of unlinkSync so this async
        // method does not block the event loop while deleting many files.
        await fs.promises.unlink(set.duplicate.path);
        deleted++;
        freed += set.duplicate.size;
      } catch (err) {
        errors.push(`${set.duplicate.name}: ${err.message}`);
      }
    }

    return {
      deleted,
      freed,
      freedHuman: formatBytes(freed),
      errors
    };
  }
}
|
|
76
|
+
|
|
77
|
+
module.exports = DedupeEngine;
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
3
|
+
* FILEMAYOR v3.5 — EXPLAIN ENGINE
|
|
4
|
+
* The Diagnostic UX: "The doctor for your filesystem."
|
|
5
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
'use strict';
|
|
9
|
+
|
|
10
|
+
const { scan } = require('../scanner');
|
|
11
|
+
const MetadataSentry = require('../ai/sentry');
|
|
12
|
+
|
|
13
|
+
class ExplainEngine {
  constructor() {
    // Clustering backend used to group files into functional domains.
    this.sentry = new MetadataSentry();
  }

  /**
   * Run a diagnostic scan and return structured insights.
   * @param {string} dirPath - Directory to analyze
   * @returns {Object} Diagnostic report with health score, clusters, and insights
   */
  run(dirPath) {
    // Gather raw filesystem data, then layer analysis passes on top of it.
    const scanResult = scan(dirPath, { maxDepth: 10 });
    const sentryData = this.sentry.analyze(scanResult.files);
    const duplicates = this._findDuplicates(scanResult.files);
    const insights = this._generateInsights(sentryData, duplicates);
    const health = this._calculateHealth(sentryData, duplicates);

    let duplicateSize = 0;
    for (const dupe of duplicates) duplicateSize += dupe.size;

    return {
      title: 'FILEMAYOR DIAGNOSIS',
      path: dirPath,
      health,
      summary: sentryData.summary,
      clusters: sentryData.clusters,
      duplicates: duplicates.length,
      duplicateSize,
      insights
    };
  }

  /**
   * Detect likely duplicates using a name+size heuristic (no checksums).
   * The first file with a given key counts as the original; later matches
   * are reported as duplicates.
   */
  _findDuplicates(files) {
    const seenKeys = new Set();
    const dupes = [];

    for (const file of files) {
      const key = `${file.name}-${file.size}`;
      if (seenKeys.has(key)) {
        dupes.push(file);
      } else {
        seenKeys.add(key);
      }
    }
    return dupes;
  }

  /**
   * Translate cluster counts and duplicate findings into human-readable notes.
   */
  _generateInsights(data, duplicates) {
    const { clusters } = data;
    const insights = [];

    const screenshotCount = clusters.screenshot ? clusters.screenshot.count : 0;
    if (screenshotCount > 10) {
      insights.push(`${screenshotCount} screenshots scattered in folder.`);
    }

    const installerCount = clusters.executable ? clusters.executable.count : 0;
    if (installerCount > 5) {
      insights.push(`${installerCount} installers consuming space.`);
    }

    if (duplicates.length > 0) {
      insights.push(`${duplicates.length} duplicate files detected.`);
    }

    const archiveCount = clusters.archive ? clusters.archive.count : 0;
    if (archiveCount > 3) {
      insights.push(`Multiple old ZIP/RAR archives identified.`);
    }

    return insights;
  }

  /**
   * Score folder health from 0-100 based on duplicate ratio and category sprawl.
   * Empty folders are trivially EXCELLENT.
   */
  _calculateHealth(data, duplicates) {
    const totalFiles = data.summary.totalFiles;
    if (totalFiles === 0) return { score: 100, label: 'EXCELLENT', color: '#10b981' };

    // Deduct up to 30 points for duplicates, up to 20 for category sprawl.
    let deductions = 0;
    deductions += Math.min(30, (duplicates.length / totalFiles) * 100);
    deductions += Math.min(30, data.summary.categories.length > 8 ? 20 : 0); // too many categories in one place

    const score = Math.max(0, 100 - Math.round(deductions));

    if (score < 40) return { score, label: 'POOR', color: '#ef4444' };
    if (score < 75) return { score, label: 'FAIR', color: '#f59e0b' };
    return { score, label: 'GOOD', color: '#10b981' };
  }
}
|
|
113
|
+
|
|
114
|
+
module.exports = ExplainEngine;
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
3
|
+
* FILEMAYOR v3.5 — PREVIEW ENGINE
|
|
4
|
+
* Formats Curative Plans for visualization.
|
|
5
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
'use strict';
|
|
9
|
+
|
|
10
|
+
class PreviewEngine {
  /**
   * Print a human-readable preview of a Curative Plan to the console.
   * @param {Object} plan - Plan with { narrative, confidence, plan: [...] }
   */
  showInConsole(plan) {
    const steps = plan.plan;

    console.log('\n--- CURATIVE PLAN PREVIEW ---');
    console.log(`\nNARRATIVE: ${plan.narrative || 'No explanation provided.'}`);
    console.log(`CONFIDENCE: ${plan.confidence || 0}%`);

    console.log('\nPROPOSED MOVES:');
    if (steps.length === 0) {
      console.log(' (No changes suggested)');
    } else {
      let counter = 0;
      for (const step of steps) {
        counter += 1;
        console.log(` [${counter}] ${step.source}`);
        console.log(` → ${step.destination}`);
        console.log(` Reason: ${step.reason || 'Organizing'}`);
      }
    }

    console.log(`\nTotal actions: ${steps.length}`);
    console.log('Run with `apply` to execute these changes.');
    console.log('-----------------------------\n');
  }

  /**
   * Shape a plan into a UI-friendly payload.
   * @param {Object} plan
   * @returns {Object} Shallow copy of the plan plus timestamp and move summary
   */
  formatForUI(plan) {
    const payload = Object.assign({}, plan);
    payload.timestamp = Date.now();
    payload.summary = `${plan.plan.length} files to move`;
    return payload;
  }
}
|
|
48
|
+
|
|
49
|
+
module.exports = PreviewEngine;
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
5
|
+
* FILEMAYOR FS v3.0 — INTENT-READY FILESYSTEM ABSTRACTION
|
|
6
|
+
* Async, safe, transactional, and fully rollbackable.
|
|
7
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
'use strict';
|
|
11
|
+
|
|
12
|
+
const fs = require('fs').promises;
|
|
13
|
+
const fssync = require('fs');
|
|
14
|
+
const path = require('path');
|
|
15
|
+
const os = require('os');
|
|
16
|
+
const crypto = require('crypto');
|
|
17
|
+
const { validatePath, isFileSafe, isDirSafe, canRead, canWrite } = require('./security');
|
|
18
|
+
const { formatBytes } = require('./scanner');
|
|
19
|
+
|
|
20
|
+
class FileMayorFS {
  /**
   * Filesystem abstraction with dry-run support, trash-based deletes, and a
   * session + persistent operation journal that enables rollback.
   *
   * @param {Object} [options]
   * @param {boolean} [options.useJournal=true] - Record each op in the journals.
   * @param {boolean} [options.dryRun=false] - Report ops without touching disk.
   * @param {string} [options.trashPath] - Where delete() parks files (default: OS tmpdir).
   * @param {string} [options.journalPath] - Persistent journal file (default: home dir).
   */
  constructor(options = {}) {
    this.options = {
      useJournal: true,
      dryRun: false,
      trashPath: path.join(os.tmpdir(), 'filemayor_trash'),
      journalPath: path.join(os.homedir(), '.filemayor-master-journal.json'),
      ...options
    };
    // In-memory journal for this session only; cleared by rollback().
    this.sessionJournal = [];
    // Running counters for reporting; never reset within a session.
    this.stats = {
      opsSucceeded: 0,
      opsFailed: 0,
      totalBytesProcessed: 0,
      filesMoved: 0,
      filesDeleted: 0
    };
    // Trash dir is created eagerly (sync) so delete() can always rename into it.
    fssync.mkdirSync(this.options.trashPath, { recursive: true });
  }

  /** ------------------------
   * Enhanced Snapshot
   * -------------------------*/
  /**
   * Capture size/mtime metadata for a file before mutating it.
   * NOTE: `hash` digests only path + mtime, NOT file contents — it identifies
   * the snapshot, it does not verify data integrity.
   * Falls back to a bare { path } if the file cannot be stat'ed.
   * @param {string} filePath
   * @returns {Promise<Object>} Snapshot { path, size?, mtime?, hash? }
   */
  async createSnapshot(filePath) {
    try {
      const stats = await fs.stat(filePath);
      return {
        path: filePath,
        size: stats.size,
        mtime: stats.mtimeMs,
        hash: crypto.createHash('sha256').update(filePath + stats.mtimeMs).digest('hex')
      };
    } catch {
      return { path: filePath };
    }
  }

  /** ------------------------
   * Async Move
   * -------------------------*/
  /**
   * Safely move a file, validating both endpoints first.
   * Order matters: safety checks → snapshot → dry-run exit → mutation → journal.
   * @param {string} source - Existing file path (must pass isFileSafe).
   * @param {string} destination - Target path (must pass validatePath).
   * @returns {Promise<Object>} Result { source, destination, action, status, error? }
   * @throws {Error} On unsafe source, invalid destination, or filesystem failure.
   */
  async move(source, destination) {
    const result = { source, destination, action: 'move', status: 'pending' };

    const sourceCheck = isFileSafe(source);
    if (!sourceCheck.safe) throw new Error(`Source unsafe: ${sourceCheck.reason}`);

    const destValidation = validatePath(destination);
    if (!destValidation.valid) throw new Error(`Destination invalid: ${destValidation.error}`);

    // Snapshot is taken before the dry-run exit so dry-run stats stay untouched
    // but a real move records the pre-move state for the journal.
    const snapshot = await this.createSnapshot(source);

    if (this.options.dryRun) {
      result.status = 'dry-run';
      return result;
    }

    try {
      // Destination directory may not exist yet; create the whole chain.
      const destDir = path.dirname(destination);
      await fs.mkdir(destDir, { recursive: true });

      try {
        await fs.rename(source, destination);
      } catch (err) {
        // EXDEV: rename cannot cross filesystem boundaries — fall back to
        // copy-then-unlink, which works across devices.
        if (err.code === 'EXDEV') {
          await fs.copyFile(source, destination);
          await fs.unlink(source);
        } else throw err;
      }

      // Journal only after the mutation succeeded, so rollback never tries
      // to undo an operation that never happened.
      result.status = 'success';
      this.stats.opsSucceeded++;
      this.stats.filesMoved++;
      this.stats.totalBytesProcessed += snapshot.size || 0;
      await this._logToJournal(source, destination, 'move', snapshot);

    } catch (err) {
      result.status = 'error';
      result.error = err.message;
      this.stats.opsFailed++;
      throw err;
    }

    return result;
  }

  /** ------------------------
   * Safe Delete with Trash
   * -------------------------*/
  /**
   * "Delete" a file by renaming it into the trash directory (timestamped to
   * avoid collisions), keeping it recoverable via rollback().
   * NOTE(review): unlike move(), there is no EXDEV fallback here — renaming
   * into a trash dir on a different device would fail; confirm intended.
   * @param {string} target - File to delete (must pass isFileSafe).
   * @returns {Promise<Object>} Result { target, status }
   * @throws {Error} On unsafe target or rename failure.
   */
  async delete(target) {
    const check = isFileSafe(target);
    if (!check.safe) throw new Error(`Target unsafe: ${check.reason}`);

    const snapshot = await this.createSnapshot(target);

    if (this.options.dryRun) return { target, status: 'dry-run' };

    const trashFile = path.join(this.options.trashPath, path.basename(target) + '-' + Date.now());
    await fs.rename(target, trashFile);

    this.stats.opsSucceeded++;
    this.stats.filesDeleted++;
    this.stats.totalBytesProcessed += snapshot.size || 0;

    // Journal destination is the trash location so rollback can restore it.
    await this._logToJournal(target, trashFile, 'delete', snapshot);

    return { target, status: 'success' };
  }

  /** ------------------------
   * Journaling
   * -------------------------*/
  /**
   * Append an entry to the session journal and the persistent disk journal.
   * Disk I/O is synchronous and best-effort: failures are logged, never thrown,
   * so a broken journal file does not abort the filesystem operation itself.
   * The disk journal is capped at the most recent 1000 entries.
   * @param {string} source - Original path.
   * @param {string} destination - New path (or trash path for deletes).
   * @param {string} action - 'move' or 'delete'.
   * @param {Object} snapshot - Pre-operation snapshot from createSnapshot().
   */
  async _logToJournal(source, destination, action, snapshot) {
    if (!this.options.useJournal) return;

    const entry = {
      id: crypto.randomUUID(),
      timestamp: new Date().toISOString(),
      action,
      source,
      destination,
      snapshot
    };

    this.sessionJournal.push(entry);

    // Persistent Journaling (v3.5)
    try {
      let diskJournal = [];
      if (fssync.existsSync(this.options.journalPath)) {
        diskJournal = JSON.parse(fssync.readFileSync(this.options.journalPath, 'utf8'));
      }
      diskJournal.push(entry);
      // Save last 1000 ops globally
      fssync.writeFileSync(this.options.journalPath, JSON.stringify(diskJournal.slice(-1000), null, 2));
    } catch (err) {
      console.error(`[FAIL] Disk Journal write failed: ${err.message}`);
    }
  }

  /** ------------------------
   * Rollback
   * -------------------------*/
  /**
   * Undo recorded operations in reverse (newest-first) order.
   * Falls back to the on-disk journal when the in-memory session journal is
   * empty. Both 'move' and 'delete' are undone by renaming destination back
   * to source — delete() parks files in trash, so they remain renameable.
   * Individual failures are logged and skipped so one missing file does not
   * abort the rest of the rollback.
   * NOTE(review): the on-disk journal is not cleared after a disk-based
   * rollback — a later rollback could replay the same entries; confirm
   * whether that is intended.
   */
  async rollback() {
    // Use session journal if available, otherwise try to load from disk
    let reversed = [...this.sessionJournal].reverse();

    if (reversed.length === 0 && fssync.existsSync(this.options.journalPath)) {
      console.log(`[SYS] Session journal empty. Attempting disk rollback...`);
      const diskJournal = JSON.parse(fssync.readFileSync(this.options.journalPath, 'utf8'));
      reversed = diskJournal.reverse();
    }

    console.log(`[SYS] Rolling back ${reversed.length} operations...`);

    for (const entry of reversed) {
      try {
        if (entry.action === 'move') {
          // Only restore if the moved file is still where we put it.
          if (fssync.existsSync(entry.destination)) {
            await fs.rename(entry.destination, entry.source);
          }
        } else if (entry.action === 'delete') {
          // Restore from trash back to the original location.
          if (fssync.existsSync(entry.destination)) {
            await fs.rename(entry.destination, entry.source);
          }
        }
      } catch (err) {
        console.error(`[FAIL] Rollback failed for ${entry.source}: ${err.message}`);
      }
    }

    this.sessionJournal = [];
    console.log(`[SYS] Rollback complete.`);
  }

  /**
   * @returns {Array} The in-memory session journal (live reference, not a copy).
   */
  getJournal() {
    return this.sessionJournal;
  }
}
|
|
198
|
+
|
|
199
|
+
module.exports = FileMayorFS;
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
5
|
+
* FILEMAYOR CORE — LOGIC GUARDRAIL
|
|
6
|
+
*
|
|
7
|
+
* The final layer of the Chevza Doctrine: INTENTIONALITY.
|
|
8
|
+
* Sits between the AI Intuition Engine and the Jailer.
|
|
9
|
+
* Forces a dry-run preview + user confirmation for high-volume
|
|
10
|
+
* or destructive-pattern operations.
|
|
11
|
+
*
|
|
12
|
+
* Zero-dependency: uses native `readline/promises`.
|
|
13
|
+
* ═══════════════════════════════════════════════════════════════════
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
'use strict';
|
|
17
|
+
|
|
18
|
+
const readline = require('readline/promises');
|
|
19
|
+
const { stdin: input, stdout: output } = require('process');
|
|
20
|
+
const path = require('path');
|
|
21
|
+
|
|
22
|
+
class LogicGuardrail {
  /**
   * @param {number} safetyLimit - Max ops before triggering confirmation (default: 50)
   */
  constructor(safetyLimit = 50) {
    this.safetyLimit = safetyLimit;
  }

  /**
   * Analyze a batch plan and require user confirmation if suspicious.
   * Small, non-destructive batches pass automatically; anything over the
   * safety limit (or matching a destructive pattern) gets a dry-run summary
   * and an interactive y/N prompt.
   * @param {Array} batch - Array of { source, destination, type?, reason? }
   * @returns {Promise<boolean>} Whether the user approved the batch
   */
  async verifyBatch(batch) {
    if (!Array.isArray(batch) || batch.length === 0) return true;

    const operationCount = batch.length;
    const destructive = this.isDestructivePattern(batch);

    // Auto-pass for small, safe batches
    if (operationCount < this.safetyLimit && !destructive) return true;

    const divider = '═'.repeat(60);

    // ─── Dry Run Summary ─────────────────────────────────────
    console.log('\n' + divider);
    console.log('⚠️ [LOGIC GUARDRAIL] High-Volume Operation Detected');
    console.log(divider);
    console.log(` Total operations: ${operationCount}`);
    console.log(` Safety limit: ${this.safetyLimit}`);

    if (destructive) {
      console.log('\n 🔴 WARNING: Destructive pattern detected!');
      console.log(' Many files are being moved to a single destination.');
      console.log(' This could indicate an AI "Semantic Wipe" or "Over-Organizer" loop.');
    }

    // Show preview of first 5 operations
    console.log('\n Preview:');
    for (const op of batch.slice(0, 5)) {
      console.log(` ${path.basename(op.source || '')} → ${path.dirname(op.destination || '')}`);
    }
    if (operationCount > 5) {
      console.log(` ... and ${operationCount - 5} more.`);
    }
    console.log(divider);

    // ─── User Confirmation (Native Node.js) ─────────────────
    const rl = readline.createInterface({ input, output });
    try {
      const answer = await rl.question('\n Proceed with this batch? (y/N): ');
      return answer.toLowerCase().trim() === 'y';
    } finally {
      rl.close();
    }
  }

  /**
   * Detect if the AI is funneling many files into a single destination.
   * This catches "Over-Organizer Loops" and "Semantic Wipes."
   * @param {Array} batch - The operation batch
   * @returns {boolean} True if the pattern looks destructive
   */
  isDestructivePattern(batch) {
    if (batch.length < 10) return false;

    // Collect the distinct (case-insensitive) destination directories.
    const uniqueDirs = new Set();
    for (const op of batch) {
      uniqueDirs.add(path.dirname(op.destination || '').toLowerCase());
    }

    // If 10+ files all going to 1 directory, it's suspicious
    return (batch.length / uniqueDirs.size) > 10;
  }

  /**
   * Detect "Nesting Hell" — AI creating deeply nested structures
   * @param {Array} batch - The operation batch
   * @param {number} maxDepth - Maximum allowed nesting depth (default: 8)
   * @returns {Array} Entries that exceed the nesting depth
   */
  detectNestingHell(batch, maxDepth = 8) {
    const tooDeep = [];
    for (const op of batch) {
      // Normalize Windows separators, then count non-empty path segments.
      const normalized = (op.destination || '').replace(/\\/g, '/');
      const depth = normalized.split('/').filter((s) => s.length > 0).length;
      if (depth > maxDepth) tooDeep.push(op);
    }
    return tooDeep;
  }
}
|
|
114
|
+
|
|
115
|
+
module.exports = LogicGuardrail;
|