filemayor 2.0.5 → 3.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,199 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * ═══════════════════════════════════════════════════════════════════
5
+ * FILEMAYOR FS v3.0 — INTENT-READY FILESYSTEM ABSTRACTION
6
+ * Async, safe, transactional, and fully rollbackable.
7
+ * ═══════════════════════════════════════════════════════════════════
8
+ */
9
+
10
+ 'use strict';
11
+
12
+ const fs = require('fs').promises;
13
+ const fssync = require('fs');
14
+ const path = require('path');
15
+ const os = require('os');
16
+ const crypto = require('crypto');
17
+ const { validatePath, isFileSafe, isDirSafe, canRead, canWrite } = require('./security');
18
+ const { formatBytes } = require('./scanner');
19
+
20
class FileMayorFS {
  /**
   * Intent-ready filesystem abstraction: async, dry-runnable, journaled,
   * and rollbackable. Destructive operations are diverted to a trash
   * directory and recorded in a journal so they can be reversed.
   *
   * @param {Object} [options]
   * @param {boolean} [options.useJournal=true] - Persist ops to the disk journal.
   * @param {boolean} [options.dryRun=false] - Report what would happen without touching disk.
   * @param {string} [options.trashPath] - Where delete() parks files (default: under os.tmpdir()).
   * @param {string} [options.journalPath] - Persistent journal file (default: in the home dir).
   */
  constructor(options = {}) {
    this.options = {
      useJournal: true,
      dryRun: false,
      trashPath: path.join(os.tmpdir(), 'filemayor_trash'),
      journalPath: path.join(os.homedir(), '.filemayor-master-journal.json'),
      ...options
    };
    this.sessionJournal = [];
    this.stats = {
      opsSucceeded: 0,
      opsFailed: 0,
      totalBytesProcessed: 0,
      filesMoved: 0,
      filesDeleted: 0
    };
    // The trash directory must exist before the first delete().
    fssync.mkdirSync(this.options.trashPath, { recursive: true });
  }

  /**
   * Capture a lightweight snapshot of a file (size, mtime, identity hash).
   * NOTE: the hash covers path + mtime only — it is a cheap change marker,
   * NOT a content checksum.
   *
   * @param {string} filePath
   * @returns {Promise<Object>} `{ path, size, mtime, hash }`, or just
   *   `{ path }` when the file cannot be stat'ed.
   */
  async createSnapshot(filePath) {
    try {
      const stats = await fs.stat(filePath);
      return {
        path: filePath,
        size: stats.size,
        mtime: stats.mtimeMs,
        hash: crypto.createHash('sha256').update(filePath + stats.mtimeMs).digest('hex')
      };
    } catch {
      // Missing/unreadable file: still record the path so journaling works.
      return { path: filePath };
    }
  }

  /**
   * Rename that survives cross-device moves: falls back to copy + unlink
   * on EXDEV. Any other error is rethrown untouched.
   *
   * @param {string} source
   * @param {string} destination
   */
  async _safeRename(source, destination) {
    try {
      await fs.rename(source, destination);
    } catch (err) {
      if (err.code === 'EXDEV') {
        await fs.copyFile(source, destination);
        await fs.unlink(source);
      } else {
        throw err;
      }
    }
  }

  /**
   * Move a file, creating destination directories as needed.
   * Honors dryRun; journals successful moves so they can be rolled back.
   *
   * @param {string} source
   * @param {string} destination
   * @returns {Promise<Object>} Result record with a `status` field.
   * @throws {Error} If either path fails validation or the move fails.
   */
  async move(source, destination) {
    const result = { source, destination, action: 'move', status: 'pending' };

    const sourceCheck = isFileSafe(source);
    if (!sourceCheck.safe) throw new Error(`Source unsafe: ${sourceCheck.reason}`);

    const destValidation = validatePath(destination);
    if (!destValidation.valid) throw new Error(`Destination invalid: ${destValidation.error}`);

    // Snapshot BEFORE the move so rollback has the original metadata.
    const snapshot = await this.createSnapshot(source);

    if (this.options.dryRun) {
      result.status = 'dry-run';
      return result;
    }

    try {
      await fs.mkdir(path.dirname(destination), { recursive: true });
      await this._safeRename(source, destination);

      result.status = 'success';
      this.stats.opsSucceeded++;
      this.stats.filesMoved++;
      this.stats.totalBytesProcessed += snapshot.size || 0;
      await this._logToJournal(source, destination, 'move', snapshot);
    } catch (err) {
      result.status = 'error';
      result.error = err.message;
      this.stats.opsFailed++;
      throw err;
    }

    return result;
  }

  /**
   * Safe delete: moves the target into the trash directory (with a
   * timestamp suffix) instead of unlinking it, so rollback() can restore it.
   *
   * @param {string} target
   * @returns {Promise<Object>} `{ target, status }`.
   * @throws {Error} If the target fails validation or the move-to-trash fails.
   */
  async delete(target) {
    const check = isFileSafe(target);
    if (!check.safe) throw new Error(`Target unsafe: ${check.reason}`);

    const snapshot = await this.createSnapshot(target);

    if (this.options.dryRun) return { target, status: 'dry-run' };

    const trashFile = path.join(this.options.trashPath, path.basename(target) + '-' + Date.now());

    try {
      // BUG FIX: the trash dir lives under os.tmpdir(), which is often on a
      // different filesystem than the target — a bare fs.rename() fails with
      // EXDEV there (move() already handled this; delete() did not).
      await this._safeRename(target, trashFile);
    } catch (err) {
      // BUG FIX: failed deletes previously left stats.opsFailed untouched.
      this.stats.opsFailed++;
      throw err;
    }

    this.stats.opsSucceeded++;
    this.stats.filesDeleted++;
    this.stats.totalBytesProcessed += snapshot.size || 0;

    await this._logToJournal(target, trashFile, 'delete', snapshot);

    return { target, status: 'success' };
  }

  /**
   * Record an operation in the in-memory session journal and append it to
   * the persistent disk journal (capped at the last 1000 entries).
   * Disk-journal failures are logged but never abort the operation itself.
   */
  async _logToJournal(source, destination, action, snapshot) {
    if (!this.options.useJournal) return;

    const entry = {
      id: crypto.randomUUID(),
      timestamp: new Date().toISOString(),
      action,
      source,
      destination,
      snapshot
    };

    this.sessionJournal.push(entry);

    // Persistent Journaling (v3.5). Synchronous I/O keeps the
    // read-modify-write cycle from interleaving with concurrent ops.
    try {
      let diskJournal = [];
      if (fssync.existsSync(this.options.journalPath)) {
        diskJournal = JSON.parse(fssync.readFileSync(this.options.journalPath, 'utf8'));
      }
      diskJournal.push(entry);
      // Save last 1000 ops globally
      fssync.writeFileSync(this.options.journalPath, JSON.stringify(diskJournal.slice(-1000), null, 2));
    } catch (err) {
      console.error(`[FAIL] Disk Journal write failed: ${err.message}`);
    }
  }

  /**
   * Undo journaled operations in reverse order. Falls back to the disk
   * journal when the session journal is empty. Entries are restored
   * independently; individual failures are logged and skipped.
   */
  async rollback() {
    // Use session journal if available, otherwise try to load from disk
    let reversed = [...this.sessionJournal].reverse();

    if (reversed.length === 0 && fssync.existsSync(this.options.journalPath)) {
      console.log(`[SYS] Session journal empty. Attempting disk rollback...`);
      const diskJournal = JSON.parse(fssync.readFileSync(this.options.journalPath, 'utf8'));
      reversed = diskJournal.reverse();
    }

    console.log(`[SYS] Rolling back ${reversed.length} operations...`);

    for (const entry of reversed) {
      try {
        // Both 'move' and 'delete' entries restore destination -> source.
        if ((entry.action === 'move' || entry.action === 'delete') &&
            fssync.existsSync(entry.destination)) {
          // BUG FIX: the original parent directory may have been removed
          // since the op ran; recreate it so the restore cannot fail on a
          // missing parent. The restore is also EXDEV-safe now.
          await fs.mkdir(path.dirname(entry.source), { recursive: true });
          await this._safeRename(entry.destination, entry.source);
        }
      } catch (err) {
        console.error(`[FAIL] Rollback failed for ${entry.source}: ${err.message}`);
      }
    }

    this.sessionJournal = [];
    console.log(`[SYS] Rollback complete.`);
  }

  /** @returns {Array} The in-memory journal entries for this session. */
  getJournal() {
    return this.sessionJournal;
  }
}
198
+
199
+ module.exports = FileMayorFS;
@@ -0,0 +1,115 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * ═══════════════════════════════════════════════════════════════════
5
+ * FILEMAYOR CORE — LOGIC GUARDRAIL
6
+ *
7
+ * The final layer of the Chevza Doctrine: INTENTIONALITY.
8
+ * Sits between the AI Intuition Engine and the Jailer.
9
+ * Forces a dry-run preview + user confirmation for high-volume
10
+ * or destructive-pattern operations.
11
+ *
12
+ * Zero-dependency: uses native `readline/promises`.
13
+ * ═══════════════════════════════════════════════════════════════════
14
+ */
15
+
16
+ 'use strict';
17
+
18
+ const readline = require('readline/promises');
19
+ const { stdin: input, stdout: output } = require('process');
20
+ const path = require('path');
21
+
22
class LogicGuardrail {
  /**
   * Dry-run + confirmation gate for batch file operations. Auto-approves
   * small, non-suspicious batches; otherwise prints a preview and asks the
   * user for explicit confirmation on stdin (native readline/promises).
   *
   * @param {number} safetyLimit - Max ops before triggering confirmation (default: 50)
   */
  constructor(safetyLimit = 50) {
    this.safetyLimit = safetyLimit;
  }

  /**
   * Analyze a batch plan and require user confirmation if suspicious.
   * @param {Array} batch - Array of { source, destination, type?, reason? }
   * @returns {Promise<boolean>} Whether the user approved the batch
   */
  async verifyBatch(batch) {
    if (!Array.isArray(batch) || batch.length === 0) return true;

    const operationCount = batch.length;
    // Computed once — the original ran this O(n) scan up to three times.
    const destructive = this.isDestructivePattern(batch);

    // Auto-pass for small, safe batches
    if (operationCount < this.safetyLimit && !destructive) {
      return true;
    }

    // ─── Dry Run Summary ─────────────────────────────────────
    console.log('\n' + '═'.repeat(60));
    console.log('⚠️ [LOGIC GUARDRAIL] High-Volume Operation Detected');
    console.log('═'.repeat(60));
    console.log(`  Total operations: ${operationCount}`);
    console.log(`  Safety limit: ${this.safetyLimit}`);

    if (destructive) {
      console.log('\n  🔴 WARNING: Destructive pattern detected!');
      console.log('  Many files are being moved to a single destination.');
      console.log('  This could indicate an AI "Semantic Wipe" or "Over-Organizer" loop.');
    }

    // Show preview of first 5 operations
    console.log('\n  Preview:');
    for (const op of batch.slice(0, 5)) {
      const srcName = path.basename(op.source || '');
      const dstDir = path.dirname(op.destination || '');
      console.log(`    ${srcName} → ${dstDir}`);
    }
    if (operationCount > 5) {
      console.log(`    ... and ${operationCount - 5} more.`);
    }
    console.log('═'.repeat(60));

    // ─── User Confirmation (Native Node.js) ─────────────────
    const rl = readline.createInterface({ input, output });
    try {
      const answer = await rl.question('\n  Proceed with this batch? (y/N): ');
      return answer.toLowerCase().trim() === 'y';
    } finally {
      // Always release stdin, even if question() rejects.
      rl.close();
    }
  }

  /**
   * Detect if the AI is funneling many files into a single destination.
   * This catches "Over-Organizer Loops" and "Semantic Wipes."
   * @param {Array} batch - The operation batch
   * @returns {boolean} True if the pattern looks destructive
   */
  isDestructivePattern(batch) {
    // Too few operations to call a pattern.
    if (batch.length < 10) return false;

    const destDirs = batch.map((b) => path.dirname(b.destination || '').toLowerCase());
    const uniqueDests = new Set(destDirs).size;

    // An average of more than 10 files per destination dir is suspicious
    // (e.g. 10+ files all going to 1 directory).
    return (batch.length / uniqueDests) > 10;
  }

  /**
   * Detect "Nesting Hell" — AI creating deeply nested structures.
   * NOTE(review): depth counts ALL path segments from the root, not depth
   * relative to a base directory, so absolute destinations start with
   * several segments already — confirm this is the intended threshold.
   *
   * @param {Array} batch - The operation batch
   * @param {number} maxDepth - Maximum allowed nesting depth (default: 8)
   * @returns {Array} Entries that exceed the nesting depth
   */
  detectNestingHell(batch, maxDepth = 8) {
    return batch.filter((op) => {
      const dest = op.destination || '';
      // Normalize Windows separators, drop empty segments from '//' or
      // leading '/'.
      const segments = dest.replace(/\\/g, '/').split('/').filter((s) => s.length > 0);
      return segments.length > maxDepth;
    });
  }
}
114
+
115
+ module.exports = LogicGuardrail;
package/core/index.js CHANGED
@@ -11,6 +11,8 @@
11
11
 
12
12
  const scanner = require('./scanner');
13
13
  const organizer = require('./organizer');
14
+ const IntentInterpreter = require('./intent-interpreter');
15
+ const FileMayorFS = require('./fs-abstraction');
14
16
  const cleaner = require('./cleaner');
15
17
  const watcher = require('./watcher');
16
18
  const config = require('./config');
@@ -18,6 +20,22 @@ const reporter = require('./reporter');
18
20
  const categories = require('./categories');
19
21
  const security = require('./security');
20
22
  const sopParser = require('./sop-parser');
23
+ const analyzer = require('./analyzer');
24
+ const license = require('./license');
25
+ const ExplainEngine = require('./engine/explain-engine');
26
+ const CureEngine = require('./engine/cure-engine');
27
+ const ApplyEngine = require('./engine/apply-engine');
28
+ const PreviewEngine = require('./engine/preview-engine');
29
+ const DedupeEngine = require('./engine/dedupe-engine');
30
+ const MetadataStore = require('./metadata-store');
31
+ const strategist = require('./ai/strategist');
32
+ const sentry = require('./ai/sentry');
33
+ const planner = require('./ai/planner');
34
+ const validator = require('./ai/validator');
35
+ const { FileMayorJailer, enforceUserSpace } = require('./jailer');
36
+ const Vault = require('./vault');
37
+ const LogicGuardrail = require('./guardrail');
38
+ const { initEmergencyHalt, updateJournalRef, clearEmergencyHalt } = require('./emergency-halt');
21
39
 
22
40
  module.exports = {
23
41
  // Scanner
@@ -26,6 +44,7 @@ module.exports = {
26
44
  scanByCategory: scanner.scanByCategory,
27
45
  scanSummary: scanner.scanSummary,
28
46
  formatBytes: scanner.formatBytes,
47
+ analyzeDirectory: analyzer.analyzeDirectory,
29
48
 
30
49
  // Organizer
31
50
  organize: organizer.organize,
@@ -35,6 +54,12 @@ module.exports = {
35
54
  loadJournal: organizer.loadJournal,
36
55
  NAMING_CONVENTIONS: organizer.NAMING_CONVENTIONS,
37
56
 
57
+ // Intent Interpreter
58
+ IntentInterpreter: IntentInterpreter,
59
+
60
+ // FS Abstraction
61
+ FileMayorFS: FileMayorFS,
62
+
38
63
  // Cleaner
39
64
  Cleaner: cleaner.Cleaner,
40
65
  findJunk: cleaner.findJunk,
@@ -74,6 +99,37 @@ module.exports = {
74
99
  rulesToConfig: sopParser.rulesToConfig,
75
100
  FILE_TYPE_ALIASES: sopParser.FILE_TYPE_ALIASES,
76
101
 
102
+ // License
103
+ activateLicense: license.activateLicense,
104
+ deactivateLicense: license.deactivateLicense,
105
+ getLicenseInfo: license.getLicenseInfo,
106
+ checkProFeature: license.checkProFeature,
107
+ checkBulkLimit: license.checkBulkLimit,
108
+ hasFeature: license.hasFeature,
109
+
110
+ // Intent Agents
111
+ IntentStrategist: strategist,
112
+ MetadataSentry: sentry,
113
+ CurativePlanner: planner,
114
+ SecurityArchitect: validator,
115
+
116
+ // Engines
117
+ ExplainEngine,
118
+ CureEngine,
119
+ ApplyEngine,
120
+ PreviewEngine,
121
+ DedupeEngine,
122
+ MetadataStore,
123
+
124
+ // Hardened Runtime
125
+ FileMayorJailer,
126
+ enforceUserSpace,
127
+ Vault,
128
+ LogicGuardrail,
129
+ initEmergencyHalt,
130
+ updateJournalRef,
131
+ clearEmergencyHalt,
132
+
77
133
  // Version
78
134
  VERSION: require('../package.json').version || '2.0.0'
79
135
  };
@@ -0,0 +1,158 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * ═══════════════════════════════════════════════════════════════════
5
+ * FILEMAYOR v3.0 — INTENT INTERPRETER
6
+ * AI Logic Layer: Bridges fuzzy human language and atomic execution.
7
+ * ═══════════════════════════════════════════════════════════════════
8
+ */
9
+
10
+ 'use strict';
11
+
12
+ const path = require('path');
13
+
14
+ const GEMINI_MODEL = 'gemini-2.0-flash';
15
+ const GEMINI_ENDPOINT = `https://generativelanguage.googleapis.com/v1beta/models/${GEMINI_MODEL}:generateContent`;
16
+
17
class IntentInterpreter {
  /**
   * AI logic layer: bridges fuzzy natural-language intent and an executable
   * file plan by calling the Gemini API and validating its JSON response.
   *
   * @param {string} [apiKey] - Gemini API key; falls back to GEMINI_API_KEY.
   */
  constructor(apiKey) {
    this.apiKey = apiKey || process.env.GEMINI_API_KEY;
    if (!this.apiKey) {
      console.warn('[WARN] GEMINI_API_KEY not found. AI features will be disabled.');
    }
  }

  /**
   * Interpret a user prompt against clustered telemetry
   * @param {string} userPrompt
   * @param {Object} clusters - From MetadataSentry
   * @param {Object} context
   * @returns {Promise<Object>} The Curative Plan
   * @throws {Error} If no API key is configured, the API call fails, or the
   *   response cannot be parsed as JSON.
   */
  async interpret(userPrompt, clusters, context = {}) {
    if (!this.apiKey) {
      throw new Error('API Key missing. Cannot interpret intent.');
    }

    const telemetry = this._summarizeFiles(clusters);
    const systemPrompt = this._getSystemPrompt(context);

    const response = await this._callGemini(systemPrompt, userPrompt, telemetry);
    const curativePlan = this._parseResponse(response);

    // [Production Grade] Confidence Gating.
    // BUG FIX: a missing or non-numeric confidence made `x < 60` evaluate
    // to false (NaN comparison) and slipped past the gate; anything not
    // provably >= 60 is now treated as low confidence.
    const confidence = Number(curativePlan.confidence);
    if (!Number.isFinite(confidence) || confidence < 60) {
      curativePlan.status = 'low_confidence';
      curativePlan.message = 'I am not entirely sure about this request. Could you clarify your intent?';
    }

    // [Production Grade] Plan Validation
    curativePlan.plan = this._validatePlan(curativePlan.plan);

    return curativePlan;
  }

  /**
   * Summarize clustered file list for AI ingestion.
   * Strips each sample down to the fields the model needs.
   */
  _summarizeFiles(clusters) {
    const summary = {};

    for (const cat in clusters) {
      summary[cat] = {
        count: clusters[cat].count,
        // BUG FIX: tolerate clusters without a samples array instead of
        // crashing on `.map` of undefined.
        samples: (clusters[cat].samples || []).map((s) => ({
          name: s.name,
          path: s.path,
          ancestry: s.ancestry,
          bundleId: s.bundleId
        }))
      };
    }

    return summary;
  }

  /**
   * Build the system prompt embedding the user's intent/strategy from
   * context. The prompt text is the model-facing contract — do not edit
   * casually.
   */
  _getSystemPrompt(context) {
    return `You are FileMayor v3.2, the "Intuition & Dependency Engine".
Your goal is to organize files while strictly preserving the integrity of projects and collections.

INPUT:
1. User Intent: "${context.intent}" (Strategy: "${context.strategy}")
2. Telemetry: Clustered file metadata including 'ancestry' and 'bundleId'.

GOLDEN RULES:
- RULE OF ANCESTRY: Respect parent directory names in 'ancestry'. If a file is in /Books/ or a project folder, DO NOT scatter it based on extension alone.
- ATOMIC BUNDLING: Files sharing a 'bundleId' (like music stems with a project file) MUST stay together.
- REFINE, DON'T DESTROY: A "Refine" intent means improving structure WITHIN a domain, not extracting files OUT of it.
- FORMAT: Return a VALID JSON object. Use absolute paths for source.

OUTPUT SCHEMA:
{
  "narrative": "A curative explanation of the structural refinement.",
  "plan": [
    { "source": "absolute/path", "destination": "suggested/target", "reason": "Why?" }
  ],
  "confidence": 0-100
}`;
  }

  /**
   * POST the prompt + telemetry to Gemini and return the raw text of the
   * first candidate (empty string if the response shape is unexpected).
   * @throws {Error} On non-2xx HTTP responses.
   */
  async _callGemini(systemPrompt, userPrompt, files) {
    const body = {
      contents: [{
        parts: [{
          text: `${systemPrompt}\n\nUSER INTENT: "${userPrompt}"\n\nFILES:\n${JSON.stringify(files, null, 2)}`
        }]
      }],
      generationConfig: {
        temperature: 0.1, // Even lower for higher determinism
        topP: 0.95,
        maxOutputTokens: 2048,
        responseMimeType: "application/json" // [Production Grade] Force JSON Mode
      }
    };

    // NOTE(review): the key travels in the query string and may end up in
    // request logs; consider the 'x-goog-api-key' header instead.
    const url = `${GEMINI_ENDPOINT}?key=${this.apiKey}`;
    const response = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      const err = await response.text();
      throw new Error(`Gemini API Error: ${response.status} - ${err}`);
    }

    const data = await response.json();
    return data?.candidates?.[0]?.content?.parts?.[0]?.text || '';
  }

  /**
   * Validate plan entries from AI response.
   * Filters out any entries missing required fields; entries without a
   * reason get a default one (mutates the entry in place).
   */
  _validatePlan(plan) {
    if (!Array.isArray(plan)) return [];
    return plan.filter((entry) => {
      if (!entry || typeof entry !== 'object') return false;
      if (!entry.source || typeof entry.source !== 'string') return false;
      if (!entry.destination || typeof entry.destination !== 'string') return false;
      if (!entry.reason) entry.reason = 'AI suggested move';
      return true;
    });
  }

  /**
   * Extract the first {...} span from the raw model text and parse it as
   * JSON (tolerates prose wrapping around the JSON object).
   * @throws {Error} When no JSON object is found or parsing fails.
   */
  _parseResponse(rawText) {
    try {
      const jsonMatch = rawText.match(/\{[\s\S]*\}/);
      if (!jsonMatch) throw new Error('No JSON found in AI response');
      return JSON.parse(jsonMatch[0]);
    } catch (err) {
      console.error('[FAIL] Failed to parse AI response:', rawText);
      throw new Error(`AI Parsing Error: ${err.message}`);
    }
  }
}
157
+
158
+ module.exports = IntentInterpreter;