@mrxkun/mcfast-mcp 4.0.6 → 4.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,127 @@
1
+ /**
2
+ * Audit Queue with Batching
3
+ * Buffers audit logs and sends them in batches to reduce HTTP overhead
4
+ */
5
+
6
+ import { colors } from './colors.js';
7
+
8
/**
 * Audit queue with batching.
 *
 * Buffers audit log entries in memory and ships them to the batch API in
 * groups to reduce per-entry HTTP overhead. Failed batches are pushed back
 * onto the queue and retried on the next flush cycle.
 */
export class AuditQueue {
  /**
   * @param {object} [options]
   * @param {number} [options.batchSize=5] - Entries per HTTP batch.
   * @param {number} [options.flushInterval=5000] - Background flush period (ms).
   * @param {string} [options.apiUrl] - Batch ingestion endpoint.
   * @param {string} [options.token] - Bearer token; falls back to MCFAST_TOKEN env var.
   * @param {boolean} [options.verbose=false] - Log queue activity to stderr.
   */
  constructor(options = {}) {
    this.queue = [];
    this.batchSize = options.batchSize || 5;
    this.flushInterval = options.flushInterval || 5000; // 5 seconds
    this.apiUrl = options.apiUrl || 'https://mcfast.vercel.app/api/v1/logs/batch';
    this.token = options.token || process.env.MCFAST_TOKEN;
    this.verbose = options.verbose || false;
    this.isFlushing = false; // guards against overlapping flushes re-queueing out of order

    // Periodic background flush. unref() so the timer alone does not keep
    // the Node.js process alive — otherwise 'beforeExit' could never fire.
    this.flushTimer = setInterval(() => this.flush(), this.flushInterval);
    this.flushTimer.unref?.();

    // Keep handler references so destroy() can unregister them; anonymous
    // closures here would leak listeners every time the singleton is reset.
    this.exitHandler = () => this.flushSync();
    this.signalHandler = (signal) => {
      this.flushSync();
      // Registering a signal handler suppresses the default termination, so
      // exit explicitly with the conventional 128+signum code.
      process.exit(signal === 'SIGINT' ? 130 : 143);
    };
    process.on('beforeExit', this.exitHandler);
    process.on('SIGINT', this.signalHandler);
    process.on('SIGTERM', this.signalHandler);
  }

  /**
   * Queue one audit entry, stamping `timestamp` unless the caller provided
   * one (caller's value wins via spread). Triggers an immediate flush once
   * a full batch is buffered.
   * @param {object} data - Arbitrary audit payload.
   */
  add(data) {
    const auditData = {
      timestamp: new Date().toISOString(),
      ...data
    };

    this.queue.push(auditData);

    if (this.verbose) {
      console.error(`${colors.dim}[AuditQueue] Added to queue. Size: ${this.queue.length}${colors.reset}`);
    }

    // Flush immediately if batch size reached. Deliberately fire-and-forget:
    // failures are re-queued inside flush() itself.
    if (this.queue.length >= this.batchSize) {
      void this.flush();
    }
  }

  /**
   * Send up to one batch to the API. Entries from a failed or errored
   * request are pushed back to the head of the queue for retry. No-op when
   * the queue is empty, no token is configured, or a flush is in flight.
   * @returns {Promise<void>}
   */
  async flush() {
    if (this.isFlushing) return; // prevent concurrent flushes interleaving
    if (this.queue.length === 0) return;
    if (!this.token) return; // Skip if no token

    // Take batch from queue
    const batch = this.queue.splice(0, this.batchSize);
    this.isFlushing = true;

    try {
      if (this.verbose) {
        console.error(`${colors.dim}[AuditQueue] Flushing ${batch.length} logs...${colors.reset}`);
      }

      // Send batch using the batch API
      const response = await fetch(this.apiUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.token}`
        },
        body: JSON.stringify({ logs: batch, stream: false })
      });

      if (!response.ok) {
        // Put back in queue if failed (will retry next flush)
        this.queue.unshift(...batch);
        if (this.verbose) {
          console.error(`${colors.yellow}[AuditQueue] Flush failed, will retry${colors.reset}`);
        }
      } else {
        const result = await response.json();
        if (this.verbose) {
          console.error(`${colors.green}[AuditQueue] Flushed ${result.stored || batch.length} logs ✓${colors.reset}`);
        }
      }
    } catch (error) {
      // Network error: put the batch back and retry on the next cycle.
      this.queue.unshift(...batch);
      if (this.verbose) {
        console.error(`${colors.yellow}[AuditQueue] Network error, will retry: ${error.message}${colors.reset}`);
      }
    } finally {
      this.isFlushing = false;
    }
  }

  /**
   * Best-effort hook for process exit. Does NOT actually transmit — fetch
   * cannot run synchronously — it only reports what would be lost.
   * A real implementation would use sync I/O or spill to a file.
   */
  flushSync() {
    if (this.queue.length === 0) return;

    if (this.verbose) {
      console.error(`${colors.dim}[AuditQueue] ${this.queue.length} logs remaining on exit${colors.reset}`);
    }
  }

  /**
   * @returns {{queued: number, batchSize: number, flushInterval: number}}
   *   Current queue depth and configuration.
   */
  getStats() {
    return {
      queued: this.queue.length,
      batchSize: this.batchSize,
      flushInterval: this.flushInterval
    };
  }

  /**
   * Stop the flush timer, detach the process listeners registered by the
   * constructor, and attempt one final asynchronous flush.
   * @returns {Promise<void>} Resolves when the final flush settles.
   */
  destroy() {
    clearInterval(this.flushTimer);
    process.removeListener('beforeExit', this.exitHandler);
    process.removeListener('SIGINT', this.signalHandler);
    process.removeListener('SIGTERM', this.signalHandler);
    return this.flush();
  }
}
111
+
112
// Module-level singleton shared by every importer of this file.
let auditQueue = null;

/**
 * Return the shared AuditQueue, creating it on first call.
 * Note: `options` are only honored by the call that creates the instance.
 * @param {object} [options] - Forwarded to the AuditQueue constructor.
 * @returns {AuditQueue}
 */
export function getAuditQueue(options = {}) {
  if (auditQueue === null) {
    auditQueue = new AuditQueue(options);
  }
  return auditQueue;
}

/**
 * Tear down the shared queue (if one exists) and clear the singleton slot
 * so the next getAuditQueue() call builds a fresh instance.
 */
export function resetAuditQueue() {
  if (auditQueue === null) {
    return;
  }
  auditQueue.destroy();
  auditQueue = null;
}
@@ -0,0 +1,31 @@
1
/**
 * ANSI escape sequences for terminal output.
 * Usage: `${colors.red}message${colors.reset}`.
 */

// Build an SGR (Select Graphic Rendition) escape sequence from its code.
const sgr = (code) => `\x1b[${code}m`;

export const colors = {
  // Text modifiers
  reset: sgr(0),
  bright: sgr(1),
  dim: sgr(2),
  underscore: sgr(4),
  blink: sgr(5),
  reverse: sgr(7),
  hidden: sgr(8),

  // Foreground colors (30-37)
  black: sgr(30),
  red: sgr(31),
  green: sgr(32),
  yellow: sgr(33),
  blue: sgr(34),
  magenta: sgr(35),
  cyan: sgr(36),
  white: sgr(37),

  // Background colors (40-47)
  bgBlack: sgr(40),
  bgRed: sgr(41),
  bgGreen: sgr(42),
  bgYellow: sgr(43),
  bgBlue: sgr(44),
  bgMagenta: sgr(45),
  bgCyan: sgr(46),
  bgWhite: sgr(47)
};
@@ -0,0 +1,170 @@
1
+ /**
2
+ * Context Prefetcher
3
+ * Predictively fetches context based on user activity
4
+ */
5
+
6
+ import { colors } from './colors.js';
7
+
8
/**
 * Context prefetcher.
 *
 * Watches editor activity (typing, file opens) and speculatively fetches
 * search context for the active file plus heuristically related files,
 * caching results with a TTL so interactive lookups can be served from
 * memory.
 */
export class ContextPrefetcher {
  /**
   * @param {object} [options]
   * @param {number} [options.cacheTtl=120000] - Cache entry lifetime (ms).
   * @param {number} [options.prefetchDelay=500] - Typing debounce (ms).
   * @param {number} [options.maxCacheSize=50] - Max cached entries.
   * @param {boolean} [options.verbose=false] - Log prefetch activity to stderr.
   */
  constructor(options = {}) {
    this.cache = new Map();
    this.cacheTtl = options.cacheTtl || 2 * 60 * 1000; // 2 minutes
    this.prefetchDelay = options.prefetchDelay || 500; // 500ms debounce
    this.maxCacheSize = options.maxCacheSize || 50;
    this.pendingPrefetch = null;
    this.verbose = options.verbose || false;

    // Track recent files for relationship analysis
    this.recentFiles = [];
    this.maxRecentFiles = 10;
  }

  /**
   * Debounced typing hook: records the file as recent and schedules a
   * prefetch after `prefetchDelay` ms of inactivity.
   * @param {string} filePath - File being edited.
   * @param {number|null} currentLine - Cursor line (currently unused by prefetch).
   * @param {Function} searchContextFn - async (query, limit) => context.
   */
  onUserTyping(filePath, currentLine, searchContextFn) {
    this.addToRecentFiles(filePath);

    // Debounce: each keystroke cancels the previously scheduled prefetch.
    clearTimeout(this.pendingPrefetch);
    this.pendingPrefetch = setTimeout(() => {
      this.prefetch(filePath, currentLine, searchContextFn);
    }, this.prefetchDelay);
  }

  /**
   * File-open hook: records the file as recent and prefetches immediately
   * (no debounce).
   */
  onFileOpened(filePath, searchContextFn) {
    this.addToRecentFiles(filePath);
    this.prefetch(filePath, null, searchContextFn);
  }

  /**
   * Move `filePath` to the front of the recency list (most-recent-first),
   * capped at `maxRecentFiles`.
   */
  addToRecentFiles(filePath) {
    this.recentFiles = this.recentFiles.filter(f => f !== filePath);
    this.recentFiles.unshift(filePath);
    this.recentFiles = this.recentFiles.slice(0, this.maxRecentFiles);
  }

  /**
   * Fetch and cache context for `filePath` if no fresh entry exists, then
   * prefetch related files. Errors are logged (verbose only) and swallowed —
   * prefetching is best-effort by design.
   */
  async prefetch(filePath, currentLine, searchContextFn) {
    if (!searchContextFn) return;

    try {
      const cacheKey = `file:${filePath}`;
      // BUG FIX: the original used cache.has(), which treats an *expired*
      // entry as present — so after the TTL elapsed, prefetch would skip the
      // fetch while getCachedContext() returned null, a guaranteed miss.
      if (!this.hasFresh(cacheKey)) {
        if (this.verbose) {
          console.error(`${colors.dim}[Prefetch] Fetching context for ${filePath}${colors.reset}`);
        }

        const context = await searchContextFn(`file:${filePath}`, 10);
        this.setCache(cacheKey, context);
      }

      // Prefetch related files
      await this.prefetchRelatedFiles(filePath, searchContextFn);

    } catch (error) {
      if (this.verbose) {
        console.error(`${colors.yellow}[Prefetch] Error: ${error.message}${colors.reset}`);
      }
    }
  }

  /**
   * Prefetch (with a smaller limit) files heuristically related to the
   * current one. Individual fetch failures are ignored.
   */
  async prefetchRelatedFiles(currentFilePath, searchContextFn) {
    const relatedFiles = this.findRelatedFiles(currentFilePath);

    for (const filePath of relatedFiles) {
      const cacheKey = `file:${filePath}`;
      if (!this.hasFresh(cacheKey)) {
        try {
          const context = await searchContextFn(`file:${filePath}`, 5);
          this.setCache(cacheKey, context);
        } catch (e) {
          // Ignore prefetch errors
        }
      }
    }
  }

  /**
   * Heuristic relationship: recently used files whose extension-stripped
   * path is a prefix/suffix of the current one (e.g. `util.js` ~ `util.test.js`).
   * @returns {string[]} At most 3 related file paths.
   */
  findRelatedFiles(currentFilePath) {
    const currentBase = currentFilePath.replace(/\.(js|ts|jsx|tsx)$/, '');
    const related = [];

    for (const file of this.recentFiles) {
      if (file === currentFilePath) continue;

      const fileBase = file.replace(/\.(js|ts|jsx|tsx)$/, '');

      if (currentBase.includes(fileBase) || fileBase.includes(currentBase)) {
        related.push(file);
      }
    }

    return related.slice(0, 3); // Max 3 related files
  }

  /**
   * True when `key` holds a cached entry within TTL. Expired entries are
   * evicted as a side effect.
   * @param {string} key - Full cache key (including the `file:` prefix).
   * @returns {boolean}
   */
  hasFresh(key) {
    const item = this.cache.get(key);
    if (!item) return false;

    if (Date.now() - item.timestamp > this.cacheTtl) {
      this.cache.delete(key);
      return false;
    }
    return true;
  }

  /**
   * Return the cached context for a file path, or null when absent/expired.
   */
  getCachedContext(filePath) {
    const cacheKey = `file:${filePath}`;
    if (!this.hasFresh(cacheKey)) return null;
    return this.cache.get(cacheKey).data;
  }

  /**
   * Store `data` under `key`, evicting the oldest-inserted entry when at
   * capacity (Map preserves insertion order).
   */
  setCache(key, data) {
    if (this.cache.size >= this.maxCacheSize) {
      const firstKey = this.cache.keys().next().value;
      this.cache.delete(firstKey);
    }

    this.cache.set(key, {
      data,
      timestamp: Date.now()
    });
  }

  /**
   * @returns {{cacheSize: number, maxCacheSize: number, recentFiles: number}}
   */
  getStats() {
    return {
      cacheSize: this.cache.size,
      maxCacheSize: this.maxCacheSize,
      recentFiles: this.recentFiles.length
    };
  }

  /** Drop the cached context for one file. */
  invalidate(filePath) {
    this.cache.delete(`file:${filePath}`);
  }

  /** Drop the entire cache and the recency list. */
  invalidateAll() {
    this.cache.clear();
    this.recentFiles = [];
  }
}
154
+
155
// Module-level singleton shared by every importer of this file.
let prefetcher = null;

/**
 * Return the shared ContextPrefetcher, creating it on first call.
 * Note: `options` are only honored by the call that creates the instance.
 * @param {object} [options] - Forwarded to the ContextPrefetcher constructor.
 * @returns {ContextPrefetcher}
 */
export function getContextPrefetcher(options = {}) {
  if (prefetcher === null) {
    prefetcher = new ContextPrefetcher(options);
  }
  return prefetcher;
}

/**
 * Flush the shared prefetcher's cache (if one exists) and clear the
 * singleton slot so the next call builds a fresh instance.
 */
export function resetContextPrefetcher() {
  if (prefetcher === null) {
    return;
  }
  prefetcher.invalidateAll();
  prefetcher = null;
}
@@ -0,0 +1,114 @@
1
+ /**
2
+ * Intelligence Cache with TTL
3
+ * Caches intelligence results to reduce computation overhead
4
+ */
5
+
6
/**
 * Intelligence cache with TTL.
 *
 * Caches intelligence results keyed by (type, params) to reduce repeated
 * computation. Entries expire after `ttl` ms; the oldest-inserted entry is
 * evicted when the cache is full. Hit/miss counters feed getStats().
 */
export class IntelligenceCache {
  /**
   * @param {object} [options]
   * @param {number} [options.ttl=300000] - Entry lifetime (ms).
   * @param {number} [options.maxSize=1000] - Max cached entries.
   */
  constructor(options = {}) {
    this.cache = new Map();
    this.ttl = options.ttl || 5 * 60 * 1000; // 5 minutes default
    this.maxSize = options.maxSize || 1000;
    this.hitCount = 0;
    this.missCount = 0;
  }

  /**
   * Build a deterministic cache key from a type tag and params.
   * BUG FIX: raw JSON.stringify is insertion-order sensitive, so
   * `{a:1, b:2}` and `{b:2, a:1}` produced different keys and missed each
   * other's entries. Plain-object keys are now sorted recursively.
   * @returns {string} `${type}:` followed by normalized JSON of params.
   */
  generateKey(type, params) {
    const normalize = (value) => {
      if (value === null || typeof value !== 'object') return value;
      if (Array.isArray(value)) return value.map(normalize);
      return Object.keys(value)
        .sort()
        .reduce((acc, k) => {
          acc[k] = normalize(value[k]);
          return acc;
        }, {});
    };
    return `${type}:${JSON.stringify(normalize(params))}`;
  }

  /**
   * Look up a cached result. Expired entries are evicted and count as a
   * miss.
   * @returns {*} Cached data, or null on miss.
   */
  get(type, params) {
    const key = this.generateKey(type, params);
    const item = this.cache.get(key);

    if (!item) {
      this.missCount++;
      return null;
    }

    // Check TTL
    if (Date.now() - item.timestamp > this.ttl) {
      this.cache.delete(key);
      this.missCount++;
      return null;
    }

    this.hitCount++;
    return item.data;
  }

  /**
   * Store a result under (type, params), evicting the oldest-inserted entry
   * when at capacity (Map preserves insertion order).
   */
  set(type, params, data) {
    const key = this.generateKey(type, params);

    if (this.cache.size >= this.maxSize) {
      const firstKey = this.cache.keys().next().value;
      this.cache.delete(firstKey);
    }

    this.cache.set(key, {
      data,
      timestamp: Date.now()
    });
  }

  /** Drop every entry whose key starts with `${type}:`. */
  invalidate(type) {
    for (const key of this.cache.keys()) {
      if (key.startsWith(`${type}:`)) {
        this.cache.delete(key);
      }
    }
  }

  /** Drop all entries and reset the hit/miss counters. */
  invalidateAll() {
    this.cache.clear();
    this.hitCount = 0;
    this.missCount = 0;
  }

  /**
   * @returns {{size: number, maxSize: number, hitRate: string,
   *   hits: number, misses: number, ttl: number}}
   */
  getStats() {
    const total = this.hitCount + this.missCount;
    return {
      size: this.cache.size,
      maxSize: this.maxSize,
      hitRate: total > 0 ? (this.hitCount / total * 100).toFixed(2) + '%' : 'N/A',
      hits: this.hitCount,
      misses: this.missCount,
      ttl: this.ttl
    };
  }

  /**
   * Pre-warm the cache: sequentially fetch and store any `search`-type
   * queries not already cached. Individual fetch errors are ignored
   * (best-effort warm-up). Note: each probe updates the hit/miss counters.
   * @param {Array} queries - Params objects to warm.
   * @param {Function} fetchFn - async (query) => result.
   */
  async prewarm(queries, fetchFn) {
    for (const query of queries) {
      const cached = this.get('search', query);
      if (!cached) {
        try {
          const result = await fetchFn(query);
          this.set('search', query, result);
        } catch (e) {
          // Ignore prewarm errors
        }
      }
    }
  }
}
98
+
99
// Module-level singleton shared by every importer of this file.
let intelligenceCache = null;

/**
 * Return the shared IntelligenceCache, creating it on first call.
 * Note: `options` are only honored by the call that creates the instance.
 * @param {object} [options] - Forwarded to the IntelligenceCache constructor.
 * @returns {IntelligenceCache}
 */
export function getIntelligenceCache(options = {}) {
  if (intelligenceCache === null) {
    intelligenceCache = new IntelligenceCache(options);
  }
  return intelligenceCache;
}

/**
 * Clear the shared cache (if one exists) and reset the singleton slot so
 * the next call builds a fresh instance.
 */
export function resetIntelligenceCache() {
  if (intelligenceCache === null) {
    return;
  }
  intelligenceCache.invalidateAll();
  intelligenceCache = null;
}
@@ -0,0 +1,130 @@
1
+ /**
2
+ * Parallel Search Executor
3
+ * Executes multiple search strategies in parallel for better performance
4
+ */
5
+
6
/**
 * Parallel search executor.
 *
 * Runs multiple search strategies concurrently with a per-search timeout,
 * then merges, dedupes, and ranks their results.
 */
export class ParallelSearch {
  /**
   * @param {object} [options]
   * @param {number} [options.timeout=5000] - Per-search timeout (ms).
   */
  constructor(options = {}) {
    this.timeout = options.timeout || 5000; // 5 second timeout per search
  }

  /**
   * Execute every search function in parallel and wait for all to settle.
   * @param {Function[]} searchFns - async (query, limit) => results.
   * @returns {Promise<Array<{strategy: string, status: string, data: *, error: *}>>}
   *   One entry per strategy, fulfilled or rejected.
   */
  async searchAll(searchFns, query, limit) {
    const searchPromises = searchFns.map(fn =>
      this.executeWithTimeout(fn, query, limit)
    );

    const results = await Promise.allSettled(searchPromises);

    return results.map((result, index) => ({
      strategy: searchFns[index].name || `search-${index}`,
      status: result.status,
      data: result.status === 'fulfilled' ? result.value : null,
      error: result.status === 'rejected' ? result.reason : null
    }));
  }

  /**
   * Race one search against the configured timeout.
   * The timer is cleared once the race settles so a pending timeout does
   * not keep the event loop alive (the original leaked it for up to
   * `timeout` ms per search).
   * @throws {Error} 'Search timeout' when the search exceeds `this.timeout`.
   */
  async executeWithTimeout(fn, query, limit) {
    let timer;
    const timeoutPromise = new Promise((_, reject) => {
      timer = setTimeout(() => reject(new Error('Search timeout')), this.timeout);
    });
    try {
      return await Promise.race([fn(query, limit), timeoutPromise]);
    } finally {
      clearTimeout(timer);
    }
  }

  /**
   * Merge fulfilled strategy results, dedupe by id/name/content, and sort
   * by descending score.
   * @param {Array} results - Output of searchAll().
   * @param {number} [maxResults=10] - Cap on returned items.
   */
  mergeResults(results, maxResults = 10) {
    const allItems = [];

    for (const result of results) {
      if (result.status === 'fulfilled' && result.data) {
        const items = Array.isArray(result.data) ? result.data : [result.data];
        allItems.push(...items.map(item => ({
          ...item,
          _source: result.strategy,
          // ?? (not ||): a legitimate score of 0 must not fall through to 0.5
          _score: item.score ?? item.similarity ?? item.confidence ?? 0.5
        })));
      }
    }

    // Remove duplicates (by id or content)
    const seen = new Set();
    const unique = allItems.filter(item => {
      const key = item.id || item.name || item.content || JSON.stringify(item);
      if (seen.has(key)) return false;
      seen.add(key);
      return true;
    });

    // Sort by score
    unique.sort((a, b) => b._score - a._score);

    return unique.slice(0, maxResults);
  }

  /**
   * Smart parallel search with fallbacks: run strategies flagged `fast`
   * first, and only run the slow ones when fast results fall short of
   * `minResults`.
   * @param {Array<{fn: Function, fast: boolean}>} strategies
   * @param {number} minResults - Threshold that triggers the slow fallback.
   */
  async smartSearch(strategies, query, limit, minResults = 5) {
    const fastStrategies = strategies.filter(s => s.fast);
    const slowStrategies = strategies.filter(s => !s.fast);

    const results = await this.searchAll(
      fastStrategies.map(s => s.fn),
      query,
      limit
    );

    const merged = this.mergeResults(results);

    // If not enough results, try slow strategies
    if (merged.length < minResults && slowStrategies.length > 0) {
      const slowResults = await this.searchAll(
        slowStrategies.map(s => s.fn),
        query,
        limit
      );

      // BUG FIX: the original referenced an undefined `maxResults` here,
      // throwing a ReferenceError whenever the slow fallback ran. Use the
      // same default cap as the fast-path merge above.
      return this.mergeResults([...results, ...slowResults]);
    }

    return merged;
  }
}
107
+
108
// Helper for MemoryEngine parallel search
/**
 * Run the engine's fact, FTS, and vector searches through ParallelSearch
 * and return merged results plus timing metadata.
 * @param {object} engine - Must expose searchFacts/searchFTS/searchVector,
 *   each async (query, limit).
 * @param {string} query - Search query.
 * @param {number} limit - Per-strategy result limit.
 * @returns {Promise<{results: Array, metadata: {duration: string,
 *   strategies: number, resultCount: number}}>}
 */
export async function parallelMemorySearch(engine, query, limit) {
  const searcher = new ParallelSearch();

  // Facts and FTS are cheap; vector search only runs as a fallback.
  const strategies = [
    { name: 'facts', fast: true, fn: (q, l) => engine.searchFacts(q, l) },
    { name: 'fts', fast: true, fn: (q, l) => engine.searchFTS(q, l) },
    { name: 'vector', fast: false, fn: (q, l) => engine.searchVector(q, l) }
  ];

  const startedAt = performance.now();
  const results = await searcher.smartSearch(strategies, query, limit, 5);
  const elapsed = performance.now() - startedAt;

  return {
    results,
    metadata: {
      duration: `${elapsed.toFixed(2)}ms`,
      strategies: strategies.length,
      resultCount: results.length
    }
  };
}