@mrxkun/mcfast-mcp 4.0.6 → 4.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,170 @@
1
+ /**
2
+ * Context Prefetcher
3
+ * Predictively fetches context based on user activity
4
+ */
5
+
6
+ import { colors } from './colors.js';
7
+
8
export class ContextPrefetcher {
  /**
   * @param {Object} [options]
   * @param {number} [options.cacheTtl=120000] - Cache entry lifetime in ms.
   * @param {number} [options.prefetchDelay=500] - Typing debounce in ms.
   * @param {number} [options.maxCacheSize=50] - Max cached contexts before FIFO eviction.
   * @param {boolean} [options.verbose=false] - Log prefetch activity to stderr.
   */
  constructor(options = {}) {
    this.cache = new Map();
    this.cacheTtl = options.cacheTtl || 2 * 60 * 1000; // 2 minutes
    this.prefetchDelay = options.prefetchDelay || 500; // 500ms debounce
    this.maxCacheSize = options.maxCacheSize || 50;
    this.pendingPrefetch = null;
    this.verbose = options.verbose || false;

    // Most-recently-used file paths, newest first, used to guess related files.
    this.recentFiles = [];
    this.maxRecentFiles = 10;
  }

  /**
   * Record typing activity and schedule a debounced prefetch for the file.
   * Each call resets the pending timer so only the last keystroke triggers work.
   */
  onUserTyping(filePath, currentLine, searchContextFn) {
    this.addToRecentFiles(filePath);

    clearTimeout(this.pendingPrefetch);
    this.pendingPrefetch = setTimeout(() => {
      this.prefetch(filePath, currentLine, searchContextFn);
    }, this.prefetchDelay);
  }

  /**
   * Record a file-open event and prefetch its context right away (no debounce).
   */
  onFileOpened(filePath, searchContextFn) {
    this.addToRecentFiles(filePath);
    this.prefetch(filePath, null, searchContextFn);
  }

  /**
   * Move `filePath` to the front of the recent-files list, deduplicating and
   * trimming to `maxRecentFiles`.
   */
  addToRecentFiles(filePath) {
    const existing = this.recentFiles.indexOf(filePath);
    if (existing !== -1) {
      this.recentFiles.splice(existing, 1);
    }
    this.recentFiles.unshift(filePath);
    if (this.recentFiles.length > this.maxRecentFiles) {
      this.recentFiles.length = this.maxRecentFiles;
    }
  }

  /**
   * Fetch and cache context for `filePath` (if not already cached), then
   * prefetch likely-related files. All failures are swallowed (logged only
   * when verbose) — prefetching is best-effort by design.
   */
  async prefetch(filePath, currentLine, searchContextFn) {
    if (!searchContextFn) return;

    const cacheKey = `file:${filePath}`;
    try {
      if (!this.cache.has(cacheKey)) {
        if (this.verbose) {
          console.error(`${colors.dim}[Prefetch] Fetching context for ${filePath}${colors.reset}`);
        }

        this.setCache(cacheKey, await searchContextFn(`file:${filePath}`, 10));
      }

      await this.prefetchRelatedFiles(filePath, searchContextFn);

    } catch (error) {
      if (this.verbose) {
        console.error(`${colors.yellow}[Prefetch] Error: ${error.message}${colors.reset}`);
      }
    }
  }

  /**
   * Prefetch (with a smaller result limit) any related files that are not
   * already cached. Per-file errors are intentionally ignored.
   */
  async prefetchRelatedFiles(currentFilePath, searchContextFn) {
    for (const relatedPath of this.findRelatedFiles(currentFilePath)) {
      const cacheKey = `file:${relatedPath}`;
      if (this.cache.has(cacheKey)) continue;
      try {
        this.setCache(cacheKey, await searchContextFn(`file:${relatedPath}`, 5));
      } catch (e) {
        // Ignore prefetch errors
      }
    }
  }

  /**
   * Heuristically pick up to 3 recent files whose extension-stripped names
   * contain (or are contained by) the current file's name — e.g. `foo.js`
   * relates to `foo.test.js`.
   */
  findRelatedFiles(currentFilePath) {
    const stripExt = (p) => p.replace(/\.(js|ts|jsx|tsx)$/, '');
    const currentBase = stripExt(currentFilePath);

    return this.recentFiles
      .filter((file) => file !== currentFilePath)
      .filter((file) => {
        const fileBase = stripExt(file);
        return currentBase.includes(fileBase) || fileBase.includes(currentBase);
      })
      .slice(0, 3); // Max 3 related files
  }

  /**
   * Return the cached context for `filePath`, or null if absent or expired.
   * Expired entries are removed on access.
   */
  getCachedContext(filePath) {
    const cacheKey = `file:${filePath}`;
    const entry = this.cache.get(cacheKey);
    if (!entry) return null;

    if (Date.now() - entry.timestamp > this.cacheTtl) {
      this.cache.delete(cacheKey);
      return null;
    }

    return entry.data;
  }

  /**
   * Store `data` under `key` with a timestamp, evicting the oldest-inserted
   * entry first when the cache is full (Map preserves insertion order).
   */
  setCache(key, data) {
    if (this.cache.size >= this.maxCacheSize) {
      const oldestKey = this.cache.keys().next().value;
      this.cache.delete(oldestKey);
    }

    this.cache.set(key, { data, timestamp: Date.now() });
  }

  /** Snapshot of cache occupancy and recent-file count. */
  getStats() {
    return {
      cacheSize: this.cache.size,
      maxCacheSize: this.maxCacheSize,
      recentFiles: this.recentFiles.length
    };
  }

  /** Drop the cached context for a single file. */
  invalidate(filePath) {
    this.cache.delete(`file:${filePath}`);
  }

  /** Drop every cached context and forget recent-file history. */
  invalidateAll() {
    this.cache.clear();
    this.recentFiles = [];
  }
}
154
+
155
// Module-level singleton; options are honored only on first construction.
let prefetcher = null;

/** Return the shared ContextPrefetcher, creating it on first use. */
export function getContextPrefetcher(options = {}) {
  if (prefetcher === null) {
    prefetcher = new ContextPrefetcher(options);
  }
  return prefetcher;
}

/** Flush the shared prefetcher's state and discard the singleton. */
export function resetContextPrefetcher() {
  if (prefetcher !== null) {
    prefetcher.invalidateAll();
    prefetcher = null;
  }
}
@@ -0,0 +1,114 @@
1
+ /**
2
+ * Intelligence Cache with TTL
3
+ * Caches intelligence results to reduce computation overhead
4
+ */
5
+
6
export class IntelligenceCache {
  /**
   * @param {Object} [options]
   * @param {number} [options.ttl=300000] - Entry lifetime in ms (5 minutes default).
   * @param {number} [options.maxSize=1000] - Max entries before FIFO eviction.
   */
  constructor(options = {}) {
    this.cache = new Map();
    this.ttl = options.ttl || 5 * 60 * 1000; // 5 minutes default
    this.maxSize = options.maxSize || 1000;
    this.hitCount = 0;
    this.missCount = 0;
  }

  /** Build the cache key: `type` plus the JSON-serialized params. */
  generateKey(type, params) {
    return `${type}:${JSON.stringify(params)}`;
  }

  /**
   * Look up a cached result. Returns null on miss or on an expired entry
   * (expired entries are deleted on access). Updates hit/miss counters.
   */
  get(type, params) {
    const key = this.generateKey(type, params);
    const entry = this.cache.get(key);

    if (entry === undefined) {
      this.missCount += 1;
      return null;
    }

    const expired = Date.now() - entry.timestamp > this.ttl;
    if (expired) {
      this.cache.delete(key);
      this.missCount += 1;
      return null;
    }

    this.hitCount += 1;
    return entry.data;
  }

  /**
   * Store a result, evicting the oldest-inserted entry when at capacity
   * (Map iteration order is insertion order).
   */
  set(type, params, data) {
    const key = this.generateKey(type, params);

    if (this.cache.size >= this.maxSize) {
      const oldestKey = this.cache.keys().next().value;
      this.cache.delete(oldestKey);
    }

    this.cache.set(key, { data, timestamp: Date.now() });
  }

  /** Delete every entry whose key belongs to `type`. */
  invalidate(type) {
    const prefix = `${type}:`;
    for (const key of this.cache.keys()) {
      if (key.startsWith(prefix)) {
        this.cache.delete(key);
      }
    }
  }

  /** Clear all entries and reset hit/miss statistics. */
  invalidateAll() {
    this.cache.clear();
    this.hitCount = 0;
    this.missCount = 0;
  }

  /** Occupancy and hit-rate statistics (hitRate is 'N/A' before any lookup). */
  getStats() {
    const total = this.hitCount + this.missCount;
    const hitRate = total > 0
      ? (this.hitCount / total * 100).toFixed(2) + '%'
      : 'N/A';
    return {
      size: this.cache.size,
      maxSize: this.maxSize,
      hitRate,
      hits: this.hitCount,
      misses: this.missCount,
      ttl: this.ttl
    };
  }

  /**
   * Pre-warm the cache under the 'search' type by fetching each query that
   * is not already cached. Fetch failures are deliberately ignored.
   */
  async prewarm(queries, fetchFn) {
    for (const query of queries) {
      if (this.get('search', query) !== null) continue;
      try {
        this.set('search', query, await fetchFn(query));
      } catch (e) {
        // Ignore prewarm errors
      }
    }
  }
}
98
+
99
// Module-level singleton; options are honored only on first construction.
let intelligenceCache = null;

/** Return the shared IntelligenceCache, creating it on first use. */
export function getIntelligenceCache(options = {}) {
  if (intelligenceCache === null) {
    intelligenceCache = new IntelligenceCache(options);
  }
  return intelligenceCache;
}

/** Flush the shared cache's state and discard the singleton. */
export function resetIntelligenceCache() {
  if (intelligenceCache !== null) {
    intelligenceCache.invalidateAll();
    intelligenceCache = null;
  }
}
@@ -0,0 +1,130 @@
1
+ /**
2
+ * Parallel Search Executor
3
+ * Executes multiple search strategies in parallel for better performance
4
+ */
5
+
6
export class ParallelSearch {
  /**
   * @param {Object} [options]
   * @param {number} [options.timeout=5000] - Per-search timeout in ms.
   */
  constructor(options = {}) {
    this.timeout = options.timeout || 5000; // 5 second timeout per search
  }

  /**
   * Execute multiple searches in parallel.
   * Returns one record per strategy once all settle (or time out), with
   * `status`/`data`/`error` mirroring Promise.allSettled semantics.
   *
   * @param {Function[]} searchFns - Each called as fn(query, limit).
   * @returns {Promise<Array<{strategy: string, status: string, data: *, error: *}>>}
   */
  async searchAll(searchFns, query, limit) {
    const searchPromises = searchFns.map(fn =>
      this.executeWithTimeout(fn, query, limit)
    );

    const results = await Promise.allSettled(searchPromises);

    return results.map((result, index) => ({
      strategy: searchFns[index].name || `search-${index}`,
      status: result.status,
      data: result.status === 'fulfilled' ? result.value : null,
      error: result.status === 'rejected' ? result.reason : null
    }));
  }

  /**
   * Execute a single search, rejecting with 'Search timeout' after
   * `this.timeout` ms. The timer is always cleared so a fast search does not
   * keep the Node event loop alive for the remainder of the timeout window.
   */
  async executeWithTimeout(fn, query, limit) {
    let timer;
    try {
      return await Promise.race([
        fn(query, limit),
        new Promise((_, reject) => {
          timer = setTimeout(() => reject(new Error('Search timeout')), this.timeout);
        })
      ]);
    } finally {
      clearTimeout(timer); // fix: previously the pending timer was leaked
    }
  }

  /**
   * Merge results from multiple strategies: flatten fulfilled payloads, tag
   * each item with its `_source` strategy and a `_score` (score/similarity/
   * confidence, defaulting to 0.5), drop duplicates (first occurrence wins,
   * keyed by id/name/content/JSON), sort by score descending, and truncate.
   *
   * @param {number} [maxResults=10] - Max items returned.
   */
  mergeResults(results, maxResults = 10) {
    const allItems = [];

    for (const result of results) {
      if (result.status === 'fulfilled' && result.data) {
        const items = Array.isArray(result.data) ? result.data : [result.data];
        allItems.push(...items.map(item => ({
          ...item,
          _source: result.strategy,
          _score: item.score || item.similarity || item.confidence || 0.5
        })));
      }
    }

    // Remove duplicates (by id or content)
    const seen = new Set();
    const unique = allItems.filter(item => {
      const key = item.id || item.name || item.content || JSON.stringify(item);
      if (seen.has(key)) return false;
      seen.add(key);
      return true;
    });

    // Sort by score
    unique.sort((a, b) => b._score - a._score);

    return unique.slice(0, maxResults);
  }

  /**
   * Smart parallel search with fallbacks.
   * Runs strategies flagged `fast` first; if they yield fewer than
   * `minResults` merged items, also runs the slow strategies and merges
   * everything together.
   *
   * @param {Array<{fn: Function, fast: boolean}>} strategies
   */
  async smartSearch(strategies, query, limit, minResults = 5) {
    const fastStrategies = strategies.filter(s => s.fast);
    const slowStrategies = strategies.filter(s => !s.fast);

    // Try fast strategies first
    let results = await this.searchAll(
      fastStrategies.map(s => s.fn),
      query,
      limit
    );

    const merged = this.mergeResults(results);

    // If not enough results, try slow strategies
    if (merged.length < minResults && slowStrategies.length > 0) {
      const slowResults = await this.searchAll(
        slowStrategies.map(s => s.fn),
        query,
        limit
      );

      results = [...results, ...slowResults];
      // fix: previously referenced undefined `maxResults`, which threw a
      // ReferenceError whenever this fallback path ran; cap by `limit`.
      return this.mergeResults(results, limit);
    }

    return merged;
  }
}
107
+
108
// Helper for MemoryEngine parallel search
/**
 * Run the engine's three search strategies (facts, FTS, vector) through
 * ParallelSearch.smartSearch and report timing metadata alongside results.
 * Facts and FTS are treated as fast; vector search is the slow fallback.
 */
export async function parallelMemorySearch(engine, query, limit) {
  const searcher = new ParallelSearch();

  const strategies = [
    { name: 'facts', fast: true, fn: (q, l) => engine.searchFacts(q, l) },
    { name: 'fts', fast: true, fn: (q, l) => engine.searchFTS(q, l) },
    { name: 'vector', fast: false, fn: (q, l) => engine.searchVector(q, l) }
  ];

  const startedAt = performance.now();
  const results = await searcher.smartSearch(strategies, query, limit, 5);
  const elapsed = performance.now() - startedAt;

  return {
    results,
    metadata: {
      duration: elapsed.toFixed(2) + 'ms',
      strategies: strategies.length,
      resultCount: results.length
    }
  };
}
@@ -0,0 +1,168 @@
1
+ /**
2
+ * Streaming API Client
3
+ * Handles Server-Sent Events (SSE) for real-time updates
4
+ */
5
+
6
+ import { colors } from './colors.js';
7
+
8
export class StreamingAPIClient {
  /**
   * @param {Object} [options]
   * @param {string} [options.baseUrl='https://mcfast.vercel.app/api/v1']
   * @param {string} [options.token] - Bearer token; falls back to MCFAST_TOKEN env var.
   * @param {boolean} [options.verbose=false]
   */
  constructor(options = {}) {
    this.baseUrl = options.baseUrl || 'https://mcfast.vercel.app/api/v1';
    this.token = options.token || process.env.MCFAST_TOKEN;
    this.verbose = options.verbose || false;
  }

  /**
   * Stream an edit request, yielding one parsed SSE event at a time.
   * `onProgress` (if given) is invoked with each event before it is yielded.
   * Errors are logged to stderr and rethrown.
   */
  async *streamEdit({ instruction, files, onProgress }) {
    const url = `${this.baseUrl}/edit/stream`;

    try {
      const response = await fetch(url, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.token}`
        },
        body: JSON.stringify({
          instruction,
          files,
          stream: true
        })
      });

      if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${await response.text()}`);
      }

      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      // Holds the trailing partial SSE frame between network chunks.
      let pending = '';

      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;

        pending += decoder.decode(value, { stream: true });
        const frames = pending.split('\n\n');
        pending = frames.pop() || '';

        for (const frame of frames) {
          if (!frame.trim()) continue;
          const event = this.parseSSE(frame);
          if (!event) continue;
          if (onProgress) onProgress(event);
          yield event;
        }
      }

    } catch (error) {
      console.error(`${colors.red}[Streaming]${colors.reset} Error:`, error.message);
      throw error;
    }
  }

  /**
   * Stream a batch audit upload. Yields every parsed SSE event; calls
   * `onAck` for events whose type is 'ack'. The generator's return value is
   * the accumulated list of event payloads.
   */
  async *streamBatchAudit(logs, onAck) {
    const url = `${this.baseUrl}/logs/batch`;

    try {
      const response = await fetch(url, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Authorization': `Bearer ${this.token}`
        },
        body: JSON.stringify({ logs, stream: true })
      });

      if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${await response.text()}`);
      }

      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let pending = '';
      const acks = [];

      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;

        pending += decoder.decode(value, { stream: true });
        const frames = pending.split('\n\n');
        pending = frames.pop() || '';

        for (const frame of frames) {
          if (!frame.trim()) continue;
          const event = this.parseSSE(frame);
          if (!event) continue;
          if (event.event === 'ack' && onAck) {
            onAck(event.data);
          }
          acks.push(event.data);
          yield event;
        }
      }

      return acks;

    } catch (error) {
      console.error(`${colors.red}[Streaming]${colors.reset} Batch audit error:`, error.message);
      throw error;
    }
  }

  /**
   * Parse one SSE frame into { event, data, id }.
   * `data` is JSON-decoded when possible, otherwise kept as the raw string.
   * Returns null when the frame carries no `event:` field.
   */
  parseSSE(raw) {
    const parsed = {};

    for (const line of raw.split('\n')) {
      if (line.startsWith('event: ')) {
        parsed.event = line.substring(7);
      } else if (line.startsWith('data: ')) {
        const payload = line.substring(6);
        try {
          parsed.data = JSON.parse(payload);
        } catch {
          parsed.data = payload;
        }
      } else if (line.startsWith('id: ')) {
        parsed.id = line.substring(4);
      }
    }

    return parsed.event ? parsed : null;
  }

  /**
   * Non-streaming batch audit (fallback). Posts the logs with stream:false
   * and returns the parsed JSON response; throws on non-2xx status.
   */
  async batchAudit(logs) {
    const url = `${this.baseUrl}/logs/batch`;

    const response = await fetch(url, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.token}`
      },
      body: JSON.stringify({ logs, stream: false })
    });

    if (!response.ok) {
      throw new Error(`HTTP ${response.status}: ${await response.text()}`);
    }

    return response.json();
  }
}