@mrxkun/mcfast-mcp 4.0.6 → 4.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/index.js +118 -36
- package/src/memory/memory-engine.js +42 -38
- package/src/memory/stores/database.js +6 -2
- package/src/memory/utils/daily-logs.js +6 -22
- package/src/memory/utils/indexer.js +6 -5
- package/src/memory/utils/sync-engine.js +7 -18
- package/src/utils/audit-queue.js +127 -0
- package/src/utils/colors.js +31 -0
- package/src/utils/context-prefetcher.js +170 -0
- package/src/utils/intelligence-cache.js +114 -0
- package/src/utils/parallel-search.js +130 -0
- package/src/utils/streaming-api.js +168 -0
- package/src/utils/streaming.js +180 -0
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Streaming API Client
|
|
3
|
+
* Handles Server-Sent Events (SSE) for real-time updates
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { colors } from './colors.js';
|
|
7
|
+
|
|
8
|
+
export class StreamingAPIClient {
  /**
   * @param {object} [options]
   * @param {string} [options.baseUrl] - API root; defaults to the hosted mcfast endpoint.
   * @param {string} [options.token] - Bearer token; falls back to the MCFAST_TOKEN env var.
   * @param {boolean} [options.verbose] - Enable verbose logging.
   */
  constructor(options = {}) {
    this.baseUrl = options.baseUrl || 'https://mcfast.vercel.app/api/v1';
    this.token = options.token || process.env.MCFAST_TOKEN;
    this.verbose = options.verbose || false;
  }

  /**
   * POST a JSON payload and yield parsed SSE events from the response body.
   * Shared by streamEdit and streamBatchAudit (previously duplicated inline).
   *
   * Fixes over the old inline loops: the trailing buffer is flushed at
   * end-of-stream, so a final event not terminated by "\n\n" before EOF is
   * no longer dropped, and the reader lock is always released.
   *
   * @param {string} url - Full endpoint URL.
   * @param {object} payload - JSON body to send.
   * @yields {{event: string, data: *, id?: string}} Parsed SSE events.
   * @throws {Error} On a non-2xx HTTP response.
   */
  async *#streamEvents(url, payload) {
    const response = await fetch(url, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.token}`
      },
      body: JSON.stringify(payload)
    });

    if (!response.ok) {
      throw new Error(`HTTP ${response.status}: ${await response.text()}`);
    }

    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = '';

    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;

        buffer += decoder.decode(value, { stream: true });
        // SSE events are separated by a blank line; keep the last (possibly
        // incomplete) piece in the buffer for the next read.
        const pieces = buffer.split('\n\n');
        buffer = pieces.pop() || '';

        for (const raw of pieces) {
          if (raw.trim()) {
            const event = this.parseSSE(raw);
            if (event) yield event;
          }
        }
      }

      // Flush the decoder and any final event that arrived without a
      // trailing blank line before EOF.
      buffer += decoder.decode();
      if (buffer.trim()) {
        const event = this.parseSSE(buffer);
        if (event) yield event;
      }
    } finally {
      reader.releaseLock();
    }
  }

  /**
   * Stream an edit operation with real-time progress events.
   *
   * @param {object} params
   * @param {string} params.instruction - Edit instruction for the backend.
   * @param {*} params.files - Files payload.
   * @param {Function} [params.onProgress] - Invoked with each event before it is yielded.
   * @yields {{event: string, data: *, id?: string}}
   */
  async *streamEdit({ instruction, files, onProgress }) {
    const url = `${this.baseUrl}/edit/stream`;

    try {
      for await (const event of this.#streamEvents(url, { instruction, files, stream: true })) {
        if (onProgress) onProgress(event);
        yield event;
      }
    } catch (error) {
      console.error(`${colors.red}[Streaming]${colors.reset} Error:`, error.message);
      throw error;
    }
  }

  /**
   * Batch audit with streaming acknowledgments.
   *
   * @param {Array} logs - Log entries to submit.
   * @param {Function} [onAck] - Invoked with event.data for every "ack" event.
   * @yields {{event: string, data: *, id?: string}}
   * @returns {Array} All collected event payloads (via the generator's return value).
   */
  async *streamBatchAudit(logs, onAck) {
    const url = `${this.baseUrl}/logs/batch`;
    const acks = [];

    try {
      for await (const event of this.#streamEvents(url, { logs, stream: true })) {
        if (event.event === 'ack' && onAck) {
          onAck(event.data);
        }
        acks.push(event.data);
        yield event;
      }

      return acks;

    } catch (error) {
      console.error(`${colors.red}[Streaming]${colors.reset} Batch audit error:`, error.message);
      throw error;
    }
  }

  /**
   * Parse one raw SSE event ("event:", "data:", "id:" lines).
   * The data line is JSON-decoded when possible, otherwise kept as a string.
   *
   * @param {string} raw - One event's worth of SSE text (no trailing blank line).
   * @returns {{event: string, data?: *, id?: string}|null} Null when no "event:" field is present.
   */
  parseSSE(raw) {
    const lines = raw.split('\n');
    const event = {};

    for (const line of lines) {
      if (line.startsWith('event: ')) {
        event.event = line.substring(7);
      } else if (line.startsWith('data: ')) {
        try {
          event.data = JSON.parse(line.substring(6));
        } catch {
          event.data = line.substring(6);
        }
      } else if (line.startsWith('id: ')) {
        event.id = line.substring(4);
      }
    }

    return event.event ? event : null;
  }

  /**
   * Non-streaming batch audit (fallback when SSE is unavailable).
   *
   * @param {Array} logs - Log entries to submit.
   * @returns {Promise<*>} Parsed JSON response.
   * @throws {Error} On a non-2xx HTTP response.
   */
  async batchAudit(logs) {
    const url = `${this.baseUrl}/logs/batch`;

    const response = await fetch(url, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${this.token}`
      },
      body: JSON.stringify({ logs, stream: false })
    });

    if (!response.ok) {
      throw new Error(`HTTP ${response.status}: ${await response.text()}`);
    }

    return response.json();
  }
}
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Streaming utilities for MCP
|
|
3
|
+
* Supports real-time streaming of tool results
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { colors } from './colors.js';
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Create a streaming response handler
|
|
10
|
+
* Allows tools to stream results progressively
|
|
11
|
+
*/
|
|
12
|
+
export class StreamingHandler {
  /**
   * Progressive result streamer: collects chunks, forwards each one to a
   * progress callback, and assembles the final payload on completion.
   *
   * @param {object} [options]
   * @param {Function} [options.onProgress] - Invoked with (chunk, metadata) per chunk.
   * @param {Function} [options.onComplete] - Invoked once with {content, chunks, finalData}.
   * @param {Function} [options.onError] - Invoked if streaming fails.
   */
  constructor(options = {}) {
    const { onProgress, onComplete, onError } = options;
    this.onProgress = onProgress || (() => {});
    this.onComplete = onComplete || (() => {});
    this.onError = onError || (() => {});
    this.chunks = [];
    this.isComplete = false;
  }

  /**
   * Record one chunk and notify the progress callback.
   * Chunks arriving after completion are ignored (with a warning).
   */
  stream(chunk, metadata = {}) {
    if (this.isComplete) {
      console.error(`${colors.yellow}[Stream]${colors.reset} Warning: Streaming after completion`);
      return;
    }

    const entry = { content: chunk, metadata, timestamp: Date.now() };
    this.chunks.push(entry);
    this.onProgress(chunk, metadata);

    // Verbose per-chunk trace, opt-in via environment variable.
    if (process.env.MCFAST_VERBOSE === 'true') {
      console.error(`${colors.dim}[Stream]${colors.reset} Chunk #${this.chunks.length}: ${chunk.length} chars`);
    }
  }

  /**
   * Mark the stream finished, emit the combined content via onComplete,
   * and return the concatenated chunk text.
   */
  complete(finalData = null) {
    this.isComplete = true;

    const fullContent = this.chunks.reduce((acc, { content }) => acc + content, '');

    this.onComplete({ content: fullContent, chunks: this.chunks, finalData });

    return fullContent;
  }

  /** Signal a streaming failure to the error callback and close the stream. */
  error(error) {
    this.isComplete = true;
    this.onError(error);
  }

  /** Snapshot of chunk count, total characters streamed, and completion state. */
  getProgress() {
    let totalChars = 0;
    for (const { content } of this.chunks) {
      totalChars += content.length;
    }

    return {
      chunks: this.chunks.length,
      totalChars,
      isComplete: this.isComplete
    };
  }
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Stream file reading for large files
|
|
79
|
+
* Reads file in chunks and streams progressively
|
|
80
|
+
*/
|
|
81
|
+
/**
 * Stream a file's contents in fixed-size byte chunks.
 *
 * Fixes over the previous version: the file handle is closed even when a
 * read fails (it previously leaked on error), and multi-byte UTF-8
 * characters that straddle a chunk boundary are decoded correctly via a
 * streaming TextDecoder (each chunk was previously decoded independently
 * and could be garbled).
 *
 * @param {string} filePath - Path of the file to read.
 * @param {object} [options]
 * @param {number} [options.chunkSize=1000] - Bytes to read per chunk.
 * @param {Function} [options.onChunk] - Invoked with (text, {position, bytesRead}) per chunk.
 * @returns {Promise<string>} The full decoded file content.
 * @throws Re-throws any open/read error after reporting it to the handler.
 */
export async function streamFileRead(filePath, options = {}) {
  const { chunkSize = 1000, onChunk } = options;
  const handler = new StreamingHandler({ onProgress: onChunk });

  try {
    const fs = await import('fs/promises');
    const handle = await fs.open(filePath, 'r');

    let position = 0;
    try {
      const buffer = Buffer.alloc(chunkSize);
      const decoder = new TextDecoder();

      while (true) {
        const { bytesRead } = await handle.read(buffer, 0, chunkSize, position);
        if (bytesRead === 0) break;

        // stream:true makes the decoder hold back a partial multi-byte
        // sequence until the next read completes it.
        const chunk = decoder.decode(buffer.subarray(0, bytesRead), { stream: true });
        if (chunk) handler.stream(chunk, { position, bytesRead });
        position += bytesRead;
      }

      // Flush any bytes the decoder was still holding at EOF.
      const tail = decoder.decode();
      if (tail) handler.stream(tail, { position, bytesRead: 0 });
    } finally {
      // Guarantee the descriptor is released even if a read throws.
      await handle.close();
    }

    return handler.complete({ filePath, totalBytes: position });

  } catch (error) {
    handler.error(error);
    throw error;
  }
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Stream search results as they're found
|
|
112
|
+
*/
|
|
113
|
+
/**
 * Run a search and replay its results through a callback one at a time,
 * pausing briefly between results to simulate progressive streaming.
 *
 * @param {Function} searchFn - Async search: (query) => results array.
 * @param {*} query - Query forwarded to searchFn.
 * @param {object} [options]
 * @param {Function} [options.onResult] - Invoked with (result, {index, total, progress}).
 * @param {Function} [options.onComplete] - Invoked once with all results.
 * @returns {Promise<Array>} All search results.
 */
export async function streamSearchResults(searchFn, query, options = {}) {
  const { onResult, onComplete } = options;
  const results = [];

  try {
    // The underlying search is not incremental, so fetch everything up
    // front and emit results one by one.
    const allResults = await searchFn(query);
    const total = allResults.length;

    for (const [index, result] of allResults.entries()) {
      results.push(result);

      onResult?.(result, {
        index,
        total,
        progress: ((index + 1) / total * 100).toFixed(1)
      });

      // Short pause between results to simulate streaming (skip after last).
      if (index < total - 1) {
        await new Promise((resolve) => setTimeout(resolve, 10));
      }
    }

    onComplete?.(results);

    return results;

  } catch (error) {
    console.error(`${colors.red}[Stream]${colors.reset} Search error:`, error.message);
    throw error;
  }
}
|
|
151
|
+
|
|
152
|
+
/**
|
|
153
|
+
* Stream edit progress
|
|
154
|
+
*/
|
|
155
|
+
/**
 * Build a phase-reporting edit stream backed by a StreamingHandler.
 * Each phase method emits one human-readable progress line tagged with
 * phase metadata; complete/error/getProgress pass through to the handler.
 *
 * @param {object} [options] - Forwarded to StreamingHandler (onProgress/onComplete/onError).
 * @returns {object} Phase reporters plus handler passthroughs.
 */
export function createEditStream(options = {}) {
  const handler = new StreamingHandler(options);

  const report = (text, metadata) => handler.stream(text, metadata);

  return {
    // Phase reporters — each streams one progress line.
    parsing: () => report('Parsing code...\n', { phase: 'parsing' }),
    analyzing: () => report('Analyzing impact...\n', { phase: 'analysis' }),
    applying: (file) => report(`Applying changes to ${file}...\n`, { phase: 'applying', file }),
    validating: () => report('Validating changes...\n', { phase: 'validation' }),

    // Handler passthroughs.
    complete: (result) => handler.complete(result),
    error: (err) => handler.error(err),
    getProgress: () => handler.getProgress()
  };
}
|