genesis-ai-cli 7.4.7 → 7.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/cli/dispatcher.js +74 -4
- package/dist/src/mcp/cache.d.ts +100 -0
- package/dist/src/mcp/cache.js +395 -0
- package/dist/src/mcp/index.d.ts +15 -1
- package/dist/src/mcp/index.js +37 -7
- package/dist/src/mcp/multimodal.d.ts +52 -0
- package/dist/src/mcp/multimodal.js +355 -0
- package/dist/src/mcp/parallel-executor.d.ts +113 -0
- package/dist/src/mcp/parallel-executor.js +335 -0
- package/dist/src/mcp/streaming.d.ts +78 -0
- package/dist/src/mcp/streaming.js +345 -0
- package/dist/src/mcp/tool-chain.d.ts +79 -0
- package/dist/src/mcp/tool-chain.js +323 -0
- package/dist/src/mcp/transformers.d.ts +156 -0
- package/dist/src/mcp/transformers.js +362 -0
- package/package.json +1 -1
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Genesis MCP Parallel DAG Executor
|
|
4
|
+
*
|
|
5
|
+
* Dependency-aware parallel execution of MCP tool calls.
|
|
6
|
+
* Analyzes call dependencies and maximizes parallelism.
|
|
7
|
+
*
|
|
8
|
+
* Features:
|
|
9
|
+
* - Automatic dependency detection
|
|
10
|
+
* - Topological sort for execution order
|
|
11
|
+
* - Maximum parallelism within dependency constraints
|
|
12
|
+
* - Execution visualization
|
|
13
|
+
* - Cycle detection
|
|
14
|
+
*/
|
|
15
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
16
|
+
exports.DAGBuilder = exports.ParallelDAGExecutor = void 0;
|
|
17
|
+
exports.getDAGExecutor = getDAGExecutor;
|
|
18
|
+
exports.dag = dag;
|
|
19
|
+
exports.parallel = parallel;
|
|
20
|
+
exports.sequential = sequential;
|
|
21
|
+
const index_js_1 = require("./index.js");
|
|
22
|
+
// ============================================================================
|
|
23
|
+
// DAG Executor
|
|
24
|
+
// ============================================================================
|
|
25
|
+
class ParallelDAGExecutor {
    // MCP client used to issue the actual tool calls.
    mcpClient = (0, index_js_1.getMCPClient)();
    // Upper bound on simultaneously in-flight calls within one DAG level.
    maxConcurrency;
    constructor(maxConcurrency = 10) {
        this.maxConcurrency = maxConcurrency;
    }
    /**
     * Execute a DAG of MCP calls with maximum parallelism.
     *
     * Nodes are grouped into topological levels; nodes within a level run
     * in parallel, bounded by `maxConcurrency`. Per-node failures are
     * recorded (in `errors` and as a failed entry in `results`) rather
     * than aborting the whole run.
     *
     * @param {Array} nodes DAG nodes: {id, server, tool, params, dependsOn, priority?, timeout?}.
     * @returns {Promise<Object>} {success, results, errors, executionOrder, totalLatency, parallelismAchieved}
     * @throws {Error} If the dependency graph contains a cycle.
     */
    async execute(nodes) {
        const startTime = Date.now();
        const results = new Map();
        const errors = new Map();
        const executionOrder = [];
        // Validate DAG (check for cycles) before doing any work.
        this.detectCycles(nodes);
        // Build execution levels (topological sort).
        const levels = this.buildExecutionLevels(nodes);
        let totalParallel = 0;
        // Execute level by level; a level only starts after the previous
        // level has fully settled.
        for (const level of levels) {
            const batch = [];
            // Respect max concurrency by chunking each level.
            const chunks = this.chunkArray(level, this.maxConcurrency);
            for (const chunk of chunks) {
                const promises = chunk.map(async (node) => {
                    try {
                        // Resolve params if they're functions of earlier results.
                        // NOTE(review): dependents still run when a dependency
                        // failed, so a params function may observe a failed
                        // result — confirm this is intended.
                        const params = typeof node.params === 'function'
                            ? node.params(results)
                            : node.params;
                        const result = await this.executeNode(node, params);
                        results.set(node.id, result);
                        if (!result.success) {
                            errors.set(node.id, new Error(result.error || 'Unknown error'));
                        }
                    }
                    catch (error) {
                        // Record a synthetic failed result so every node id
                        // appears in `results`, even on throw/timeout.
                        const err = error instanceof Error ? error : new Error(String(error));
                        errors.set(node.id, err);
                        results.set(node.id, {
                            success: false,
                            error: err.message,
                            server: node.server,
                            tool: node.tool,
                            mode: 'real',
                            latency: 0,
                            timestamp: new Date(),
                        });
                    }
                });
                await Promise.all(promises);
                batch.push(...chunk.map(n => n.id));
                totalParallel += chunk.length;
            }
            executionOrder.push(batch);
        }
        const totalLatency = Date.now() - startTime;
        // Average number of calls per level. Guard against division by
        // zero for an empty DAG (previously produced NaN).
        const parallelismAchieved = executionOrder.length > 0
            ? totalParallel / executionOrder.length
            : 0;
        return {
            success: errors.size === 0,
            results,
            errors,
            executionOrder,
            totalLatency,
            parallelismAchieved,
        };
    }
    /**
     * Execute a single node, honoring its optional timeout.
     *
     * Fix: the timeout timer is now cleared once the race settles; the
     * previous version left a dangling setTimeout running to completion.
     * NOTE(review): on timeout the underlying MCP call is not cancelled —
     * it keeps running in the background; its result is discarded.
     */
    async executeNode(node, params) {
        if (node.timeout) {
            let timer;
            try {
                return await Promise.race([
                    this.mcpClient.call(node.server, node.tool, params),
                    new Promise((_, reject) => {
                        timer = setTimeout(() => reject(new Error('Timeout')), node.timeout);
                    }),
                ]);
            }
            finally {
                clearTimeout(timer);
            }
        }
        return this.mcpClient.call(node.server, node.tool, params);
    }
    /**
     * Build execution levels via topological sort (Kahn's algorithm).
     * Each returned level contains nodes whose dependencies are all in
     * earlier levels, sorted by descending priority within the level.
     *
     * @throws {Error} If a cycle prevents further progress.
     */
    buildExecutionLevels(nodes) {
        const nodeMap = new Map(nodes.map(n => [n.id, n]));
        const inDegree = new Map();
        // Reverse adjacency: dependency id -> ids of nodes depending on it.
        // Precomputing this replaces the previous O(n) scan of all nodes
        // per processed node, and the `inDegree.has` guard prevents
        // re-inserting ids that were already removed from the map.
        const dependents = new Map();
        for (const node of nodes) {
            inDegree.set(node.id, node.dependsOn.length);
            for (const depId of node.dependsOn) {
                if (!dependents.has(depId)) {
                    dependents.set(depId, []);
                }
                dependents.get(depId).push(node.id);
            }
        }
        const levels = [];
        // Process level by level until every node is placed.
        while (inDegree.size > 0) {
            // Find all nodes with no remaining dependencies.
            const readyNodes = [];
            for (const [id, degree] of inDegree) {
                if (degree === 0) {
                    readyNodes.push(nodeMap.get(id));
                }
            }
            if (readyNodes.length === 0) {
                // No progress possible: remaining nodes form a cycle.
                throw new Error('Cycle detected in DAG');
            }
            // Sort by priority within level (higher priority first).
            readyNodes.sort((a, b) => (b.priority || 0) - (a.priority || 0));
            levels.push(readyNodes);
            // Remove processed nodes and release their dependents.
            for (const node of readyNodes) {
                inDegree.delete(node.id);
                for (const depId of dependents.get(node.id) || []) {
                    if (inDegree.has(depId)) {
                        inDegree.set(depId, inDegree.get(depId) - 1);
                    }
                }
            }
        }
        return levels;
    }
    /**
     * Detect cycles using iterative-deepening DFS over `dependsOn` edges.
     *
     * @throws {Error} Naming an involved node if a cycle is found.
     */
    detectCycles(nodes) {
        const nodeMap = new Map(nodes.map(n => [n.id, n]));
        const visited = new Set();
        const recursionStack = new Set();
        const dfs = (nodeId) => {
            visited.add(nodeId);
            recursionStack.add(nodeId);
            const node = nodeMap.get(nodeId);
            if (node) {
                for (const depId of node.dependsOn) {
                    if (!visited.has(depId)) {
                        if (dfs(depId))
                            return true;
                    }
                    else if (recursionStack.has(depId)) {
                        return true; // Back edge -> cycle found
                    }
                }
            }
            recursionStack.delete(nodeId);
            return false;
        };
        for (const node of nodes) {
            if (!visited.has(node.id)) {
                if (dfs(node.id)) {
                    throw new Error(`Cycle detected in DAG involving node: ${node.id}`);
                }
            }
        }
    }
    /**
     * Get a visualization of the DAG structure: each node with its
     * execution level, total level count, and the critical path.
     */
    visualize(nodes) {
        const levels = this.buildExecutionLevels(nodes);
        const nodeToLevel = new Map();
        levels.forEach((level, idx) => {
            level.forEach(node => nodeToLevel.set(node.id, idx));
        });
        // Find critical path (longest path).
        const criticalPath = this.findCriticalPath(nodes, nodeToLevel);
        return {
            nodes: nodes.map(node => ({
                id: node.id,
                level: nodeToLevel.get(node.id) || 0,
                dependsOn: node.dependsOn,
                status: 'pending',
            })),
            levels: levels.length,
            criticalPath,
        };
    }
    /**
     * Find the critical path (longest dependency chain), returned from
     * root to leaf.
     *
     * @param nodes DAG nodes.
     * @param nodeToLevel Currently unused; retained for signature stability.
     */
    findCriticalPath(nodes, nodeToLevel) {
        const nodeMap = new Map(nodes.map(n => [n.id, n]));
        let longestPath = [];
        // DFS from each leaf toward the roots, tracking the longest chain.
        const findPath = (nodeId, currentPath) => {
            const node = nodeMap.get(nodeId);
            if (!node)
                return;
            currentPath.push(nodeId);
            if (node.dependsOn.length === 0) {
                // Reached a root node.
                if (currentPath.length > longestPath.length) {
                    longestPath = [...currentPath];
                }
            }
            else {
                for (const depId of node.dependsOn) {
                    findPath(depId, currentPath);
                }
            }
            currentPath.pop();
        };
        // Leaf nodes are those that nothing depends on.
        const hasDependent = new Set();
        for (const node of nodes) {
            for (const depId of node.dependsOn) {
                hasDependent.add(depId);
            }
        }
        const leaves = nodes.filter(n => !hasDependent.has(n.id));
        for (const leaf of leaves) {
            findPath(leaf.id, []);
        }
        // Paths are collected leaf-to-root; reverse to read root-to-leaf.
        return longestPath.reverse();
    }
    // Split an array into consecutive chunks of at most `size` elements.
    chunkArray(array, size) {
        const chunks = [];
        for (let i = 0; i < array.length; i += size) {
            chunks.push(array.slice(i, i + size));
        }
        return chunks;
    }
}
|
|
244
|
+
exports.ParallelDAGExecutor = ParallelDAGExecutor;
|
|
245
|
+
// ============================================================================
|
|
246
|
+
// DAG Builder (Fluent API)
|
|
247
|
+
// ============================================================================
|
|
248
|
+
class DAGBuilder {
    // Accumulated DAG nodes, in insertion order.
    nodes = [];
    /**
     * Add a node to the DAG.
     *
     * @param {string} id Unique node identifier.
     * @param {string} server MCP server name.
     * @param {string} tool Tool to invoke on that server.
     * @param {Object|Function} params Call params, or a function of earlier results.
     * @param {Object} [options] Optional dependsOn / priority / timeout.
     * @returns {DAGBuilder} this, for chaining.
     */
    node(id, server, tool, params, options = {}) {
        const { dependsOn, priority, timeout } = options;
        const entry = {
            id,
            server,
            tool,
            params,
            dependsOn: dependsOn || [],
            priority,
            timeout,
        };
        this.nodes.push(entry);
        return this;
    }
    /**
     * Declare that `nodeId` depends on the given node ids.
     * An unknown `nodeId` is silently ignored.
     *
     * @returns {DAGBuilder} this, for chaining.
     */
    depend(nodeId, ...dependsOnIds) {
        const target = this.nodes.find(candidate => candidate.id === nodeId);
        target?.dependsOn.push(...dependsOnIds);
        return this;
    }
    /**
     * Build and return the accumulated node list.
     */
    build() {
        return this.nodes;
    }
    /**
     * Execute the DAG via the shared executor singleton.
     */
    async execute() {
        return getDAGExecutor().execute(this.nodes);
    }
}
|
|
289
|
+
exports.DAGBuilder = DAGBuilder;
|
|
290
|
+
// ============================================================================
|
|
291
|
+
// Singleton & Utilities
|
|
292
|
+
// ============================================================================
|
|
293
|
+
// Lazily-created shared executor instance.
let executorInstance = null;
/**
 * Get the shared DAG executor singleton.
 *
 * @param {number} [maxConcurrency] Concurrency cap, used ONLY when the
 *   singleton is first created. NOTE(review): later calls passing a
 *   different value silently get the original instance — confirm this
 *   is acceptable for callers.
 * @returns {ParallelDAGExecutor}
 */
function getDAGExecutor(maxConcurrency) {
    if (!executorInstance) {
        executorInstance = new ParallelDAGExecutor(maxConcurrency);
    }
    return executorInstance;
}
/**
 * Start building a DAG with the fluent builder API.
 * @returns {DAGBuilder}
 */
function dag() {
    return new DAGBuilder();
}
/**
 * Quick parallel execution of independent calls.
 *
 * @param {Array<{server, tool, params}>} calls Independent calls.
 * @returns {Promise<Array>} Results aligned index-for-index with `calls`.
 */
async function parallel(calls) {
    const nodes = calls.map((call, i) => ({
        id: `call-${i}`,
        server: call.server,
        tool: call.tool,
        params: call.params,
        dependsOn: [],
    }));
    const result = await getDAGExecutor().execute(nodes);
    // Fix: the results Map is populated in completion order, which is
    // nondeterministic for parallel calls; Array.from(values()) therefore
    // did not line up with the input order. Index by node id instead.
    return nodes.map(node => result.results.get(node.id));
}
/**
 * Sequential execution with result passing.
 *
 * Each call depends on the previous one; a function-valued `params`
 * receives the previous call's `data` payload (undefined for the first).
 *
 * @returns {Promise<Array>} Results in call order.
 */
async function sequential(calls) {
    const nodes = calls.map((call, i) => ({
        id: `call-${i}`,
        server: call.server,
        tool: call.tool,
        params: typeof call.params === 'function'
            ? (results) => {
                const prevResult = i > 0 ? results.get(`call-${i - 1}`)?.data : undefined;
                return call.params(prevResult);
            }
            : call.params,
        dependsOn: i > 0 ? [`call-${i - 1}`] : [],
    }));
    const result = await getDAGExecutor().execute(nodes);
    // Index by node id for consistency with parallel(); execution is
    // strictly ordered here, so this also preserves prior behavior.
    return nodes.map(node => result.results.get(node.id));
}
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Genesis MCP Streaming Results
|
|
3
|
+
*
|
|
4
|
+
* Real-time streaming of tool results with progress updates.
|
|
5
|
+
* Enables live feedback during long-running operations.
|
|
6
|
+
*
|
|
7
|
+
* Features:
|
|
8
|
+
* - Async iterator for results
|
|
9
|
+
* - Progress events (started, progress, chunk, complete, error)
|
|
10
|
+
* - Timeout handling with partial results
|
|
11
|
+
* - Buffered vs unbuffered modes
|
|
12
|
+
* - Event emitter integration
|
|
13
|
+
*/
|
|
14
|
+
import { EventEmitter } from 'events';
|
|
15
|
+
import { MCPServerName } from '../types.js';
|
|
16
|
+
import { MCPCallResult } from './index.js';
|
|
17
|
+
/** Lifecycle stages emitted while a streaming MCP call runs. */
export type StreamEventType = 'started' | 'progress' | 'chunk' | 'complete' | 'error' | 'timeout';
/**
 * A single event emitted during a streaming MCP tool call.
 */
export interface StreamEvent<T = any> {
    /** Which lifecycle stage this event represents. */
    type: StreamEventType;
    /** Server the call was issued against. */
    server: MCPServerName;
    /** Tool name on that server. */
    tool: string;
    /** When the event was emitted. */
    timestamp: Date;
    /** Payload, when the event carries data (e.g. 'chunk'/'complete'). */
    data?: T;
    /** Progress value for 'progress' events — presumably a fraction or percentage; confirm against streaming.js. */
    progress?: number;
    /** Error for 'error' events. */
    error?: Error;
    /** Elapsed time when reported — presumably milliseconds; confirm. */
    latency?: number;
}
/**
 * Tuning options for a streaming call.
 */
export interface StreamOptions {
    /** Interval between progress events — presumably milliseconds; confirm. */
    progressInterval?: number;
    /** Overall call timeout — presumably milliseconds; confirm. */
    timeout?: number;
    /** Event buffer capacity; exact overflow behavior defined by the implementation. */
    bufferSize?: number;
    /** When true, deliver partial results on timeout instead of failing outright. */
    partialOnTimeout?: boolean;
}
/**
 * Handle returned by streaming calls: awaitable (promise), observable
 * (events), and async-iterable (for-await over StreamEvents).
 */
export interface StreamableResult<T = any> {
    /** Resolves with the final call result. */
    promise: Promise<MCPCallResult<T>>;
    /** Emitter delivering StreamEvent updates as they occur. */
    events: EventEmitter;
    /** Async-iterate over events as they arrive. */
    [Symbol.asyncIterator](): AsyncIterator<StreamEvent<T>>;
    /** Cancel the stream; effect on the underlying call defined by the implementation. */
    cancel(): void;
    /** Whether the stream is still active. */
    isRunning(): boolean;
}
|
|
41
|
+
/**
 * EventEmitter-based wrapper adding streaming semantics on top of the
 * plain MCP client (declaration only; behavior lives in streaming.js).
 */
export declare class StreamingMCPWrapper extends EventEmitter {
    /** Underlying MCP client — presumably obtained from ./index.js; confirm in streaming.js. */
    private mcpClient;
    /** Bookkeeping for currently running streams. */
    private activeStreams;
    /**
     * Call an MCP tool with streaming support.
     */
    callStreaming<T = any>(server: MCPServerName, tool: string, params: Record<string, any>, options?: StreamOptions): StreamableResult<T>;
    /**
     * Call multiple tools with merged streaming.
     */
    callParallelStreaming<T = any>(calls: Array<{
        server: MCPServerName;
        tool: string;
        params: Record<string, any>;
    }>, options?: StreamOptions): StreamableResult<T[]>;
    /**
     * Get count of active streams.
     */
    getActiveStreamCount(): number;
    /**
     * Cancel all active streams.
     */
    cancelAll(): void;
}
|
|
65
|
+
/**
 * Progress/spinner reporter for long-running streamed calls
 * (declaration only; behavior lives in streaming.js).
 */
export declare class ProgressReporter {
    /** Presumably the timestamp recorded by start(); confirm in streaming.js. */
    private startTime;
    /** Presumably the timestamp of the last update() render; confirm. */
    private lastUpdate;
    /** Spinner frames — representation defined by the implementation. */
    private spinner;
    /** Index of the current spinner frame. */
    private spinnerIndex;
    /** Begin reporting under the given label. */
    start(label: string): void;
    /** Render an update; `progress` units are defined by the implementation. */
    update(progress?: number): void;
    /** Finish successfully with a closing label. */
    complete(label: string): void;
    /** Finish in an error state with the given label. */
    error(label: string): void;
    private getSpinner;
    private formatTime;
}
|
|
77
|
+
/** Get the shared StreamingMCPWrapper instance — presumably a lazily-created singleton; confirm in streaming.js. */
export declare function getStreamingWrapper(): StreamingMCPWrapper;
/** Convenience wrapper: issue one streaming tool call — presumably delegates to the shared wrapper; confirm. */
export declare function callStreaming<T = any>(server: MCPServerName, tool: string, params: Record<string, any>, options?: StreamOptions): StreamableResult<T>;
|