@juspay/neurolink 9.26.2 → 9.28.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +59 -9
- package/dist/cli/commands/config.d.ts +4 -4
- package/dist/cli/commands/mcp.d.ts +87 -0
- package/dist/cli/commands/mcp.js +1524 -0
- package/dist/cli/loop/optionsSchema.js +4 -0
- package/dist/core/modules/ToolsManager.js +29 -2
- package/dist/index.d.ts +2 -1
- package/dist/index.js +27 -1
- package/dist/lib/core/modules/ToolsManager.js +29 -2
- package/dist/lib/index.d.ts +2 -1
- package/dist/lib/index.js +27 -1
- package/dist/lib/mcp/agentExposure.d.ts +228 -0
- package/dist/lib/mcp/agentExposure.js +357 -0
- package/dist/lib/mcp/batching/index.d.ts +11 -0
- package/dist/lib/mcp/batching/index.js +11 -0
- package/dist/lib/mcp/batching/requestBatcher.d.ts +202 -0
- package/dist/lib/mcp/batching/requestBatcher.js +442 -0
- package/dist/lib/mcp/caching/index.d.ts +11 -0
- package/dist/lib/mcp/caching/index.js +11 -0
- package/dist/lib/mcp/caching/toolCache.d.ts +221 -0
- package/dist/lib/mcp/caching/toolCache.js +434 -0
- package/dist/lib/mcp/elicitation/elicitationManager.d.ts +169 -0
- package/dist/lib/mcp/elicitation/elicitationManager.js +377 -0
- package/dist/lib/mcp/elicitation/index.d.ts +11 -0
- package/dist/lib/mcp/elicitation/index.js +12 -0
- package/dist/lib/mcp/elicitation/types.d.ts +278 -0
- package/dist/lib/mcp/elicitation/types.js +11 -0
- package/dist/lib/mcp/elicitationProtocol.d.ts +228 -0
- package/dist/lib/mcp/elicitationProtocol.js +376 -0
- package/dist/lib/mcp/enhancedToolDiscovery.d.ts +205 -0
- package/dist/lib/mcp/enhancedToolDiscovery.js +482 -0
- package/dist/lib/mcp/index.d.ts +38 -1
- package/dist/lib/mcp/index.js +36 -3
- package/dist/lib/mcp/mcpRegistryClient.d.ts +332 -0
- package/dist/lib/mcp/mcpRegistryClient.js +489 -0
- package/dist/lib/mcp/mcpServerBase.d.ts +227 -0
- package/dist/lib/mcp/mcpServerBase.js +374 -0
- package/dist/lib/mcp/multiServerManager.d.ts +310 -0
- package/dist/lib/mcp/multiServerManager.js +580 -0
- package/dist/lib/mcp/routing/index.d.ts +11 -0
- package/dist/lib/mcp/routing/index.js +11 -0
- package/dist/lib/mcp/routing/toolRouter.d.ts +219 -0
- package/dist/lib/mcp/routing/toolRouter.js +417 -0
- package/dist/lib/mcp/serverCapabilities.d.ts +341 -0
- package/dist/lib/mcp/serverCapabilities.js +503 -0
- package/dist/lib/mcp/toolAnnotations.d.ts +154 -0
- package/dist/lib/mcp/toolAnnotations.js +240 -0
- package/dist/lib/mcp/toolConverter.d.ts +178 -0
- package/dist/lib/mcp/toolConverter.js +259 -0
- package/dist/lib/mcp/toolIntegration.d.ts +136 -0
- package/dist/lib/mcp/toolIntegration.js +335 -0
- package/dist/lib/memory/hippocampusInitializer.d.ts +2 -2
- package/dist/lib/memory/hippocampusInitializer.js +1 -1
- package/dist/lib/neurolink.d.ts +275 -2
- package/dist/lib/neurolink.js +596 -56
- package/dist/lib/providers/litellm.d.ts +10 -0
- package/dist/lib/providers/litellm.js +104 -2
- package/dist/lib/types/configTypes.d.ts +56 -0
- package/dist/lib/types/conversation.d.ts +2 -2
- package/dist/lib/types/generateTypes.d.ts +4 -0
- package/dist/lib/types/index.d.ts +2 -1
- package/dist/lib/types/modelTypes.d.ts +6 -6
- package/dist/lib/types/streamTypes.d.ts +2 -0
- package/dist/lib/types/tools.d.ts +2 -0
- package/dist/lib/utils/pricing.js +177 -17
- package/dist/lib/utils/schemaConversion.d.ts +6 -1
- package/dist/lib/utils/schemaConversion.js +50 -28
- package/dist/lib/workflow/config.d.ts +16 -16
- package/dist/mcp/agentExposure.d.ts +228 -0
- package/dist/mcp/agentExposure.js +356 -0
- package/dist/mcp/batching/index.d.ts +11 -0
- package/dist/mcp/batching/index.js +10 -0
- package/dist/mcp/batching/requestBatcher.d.ts +202 -0
- package/dist/mcp/batching/requestBatcher.js +441 -0
- package/dist/mcp/caching/index.d.ts +11 -0
- package/dist/mcp/caching/index.js +10 -0
- package/dist/mcp/caching/toolCache.d.ts +221 -0
- package/dist/mcp/caching/toolCache.js +433 -0
- package/dist/mcp/elicitation/elicitationManager.d.ts +169 -0
- package/dist/mcp/elicitation/elicitationManager.js +376 -0
- package/dist/mcp/elicitation/index.d.ts +11 -0
- package/dist/mcp/elicitation/index.js +11 -0
- package/dist/mcp/elicitation/types.d.ts +278 -0
- package/dist/mcp/elicitation/types.js +10 -0
- package/dist/mcp/elicitationProtocol.d.ts +228 -0
- package/dist/mcp/elicitationProtocol.js +375 -0
- package/dist/mcp/enhancedToolDiscovery.d.ts +205 -0
- package/dist/mcp/enhancedToolDiscovery.js +481 -0
- package/dist/mcp/index.d.ts +38 -1
- package/dist/mcp/index.js +36 -3
- package/dist/mcp/mcpRegistryClient.d.ts +332 -0
- package/dist/mcp/mcpRegistryClient.js +488 -0
- package/dist/mcp/mcpServerBase.d.ts +227 -0
- package/dist/mcp/mcpServerBase.js +373 -0
- package/dist/mcp/multiServerManager.d.ts +310 -0
- package/dist/mcp/multiServerManager.js +579 -0
- package/dist/mcp/routing/index.d.ts +11 -0
- package/dist/mcp/routing/index.js +10 -0
- package/dist/mcp/routing/toolRouter.d.ts +219 -0
- package/dist/mcp/routing/toolRouter.js +416 -0
- package/dist/mcp/serverCapabilities.d.ts +341 -0
- package/dist/mcp/serverCapabilities.js +502 -0
- package/dist/mcp/toolAnnotations.d.ts +154 -0
- package/dist/mcp/toolAnnotations.js +239 -0
- package/dist/mcp/toolConverter.d.ts +178 -0
- package/dist/mcp/toolConverter.js +258 -0
- package/dist/mcp/toolIntegration.d.ts +136 -0
- package/dist/mcp/toolIntegration.js +334 -0
- package/dist/memory/hippocampusInitializer.d.ts +2 -2
- package/dist/memory/hippocampusInitializer.js +1 -1
- package/dist/neurolink.d.ts +275 -2
- package/dist/neurolink.js +596 -56
- package/dist/providers/litellm.d.ts +10 -0
- package/dist/providers/litellm.js +104 -2
- package/dist/types/configTypes.d.ts +56 -0
- package/dist/types/conversation.d.ts +2 -2
- package/dist/types/generateTypes.d.ts +4 -0
- package/dist/types/index.d.ts +2 -1
- package/dist/types/streamTypes.d.ts +2 -0
- package/dist/types/tools.d.ts +2 -0
- package/dist/utils/pricing.js +177 -17
- package/dist/utils/schemaConversion.d.ts +6 -1
- package/dist/utils/schemaConversion.js +50 -28
- package/package.json +2 -2
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Request Batcher - Batches multiple tool calls for efficiency
|
|
3
|
+
*
|
|
4
|
+
* Provides intelligent batching of MCP tool calls to reduce overhead
|
|
5
|
+
* and improve throughput. Supports automatic flushing based on:
|
|
6
|
+
* - Maximum batch size
|
|
7
|
+
* - Maximum wait time
|
|
8
|
+
* - Manual flush triggers
|
|
9
|
+
*/
|
|
10
|
+
import { EventEmitter } from "events";
|
|
11
|
+
import { logger } from "../../utils/logger.js";
|
|
12
|
+
import { ErrorFactory } from "../../utils/errorHandling.js";
|
|
13
|
+
/**
|
|
14
|
+
* Request Batcher - Efficient batch processing for MCP tool calls
|
|
15
|
+
*
|
|
16
|
+
* @example
|
|
17
|
+
* ```typescript
|
|
18
|
+
* const batcher = new RequestBatcher<ToolResult>({
|
|
19
|
+
* maxBatchSize: 10,
|
|
20
|
+
* maxWaitMs: 100,
|
|
21
|
+
* });
|
|
22
|
+
*
|
|
23
|
+
* // Set the batch executor
|
|
24
|
+
* batcher.setExecutor(async (requests) => {
|
|
25
|
+
* // Execute all requests in a batch
|
|
26
|
+
* return await Promise.all(requests.map(r => executeTool(r.tool, r.args)));
|
|
27
|
+
* });
|
|
28
|
+
*
|
|
29
|
+
* // Add requests - they'll be batched automatically
|
|
30
|
+
* const result1 = await batcher.add('getUserById', { id: 1 });
|
|
31
|
+
* const result2 = await batcher.add('getUserById', { id: 2 });
|
|
32
|
+
* ```
|
|
33
|
+
*/
|
|
34
|
+
export class RequestBatcher extends EventEmitter {
    // Resolved configuration (defaults applied in the constructor).
    config;
    // All queued requests, keyed by generated request id.
    pending = new Map();
    // serverId -> Set of pending request ids; only populated when
    // config.groupByServer is true AND the caller supplied a serverId.
    serverQueues = new Map();
    // Timer handle for the delayed (maxWaitMs) flush; undefined when no
    // flush is scheduled.
    flushTimer;
    // Batch executor supplied via setExecutor(); add() throws without it.
    executor;
    // Number of batches currently executing (bounded by maxConcurrentBatches).
    activeBatches = 0;
    // Monotonic counters used to build unique batch/request ids.
    batchCounter = 0;
    requestCounter = 0;
    // Once destroyed, add() rejects immediately and pending work is dropped.
    isDestroyed = false;
    constructor(config) {
        super();
        // maxBatchSize and maxWaitMs are required; the rest receive defaults.
        // NOTE(review): enableParallel is stored but never read anywhere in
        // this class — confirm it is consumed by callers.
        this.config = {
            maxBatchSize: config.maxBatchSize,
            maxWaitMs: config.maxWaitMs,
            enableParallel: config.enableParallel ?? true,
            maxConcurrentBatches: config.maxConcurrentBatches ?? 5,
            groupByServer: config.groupByServer ?? true,
        };
    }
    /**
     * Set the batch executor function
     */
    setExecutor(executor) {
        this.executor = executor;
    }
    /**
     * Add a request to the batch queue
     *
     * Returns a promise that settles when the batch containing this request
     * completes (resolved with the tool result, or rejected with the
     * per-request or batch-level error).
     */
    async add(tool, args, serverId) {
        if (this.isDestroyed) {
            throw ErrorFactory.invalidConfiguration("batcher", "Batcher has been destroyed");
        }
        if (!this.executor) {
            throw ErrorFactory.missingConfiguration("batchExecutor", {
                hint: "Call setExecutor() before adding requests",
            });
        }
        const requestId = this.generateRequestId();
        return new Promise((resolve, reject) => {
            // resolve/reject are stashed on the request so executeBatch()
            // can settle this promise later.
            const request = {
                id: requestId,
                tool,
                args,
                serverId,
                resolve,
                reject,
                addedAt: Date.now(),
            };
            this.pending.set(requestId, request);
            // Track by server if grouping is enabled
            if (this.config.groupByServer && serverId) {
                if (!this.serverQueues.has(serverId)) {
                    this.serverQueues.set(serverId, new Set());
                }
                const queue = this.serverQueues.get(serverId);
                if (queue) {
                    queue.add(requestId);
                }
            }
            this.emit("requestQueued", {
                requestId,
                queueSize: this.pending.size,
            });
            // Check if we should flush immediately.
            // Note: this branch fires on EVERY add at/above the threshold,
            // so multiple flushes may be scheduled; executeBatch() tolerates
            // an empty queue so the extras are harmless.
            if (this.pending.size >= this.config.maxBatchSize) {
                this.scheduleFlush("size");
            }
            else if (!this.flushTimer) {
                // Start the timer for delayed flush
                this.flushTimer = setTimeout(() => {
                    this.scheduleFlush("timeout");
                }, this.config.maxWaitMs);
            }
        });
    }
    /**
     * Manually flush the current batch
     *
     * Note: executeBatch() may still defer if maxConcurrentBatches is
     * reached, in which case this resolves before the batch actually runs.
     */
    async flush() {
        this.clearFlushTimer();
        if (this.pending.size === 0) {
            return;
        }
        this.emit("flushTriggered", {
            reason: "manual",
            queueSize: this.pending.size,
        });
        await this.executeBatch();
    }
    /**
     * Get current queue size
     */
    get queueSize() {
        return this.pending.size;
    }
    /**
     * Get number of active batches
     */
    get activeBatchCount() {
        return this.activeBatches;
    }
    /**
     * Check if the batcher is idle (no pending requests)
     */
    get isIdle() {
        return this.pending.size === 0 && this.activeBatches === 0;
    }
    /**
     * Wait for all pending requests to complete
     *
     * Polls every 10ms; throws a timeout error if the batcher is still busy
     * after 30 seconds.
     */
    async drain() {
        await this.flush();
        const maxDrainTimeout = 30_000;
        const deadline = Date.now() + maxDrainTimeout;
        // Wait for all queued and active batches to complete
        while (!this.isIdle) {
            if (Date.now() >= deadline) {
                throw ErrorFactory.toolTimeout("batchDrain", maxDrainTimeout);
            }
            await new Promise((resolve) => setTimeout(resolve, 10));
        }
    }
    /**
     * Destroy the batcher and reject all pending requests
     *
     * Batches already in flight are not cancelled; their requests were
     * removed from `pending` when selected and will still settle normally.
     */
    destroy() {
        this.isDestroyed = true;
        this.clearFlushTimer();
        // Reject all pending requests
        for (const request of this.pending.values()) {
            request.reject(ErrorFactory.invalidConfiguration("batcher", "Batcher was destroyed before request could complete"));
        }
        this.pending.clear();
        this.serverQueues.clear();
    }
    // ==================== Private Methods ====================
    // Unique id for a queued request (timestamp + counter).
    generateRequestId() {
        return `req-${Date.now()}-${++this.requestCounter}`;
    }
    // Unique id for an executing batch (timestamp + counter).
    generateBatchId() {
        return `batch-${Date.now()}-${++this.batchCounter}`;
    }
    // Emit flushTriggered and kick off executeBatch() on the next macrotask
    // (setImmediate) so the caller's synchronous work finishes first.
    scheduleFlush(reason) {
        this.clearFlushTimer();
        this.emit("flushTriggered", {
            reason,
            queueSize: this.pending.size,
        });
        // Execute immediately but don't block
        setImmediate(() => {
            this.executeBatch().catch((error) => {
                logger.error("Batch execution failed:", error);
            });
        });
    }
    // Cancel any scheduled delayed flush.
    clearFlushTimer() {
        if (this.flushTimer) {
            clearTimeout(this.flushTimer);
            this.flushTimer = undefined;
        }
    }
    // Select up to maxBatchSize requests, run them through the executor with
    // a 30s timeout, settle each request's promise from the per-request
    // results, and schedule a follow-up batch if requests remain.
    async executeBatch() {
        if (this.pending.size === 0) {
            return;
        }
        // Check concurrent batch limit
        if (this.activeBatches >= this.config.maxConcurrentBatches) {
            // Reschedule for later
            this.clearFlushTimer();
            this.flushTimer = setTimeout(() => {
                this.executeBatch().catch((error) => {
                    logger.error("Rescheduled batch execution failed:", error);
                });
            }, 10);
            return;
        }
        // Get requests for this batch
        const batchRequests = this.selectBatchRequests();
        if (batchRequests.length === 0) {
            return;
        }
        const batchId = this.generateBatchId();
        const startTime = Date.now();
        this.activeBatches++;
        this.emit("batchStarted", { batchId, size: batchRequests.length });
        try {
            // Guard against missing executor
            if (!this.executor) {
                throw ErrorFactory.missingConfiguration("batchExecutor", {
                    hint: "Call setExecutor() before executing batches",
                });
            }
            // Execute the batch with a timeout to prevent indefinite hangs.
            // Note: Promise.race does not cancel the executor — on timeout
            // it keeps running and its eventual results are discarded.
            const executorPromise = this.executor(batchRequests.map((r) => ({
                tool: r.tool,
                args: r.args,
                serverId: r.serverId,
            })));
            const timeoutMs = 30_000;
            let timeoutHandle;
            const timeoutPromise = new Promise((_, reject) => {
                timeoutHandle = setTimeout(() => reject(ErrorFactory.toolTimeout("batchExecution", timeoutMs)), timeoutMs);
            });
            const results = await Promise.race([
                executorPromise,
                timeoutPromise,
            ]).finally(() => {
                if (timeoutHandle) {
                    clearTimeout(timeoutHandle);
                }
            });
            // Process results — results[i] is expected to correspond to
            // batchRequests[i] by position.
            const batchResults = [];
            for (let i = 0; i < batchRequests.length; i++) {
                const request = batchRequests[i];
                const result = results[i];
                const executionTime = Date.now() - startTime;
                if (!result) {
                    const noResultError = ErrorFactory.toolExecutionFailed(request.tool, new Error(`Batch executor returned no result for request ${i}`));
                    request.reject(noResultError);
                    batchResults.push({
                        id: request.id,
                        success: false,
                        error: noResultError,
                        executionTime,
                    });
                    continue;
                }
                if (result.success) {
                    request.resolve(result.result);
                    batchResults.push({
                        id: request.id,
                        success: true,
                        result: result.result,
                        executionTime,
                    });
                }
                else {
                    const error = result.error ??
                        ErrorFactory.toolExecutionFailed(request.tool, new Error("Unknown batch execution error"));
                    request.reject(error);
                    batchResults.push({
                        id: request.id,
                        success: false,
                        error,
                        executionTime,
                    });
                }
            }
            this.emit("batchCompleted", { batchId, results: batchResults });
        }
        catch (error) {
            // Batch-level failure - reject all requests
            const batchError = error instanceof Error
                ? error
                : ErrorFactory.toolExecutionFailed("batch", new Error(String(error)));
            for (const request of batchRequests) {
                request.reject(batchError);
            }
            this.emit("batchFailed", { batchId, error: batchError });
        }
        finally {
            this.activeBatches--;
        }
        // Schedule next batch if there are more pending requests
        if (this.pending.size > 0) {
            this.clearFlushTimer();
            this.flushTimer = setTimeout(() => {
                this.executeBatch().catch((error) => {
                    logger.error("Follow-up batch execution failed:", error);
                });
            }, 0);
        }
    }
    // Pick up to maxBatchSize requests for one batch, removing them from
    // `pending` (and from their server queue) as they are selected.
    selectBatchRequests() {
        const batchRequests = [];
        if (this.config.groupByServer && this.serverQueues.size > 0) {
            // Select from a single server queue for better locality.
            // Only the FIRST server queue (Map insertion order) is drained
            // per batch; requests queued without a serverId are only
            // selected once every server queue has emptied (else branch).
            const [serverId, requestIds] = this.serverQueues.entries().next().value;
            for (const requestId of requestIds) {
                if (batchRequests.length >= this.config.maxBatchSize) {
                    break;
                }
                const request = this.pending.get(requestId);
                if (request) {
                    batchRequests.push(request);
                    this.pending.delete(requestId);
                    requestIds.delete(requestId);
                }
            }
            // Clean up empty server queue
            if (requestIds.size === 0) {
                this.serverQueues.delete(serverId);
            }
        }
        else {
            // Select oldest requests up to batch size
            const sortedRequests = Array.from(this.pending.values()).sort((a, b) => a.addedAt - b.addedAt);
            for (const request of sortedRequests) {
                if (batchRequests.length >= this.config.maxBatchSize) {
                    break;
                }
                batchRequests.push(request);
                this.pending.delete(request.id);
            }
        }
        return batchRequests;
    }
}
|
|
345
|
+
/**
 * Build a new RequestBatcher from the given configuration.
 */
export const createRequestBatcher = (config) => {
    return new RequestBatcher(config);
};
|
|
349
|
+
/**
 * Default batcher configuration
 */
export const DEFAULT_BATCH_CONFIG = {
    maxBatchSize: 10, // dispatch once 10 requests are queued…
    maxWaitMs: 100, // …or after 100ms, whichever comes first
    enableParallel: true, // NOTE(review): not read by RequestBatcher itself — confirm callers use it
    maxConcurrentBatches: 5, // cap on simultaneously executing batches
    groupByServer: true, // batch requests per MCP server for locality
};
|
|
359
|
+
/**
 * Tool Call Batcher - Specialized batcher for MCP tool calls.
 *
 * Thin facade over RequestBatcher: callers submit individual tool calls
 * through execute(), and a per-call executor (registered with
 * setToolExecutor()) is fanned out across each batch.
 */
export class ToolCallBatcher {
    batcher;
    toolExecutor;
    constructor(config) {
        this.batcher = new RequestBatcher({
            ...DEFAULT_BATCH_CONFIG,
            ...config,
        });
        // Bridge the batch-level executor to the per-call executor. Each
        // request is executed independently and its outcome captured as a
        // { success, result | error } record, so one failing call cannot
        // fail the whole batch.
        this.batcher.setExecutor(async (batchedRequests) => {
            const runTool = this.toolExecutor;
            if (!runTool) {
                throw ErrorFactory.missingConfiguration("toolExecutor", {
                    hint: "Call setToolExecutor() before executing tool calls",
                });
            }
            return Promise.all(batchedRequests.map(async (entry) => {
                try {
                    return {
                        success: true,
                        result: await runTool(entry.tool, entry.args, entry.serverId),
                    };
                }
                catch (caught) {
                    const failure = caught instanceof Error
                        ? caught
                        : ErrorFactory.toolExecutionFailed(entry.tool, new Error(String(caught)));
                    return { success: false, error: failure };
                }
            }));
        });
    }
    /**
     * Register the function that executes a single tool call.
     */
    setToolExecutor(executor) {
        this.toolExecutor = executor;
    }
    /**
     * Submit a tool call; it is queued and batched automatically.
     */
    async execute(tool, args, serverId) {
        return this.batcher.add(tool, args, serverId);
    }
    /**
     * Force the underlying batcher to dispatch whatever is queued.
     */
    async flush() {
        return this.batcher.flush();
    }
    /**
     * Resolve once every queued and in-flight call has settled.
     */
    async drain() {
        return this.batcher.drain();
    }
    /**
     * Number of calls currently waiting to be batched.
     */
    get queueSize() {
        return this.batcher.queueSize;
    }
    /**
     * True when nothing is queued and no batch is executing.
     */
    get isIdle() {
        return this.batcher.isIdle;
    }
    /**
     * Tear down the underlying batcher, rejecting queued calls.
     */
    destroy() {
        this.batcher.destroy();
    }
}
|
|
438
|
+
/**
 * Build a new ToolCallBatcher from an optional partial configuration.
 */
export const createToolCallBatcher = (config) => {
    return new ToolCallBatcher(config);
};
|
|
442
|
+
//# sourceMappingURL=requestBatcher.js.map
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MCP Caching Module - Tool result and response caching
|
|
3
|
+
*
|
|
4
|
+
* Provides intelligent caching for MCP tool calls:
|
|
5
|
+
* - Multiple eviction strategies (LRU, FIFO, LFU)
|
|
6
|
+
* - Automatic cache key generation
|
|
7
|
+
* - Pattern-based invalidation
|
|
8
|
+
* - Cache statistics and monitoring
|
|
9
|
+
*/
|
|
10
|
+
export type { CacheConfig, CacheEvents, CacheStats, CacheStrategy, } from "./toolCache.js";
|
|
11
|
+
export { createToolCache, createToolResultCache, DEFAULT_CACHE_CONFIG, ToolCache, ToolResultCache, } from "./toolCache.js";
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MCP Caching Module - Tool result and response caching
|
|
3
|
+
*
|
|
4
|
+
* Provides intelligent caching for MCP tool calls:
|
|
5
|
+
* - Multiple eviction strategies (LRU, FIFO, LFU)
|
|
6
|
+
* - Automatic cache key generation
|
|
7
|
+
* - Pattern-based invalidation
|
|
8
|
+
* - Cache statistics and monitoring
|
|
9
|
+
*/
|
|
10
|
+
export { createToolCache, createToolResultCache, DEFAULT_CACHE_CONFIG, ToolCache, ToolResultCache, } from "./toolCache.js";
|
|
11
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Tool Cache - Caches tool results and server responses
|
|
3
|
+
*
|
|
4
|
+
* Provides intelligent caching for MCP tool calls to improve performance
|
|
5
|
+
* and reduce redundant operations. Supports multiple eviction strategies:
|
|
6
|
+
* - LRU (Least Recently Used)
|
|
7
|
+
* - FIFO (First In, First Out)
|
|
8
|
+
* - LFU (Least Frequently Used)
|
|
9
|
+
*/
|
|
10
|
+
import { EventEmitter } from "events";
|
|
11
|
+
/**
|
|
12
|
+
* Cache eviction strategy
|
|
13
|
+
*/
|
|
14
|
+
export type CacheStrategy = "lru" | "fifo" | "lfu";
|
|
15
|
+
/**
|
|
16
|
+
* Cache configuration options
|
|
17
|
+
*/
|
|
18
|
+
export type CacheConfig = {
|
|
19
|
+
/**
|
|
20
|
+
* Time-to-live in milliseconds (default: 5 minutes)
|
|
21
|
+
*/
|
|
22
|
+
ttl: number;
|
|
23
|
+
/**
|
|
24
|
+
* Maximum number of entries (default: 500)
|
|
25
|
+
*/
|
|
26
|
+
maxSize: number;
|
|
27
|
+
/**
|
|
28
|
+
* Eviction strategy (default: 'lru')
|
|
29
|
+
*/
|
|
30
|
+
strategy: CacheStrategy;
|
|
31
|
+
/**
|
|
32
|
+
* Enable automatic cleanup of expired entries
|
|
33
|
+
*/
|
|
34
|
+
enableAutoCleanup?: boolean;
|
|
35
|
+
/**
|
|
36
|
+
* Cleanup interval in milliseconds (default: 60 seconds)
|
|
37
|
+
*/
|
|
38
|
+
cleanupInterval?: number;
|
|
39
|
+
/**
|
|
40
|
+
* Namespace for cache keys (optional)
|
|
41
|
+
*/
|
|
42
|
+
namespace?: string;
|
|
43
|
+
};
|
|
44
|
+
/**
 * Cache statistics
 *
 * Snapshot shape returned by ToolCache.getStats() / ToolResultCache.getStats().
 */
export type CacheStats = {
    /** Lookups served from the cache. */
    hits: number;
    /** Lookups that found no usable entry. */
    misses: number;
    /** Entries removed by eviction (see CacheEvents.evict for reasons). */
    evictions: number;
    /** Current number of entries. */
    size: number;
    /** Configured capacity ceiling (CacheConfig.maxSize). */
    maxSize: number;
    /** Hit ratio — presumably hits / (hits + misses); confirm in implementation. */
    hitRate: number;
};
|
|
55
|
+
/**
 * Cache events
 *
 * Payload shapes for events emitted by ToolCache (an EventEmitter);
 * event name maps to its payload type.
 */
export type CacheEvents = {
    /** Emitted on a cache hit. */
    hit: {
        key: string;
        value: unknown;
    };
    /** Emitted on a cache miss. */
    miss: {
        key: string;
    };
    /** Emitted when an entry is stored, with the TTL applied to it. */
    set: {
        key: string;
        value: unknown;
        ttl: number;
    };
    /** Emitted when an entry is removed, tagged with why. */
    evict: {
        key: string;
        reason: "expired" | "capacity" | "manual";
    };
    /** Emitted when the whole cache is cleared. */
    clear: {
        entriesRemoved: number;
    };
};
|
|
79
|
+
/**
|
|
80
|
+
* Tool Cache - High-performance caching for MCP tool results
|
|
81
|
+
*
|
|
82
|
+
* @example
|
|
83
|
+
* ```typescript
|
|
84
|
+
* const cache = new ToolCache({
|
|
85
|
+
* ttl: 60000, // 1 minute
|
|
86
|
+
* maxSize: 500,
|
|
87
|
+
* strategy: 'lru',
|
|
88
|
+
* });
|
|
89
|
+
*
|
|
90
|
+
* // Cache a tool result
|
|
91
|
+
* cache.set('getUserById:123', { id: 123, name: 'John' });
|
|
92
|
+
*
|
|
93
|
+
* // Retrieve from cache
|
|
94
|
+
* const user = cache.get('getUserById:123');
|
|
95
|
+
*
|
|
96
|
+
* // Invalidate by pattern
|
|
97
|
+
* cache.invalidate('getUserById:*');
|
|
98
|
+
* ```
|
|
99
|
+
*/
|
|
100
|
+
export declare class ToolCache<T = unknown> extends EventEmitter {
|
|
101
|
+
private cache;
|
|
102
|
+
private config;
|
|
103
|
+
private stats;
|
|
104
|
+
private cleanupTimer?;
|
|
105
|
+
constructor(config: CacheConfig);
|
|
106
|
+
/**
|
|
107
|
+
* Get a value from the cache
|
|
108
|
+
*/
|
|
109
|
+
get(key: string): T | undefined;
|
|
110
|
+
/**
|
|
111
|
+
* Set a value in the cache
|
|
112
|
+
*/
|
|
113
|
+
set(key: string, value: T, ttl?: number): void;
|
|
114
|
+
/**
|
|
115
|
+
* Check if a key exists and is not expired
|
|
116
|
+
*/
|
|
117
|
+
has(key: string): boolean;
|
|
118
|
+
/**
|
|
119
|
+
* Delete a specific key from the cache
|
|
120
|
+
*/
|
|
121
|
+
delete(key: string): boolean;
|
|
122
|
+
/**
|
|
123
|
+
* Invalidate entries matching a pattern
|
|
124
|
+
* Supports glob-style patterns with * wildcard
|
|
125
|
+
*/
|
|
126
|
+
invalidate(pattern: string): number;
|
|
127
|
+
/**
|
|
128
|
+
* Clear all entries from the cache
|
|
129
|
+
*/
|
|
130
|
+
clear(): void;
|
|
131
|
+
/**
|
|
132
|
+
* Get or set a value (cache-aside pattern)
|
|
133
|
+
*/
|
|
134
|
+
getOrSet(key: string, factory: () => Promise<T> | T, ttl?: number): Promise<T>;
|
|
135
|
+
/**
|
|
136
|
+
* Get cache statistics
|
|
137
|
+
*/
|
|
138
|
+
getStats(): CacheStats;
|
|
139
|
+
/**
|
|
140
|
+
* Reset statistics
|
|
141
|
+
*/
|
|
142
|
+
resetStats(): void;
|
|
143
|
+
/**
|
|
144
|
+
* Get all keys in the cache
|
|
145
|
+
*/
|
|
146
|
+
keys(): string[];
|
|
147
|
+
/**
|
|
148
|
+
* Get the number of entries in the cache
|
|
149
|
+
*/
|
|
150
|
+
get size(): number;
|
|
151
|
+
/**
|
|
152
|
+
* Generate a cache key from tool name and arguments
|
|
153
|
+
*/
|
|
154
|
+
static generateKey(toolName: string, args: unknown): string;
|
|
155
|
+
/**
|
|
156
|
+
* Stop the auto-cleanup timer
|
|
157
|
+
*/
|
|
158
|
+
destroy(): void;
|
|
159
|
+
private getFullKey;
|
|
160
|
+
private isExpired;
|
|
161
|
+
/**
|
|
162
|
+
* Delete a cache entry by its full key with a specific eviction reason.
|
|
163
|
+
*/
|
|
164
|
+
private deleteWithReason;
|
|
165
|
+
private evictOne;
|
|
166
|
+
private selectEvictionCandidate;
|
|
167
|
+
private findLRU;
|
|
168
|
+
private findFIFO;
|
|
169
|
+
private findLFU;
|
|
170
|
+
private patternToRegex;
|
|
171
|
+
private updateHitRate;
|
|
172
|
+
private startAutoCleanup;
|
|
173
|
+
private cleanupExpired;
|
|
174
|
+
}
|
|
175
|
+
/**
|
|
176
|
+
* Factory function to create a ToolCache instance
|
|
177
|
+
*/
|
|
178
|
+
export declare const createToolCache: <T = unknown>(config: CacheConfig) => ToolCache<T>;
|
|
179
|
+
/**
|
|
180
|
+
* Default cache configuration
|
|
181
|
+
*/
|
|
182
|
+
export declare const DEFAULT_CACHE_CONFIG: CacheConfig;
|
|
183
|
+
/**
|
|
184
|
+
* Tool-specific cache wrapper with automatic key generation
|
|
185
|
+
*/
|
|
186
|
+
export declare class ToolResultCache {
|
|
187
|
+
private cache;
|
|
188
|
+
constructor(config?: Partial<CacheConfig>);
|
|
189
|
+
/**
|
|
190
|
+
* Cache a tool result
|
|
191
|
+
*/
|
|
192
|
+
cacheResult(toolName: string, args: unknown, result: unknown, ttl?: number): void;
|
|
193
|
+
/**
|
|
194
|
+
* Get a cached tool result
|
|
195
|
+
*/
|
|
196
|
+
getCachedResult(toolName: string, args: unknown): unknown | undefined;
|
|
197
|
+
/**
|
|
198
|
+
* Check if a result is cached
|
|
199
|
+
*/
|
|
200
|
+
hasCachedResult(toolName: string, args: unknown): boolean;
|
|
201
|
+
/**
|
|
202
|
+
* Invalidate all cached results for a tool
|
|
203
|
+
*/
|
|
204
|
+
invalidateTool(toolName: string): number;
|
|
205
|
+
/**
|
|
206
|
+
* Get cache statistics
|
|
207
|
+
*/
|
|
208
|
+
getStats(): CacheStats;
|
|
209
|
+
/**
|
|
210
|
+
* Clear all cached results
|
|
211
|
+
*/
|
|
212
|
+
clear(): void;
|
|
213
|
+
/**
|
|
214
|
+
* Destroy the cache
|
|
215
|
+
*/
|
|
216
|
+
destroy(): void;
|
|
217
|
+
}
|
|
218
|
+
/**
|
|
219
|
+
* Create a tool result cache instance
|
|
220
|
+
*/
|
|
221
|
+
export declare const createToolResultCache: (config?: Partial<CacheConfig>) => ToolResultCache;
|