@juspay/neurolink 1.11.3 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/README.md +63 -21
- package/dist/cli/commands/config.d.ts +6 -6
- package/dist/cli/index.js +89 -39
- package/dist/core/types.d.ts +2 -0
- package/dist/lib/core/types.d.ts +2 -0
- package/dist/lib/neurolink.d.ts +2 -0
- package/dist/lib/neurolink.js +23 -2
- package/dist/lib/providers/agent-enhanced-provider.d.ts +1 -0
- package/dist/lib/providers/agent-enhanced-provider.js +59 -3
- package/dist/lib/providers/amazonBedrock.js +70 -24
- package/dist/lib/providers/anthropic.js +77 -15
- package/dist/lib/providers/azureOpenAI.js +77 -15
- package/dist/lib/providers/googleAIStudio.js +70 -26
- package/dist/lib/providers/googleVertexAI.js +70 -24
- package/dist/lib/providers/huggingFace.js +70 -26
- package/dist/lib/providers/mistralAI.js +70 -26
- package/dist/lib/providers/ollama.d.ts +1 -1
- package/dist/lib/providers/ollama.js +24 -10
- package/dist/lib/providers/openAI.js +67 -23
- package/dist/lib/providers/timeout-wrapper.d.ts +40 -0
- package/dist/lib/providers/timeout-wrapper.js +100 -0
- package/dist/lib/utils/timeout.d.ts +69 -0
- package/dist/lib/utils/timeout.js +130 -0
- package/dist/neurolink.d.ts +2 -0
- package/dist/neurolink.js +23 -2
- package/dist/providers/agent-enhanced-provider.d.ts +1 -0
- package/dist/providers/agent-enhanced-provider.js +59 -3
- package/dist/providers/amazonBedrock.js +70 -24
- package/dist/providers/anthropic.js +77 -15
- package/dist/providers/azureOpenAI.js +77 -15
- package/dist/providers/googleAIStudio.js +70 -26
- package/dist/providers/googleVertexAI.js +70 -24
- package/dist/providers/huggingFace.js +70 -26
- package/dist/providers/mistralAI.js +70 -26
- package/dist/providers/ollama.d.ts +1 -1
- package/dist/providers/ollama.js +24 -10
- package/dist/providers/openAI.js +67 -23
- package/dist/providers/timeout-wrapper.d.ts +40 -0
- package/dist/providers/timeout-wrapper.js +100 -0
- package/dist/utils/timeout.d.ts +69 -0
- package/dist/utils/timeout.js +130 -0
- package/package.json +1 -1
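The headline change in 2.x is configurable timeout support: new `utils/timeout.js` and `providers/timeout-wrapper.js` modules, a `timeout` option threaded through every provider's `generateText`/`streamText`, and a `toolExecutionTimeout` for MCP tool calls in the agent-enhanced provider. As a rough, hedged sketch of how a consumer might use it (the way the provider instance is obtained is outside this diff; the option name `timeout` and the millisecond/duration-string forms are visible in the hunks below):

```js
// Illustrative only: `provider` stands for any NeuroLink provider instance
// (e.g. AnthropicProvider); how you construct it is not part of this diff.
const result = await provider.generateText({
  prompt: "Summarize the 2.1.0 release in one paragraph.",
  maxTokens: 500,
  // New in 2.x: per-request timeout. A plain number is treated as milliseconds;
  // string values are parsed via parseTimeout() (the "30s" form is an assumption).
  timeout: "30s",
});
console.log(result.content);
```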
package/dist/lib/providers/agent-enhanced-provider.js (the same change is mirrored in package/dist/providers/agent-enhanced-provider.js):

@@ -9,6 +9,7 @@ import { anthropic } from "@ai-sdk/anthropic";
 import { getToolsForCategory, } from "../agent/direct-tools.js";
 import { UnifiedMCPSystem } from "../mcp/unified-mcp.js";
 import { mcpLogger } from "../mcp/logging.js";
+import { parseTimeout } from "../utils/timeout.js";
 /**
  * Agent-Enhanced Provider Class
  * Provides AI generation with tool calling capabilities
@@ -114,7 +115,21 @@ export class AgentEnhancedProvider {
                 description: toolInfo.description || `MCP tool: ${toolInfo.name}`,
                 parameters: toolInfo.inputSchema || {},
                 execute: async (args) => {
+                    let timeoutId;
                     try {
+                        // Create timeout controller for tool execution if configured
+                        const toolTimeout = this.config.toolExecutionTimeout;
+                        const toolAbortController = toolTimeout
+                            ? new AbortController()
+                            : undefined;
+                        if (toolAbortController && toolTimeout) {
+                            const timeoutMs = typeof toolTimeout === 'string'
+                                ? parseTimeout(toolTimeout)
+                                : toolTimeout;
+                            timeoutId = setTimeout(() => {
+                                toolAbortController.abort();
+                            }, timeoutMs);
+                        }
                         const context = {
                             sessionId: 'cli-session',
                             userId: 'cli-user',
@@ -185,10 +200,33 @@ export class AgentEnhancedProvider {
                             }
                         }
                     };
-                        const
+                        const toolPromise = this.mcpSystem.executeTool(toolInfo.name, args, context);
+                        let result;
+                        if (toolAbortController) {
+                            // Race between tool execution and timeout
+                            result = await Promise.race([
+                                toolPromise,
+                                new Promise((_, reject) => {
+                                    toolAbortController.signal.addEventListener('abort', () => {
+                                        reject(new Error(`Tool ${toolInfo.name} timed out after ${this.config.toolExecutionTimeout}`));
+                                    });
+                                })
+                            ]);
+                        }
+                        else {
+                            result = await toolPromise;
+                        }
+                        // Clear timeout if successful
+                        if (timeoutId) {
+                            clearTimeout(timeoutId);
+                        }
                         return result.data || result;
                     }
                     catch (error) {
+                        // Clear timeout on error
+                        if (timeoutId) {
+                            clearTimeout(timeoutId);
+                        }
                         mcpLogger.error(`MCP tool ${toolInfo.name} execution failed:`, error);
                         throw error;
                     }
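The tool-execution change above races the MCP tool promise against an AbortController that is aborted by a timer. A minimal standalone sketch of that pattern (the helper name `withToolTimeout` and the example timeout are mine, not the package's):

```js
// Race a tool invocation against a timer-driven AbortController, mirroring the
// diff's approach. Note the underlying promise keeps running after a timeout;
// only the awaiting caller is released.
async function withToolTimeout(runTool, timeoutMs) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    return await Promise.race([
      runTool(),
      new Promise((_, reject) =>
        controller.signal.addEventListener("abort", () =>
          reject(new Error(`Tool timed out after ${timeoutMs}ms`)))),
    ]);
  } finally {
    clearTimeout(timer); // clear on success and on error, as the diff does
  }
}

// e.g. await withToolTimeout(() => mcpSystem.executeTool(name, args, context), 5000);
```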
@@ -206,7 +244,7 @@ export class AgentEnhancedProvider {
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt, schema, } = options;
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt, schema, timeout, } = options;
         // Get combined tools (direct + MCP) if enabled
         const tools = this.config.enableTools
             ? await this.getCombinedTools()
@@ -220,6 +258,14 @@ export class AgentEnhancedProvider {
             maxSteps: this.config.maxSteps
         });
         try {
+            // Parse timeout if provided
+            let abortSignal;
+            if (timeout) {
+                const timeoutMs = typeof timeout === 'string' ? parseTimeout(timeout) : timeout;
+                if (timeoutMs !== undefined) {
+                    abortSignal = AbortSignal.timeout(timeoutMs);
+                }
+            }
             // The AI SDK with maxSteps automatically handles tool calling and result integration
             const result = await generateText({
                 model: this.model,
@@ -231,6 +277,7 @@ export class AgentEnhancedProvider {
                 temperature,
                 maxTokens,
                 toolChoice: this.shouldForceToolUsage(prompt) ? "required" : "auto",
+                abortSignal, // Pass abort signal for timeout support
             });
             log('Generation completed', {
                 text: result.text?.substring(0, 200),
@@ -307,12 +354,20 @@ export class AgentEnhancedProvider {
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt, } = options;
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt, timeout, } = options;
         // Get combined tools (direct + MCP) if enabled
         const tools = this.config.enableTools
             ? await this.getCombinedTools()
             : {};
         try {
+            // Parse timeout if provided
+            let abortSignal;
+            if (timeout) {
+                const timeoutMs = typeof timeout === 'string' ? parseTimeout(timeout) : timeout;
+                if (timeoutMs !== undefined) {
+                    abortSignal = AbortSignal.timeout(timeoutMs);
+                }
+            }
             const result = await streamText({
                 model: this.model,
                 prompt: systemPrompt
@@ -323,6 +378,7 @@ export class AgentEnhancedProvider {
                 temperature,
                 maxTokens,
                 toolChoice: this.shouldForceToolUsage(prompt) ? "required" : "auto",
+                abortSignal, // Pass abort signal for timeout support
             });
             return result;
         }
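For its own `generateText`/`streamText` calls the agent provider relies on `AbortSignal.timeout()` rather than a manually managed controller, passing the signal to the Vercel AI SDK as `abortSignal`. A small sketch of that mechanism in isolation (the model setup and helper name are assumptions):

```js
import { generateText } from "ai";

// AbortSignal.timeout(ms) (Node 17.3+) returns a signal that aborts itself
// after the given delay; the AI SDK rejects the call when the signal fires.
async function generateWithDeadline(model, prompt, timeoutMs) {
  return generateText({
    model,                                     // any ai-sdk model instance (assumed)
    prompt,
    abortSignal: AbortSignal.timeout(timeoutMs),
  });
}
```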
package/dist/lib/providers/amazonBedrock.js (the same change is mirrored in package/dist/providers/amazonBedrock.js):

@@ -1,6 +1,7 @@
 import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";
 import { streamText, generateText, Output, } from "ai";
 import { logger } from "../utils/logger.js";
+import { createTimeoutController, TimeoutError, getDefaultTimeout } from "../utils/timeout.js";
 // Default system context
 const DEFAULT_SYSTEM_CONTEXT = {
     systemPrompt: "You are a helpful AI assistant.",
@@ -128,7 +129,7 @@ export class AmazonBedrock {
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, } = options;
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, timeout = getDefaultTimeout(provider, 'stream'), } = options;
         // Use schema from options or fallback parameter
         const finalSchema = schema || analysisSchema;
         logger.debug(`[${functionTag}] Stream request started`, {
@@ -137,13 +138,18 @@ export class AmazonBedrock {
             promptLength: prompt.length,
             temperature,
             maxTokens,
+            timeout,
         });
+        // Create timeout controller if timeout is specified
+        const timeoutController = createTimeoutController(timeout, provider, 'stream');
         const streamOptions = {
             model: this.model,
             prompt: prompt,
             system: systemPrompt,
             temperature,
             maxTokens,
+            // Add abort signal if available
+            ...(timeoutController && { abortSignal: timeoutController.controller.signal }),
             onError: (event) => {
                 const error = event.error;
                 const errorMessage = error instanceof Error ? error.message : String(error);
@@ -193,16 +199,30 @@ export class AmazonBedrock {
                 modelName: this.modelName,
                 promptLength: prompt.length,
             });
+            // For streaming, we can't clean up immediately, but the timeout will auto-clean
+            // The user should handle the stream and any timeout errors
             return result;
         }
         catch (err) {
-
-
-
-
-
-
-
+            // Log timeout errors specifically
+            if (err instanceof TimeoutError) {
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    modelName: this.modelName,
+                    region: getAWSRegion(),
+                    timeout: err.timeout,
+                    message: err.message,
+                });
+            }
+            else {
+                logger.error(`[${functionTag}] Exception`, {
+                    provider,
+                    modelName: this.modelName,
+                    region: getAWSRegion(),
+                    message: "Error in streaming text",
+                    err: String(err),
+                });
+            }
             throw err; // Re-throw error to trigger fallback
         }
     }
@@ -214,7 +234,7 @@ export class AmazonBedrock {
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, } = options;
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, timeout = getDefaultTimeout(provider, 'generate'), } = options;
         // Use schema from options or fallback parameter
         const finalSchema = schema || analysisSchema;
         logger.debug(`[${functionTag}] Generate text started`, {
@@ -224,36 +244,62 @@ export class AmazonBedrock {
             promptLength: prompt.length,
             temperature,
             maxTokens,
+            timeout,
         });
+        // Create timeout controller if timeout is specified
+        const timeoutController = createTimeoutController(timeout, provider, 'generate');
         const generateOptions = {
             model: this.model,
             prompt: prompt,
             system: systemPrompt,
             temperature,
             maxTokens,
+            // Add abort signal if available
+            ...(timeoutController && { abortSignal: timeoutController.controller.signal }),
         };
         if (finalSchema) {
             generateOptions.experimental_output = Output.object({
                 schema: finalSchema,
             });
         }
-
-
-
-
-
-
-
-
-
+            try {
+                const result = await generateText(generateOptions);
+                // Clean up timeout if successful
+                timeoutController?.cleanup();
+                logger.debug(`[${functionTag}] Generate text completed`, {
+                    provider,
+                    modelName: this.modelName,
+                    usage: result.usage,
+                    finishReason: result.finishReason,
+                    responseLength: result.text?.length || 0,
+                    timeout,
+                });
+                return result;
+            }
+            finally {
+                // Always cleanup timeout
+                timeoutController?.cleanup();
+            }
         }
         catch (err) {
-
-
-
-
-
-
+            // Log timeout errors specifically
+            if (err instanceof TimeoutError) {
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    modelName: this.modelName,
+                    region: getAWSRegion(),
+                    timeout: err.timeout,
+                    message: err.message,
+                });
+            }
+            else {
+                logger.error(`[${functionTag}] Exception`, {
+                    provider,
+                    modelName: this.modelName,
+                    message: "Error in generating text",
+                    err: String(err),
+                });
+            }
            throw err; // Re-throw error to trigger fallback instead of returning null
        }
    }
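The Bedrock hunks above (and the provider hunks that follow) all lean on `createTimeoutController`, `TimeoutError`, and `getDefaultTimeout` from the new `utils/timeout.js`, whose body (+130 lines) is not displayed in this view, and whose default values for `getDefaultTimeout` are likewise not visible. The sketch below is reconstructed purely from the call sites; the exported names match the imports, but every implementation detail is an assumption:

```js
// Reconstructed sketch of ../utils/timeout.js based on how the providers use it.
export class TimeoutError extends Error {
  constructor(message, timeout, provider, operation) {
    super(message);
    this.name = "TimeoutError";
    this.timeout = timeout;     // providers log this as err.timeout (milliseconds)
    this.provider = provider;
    this.operation = operation; // 'generate' | 'stream'
  }
}

// Accepts a raw millisecond number or a duration string such as "500ms", "30s", "2m"
// (the string grammar is assumed).
export function parseTimeout(timeout) {
  if (typeof timeout === "number") return timeout;
  const match = /^(\d+(?:\.\d+)?)\s*(ms|s|m)?$/i.exec(String(timeout).trim());
  if (!match) return undefined;
  const value = Number(match[1]);
  const unit = (match[2] || "ms").toLowerCase();
  return unit === "m" ? value * 60_000 : unit === "s" ? value * 1000 : value;
}

// Returns undefined when no timeout applies, which is why callers spread
// `...(timeoutController && { abortSignal: timeoutController.controller.signal })`.
export function createTimeoutController(timeout, provider, operation) {
  const timeoutMs = typeof timeout === "string" ? parseTimeout(timeout) : timeout;
  if (timeoutMs === undefined || timeoutMs === null) return undefined;
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  return {
    controller,                       // .signal is handed to fetch / generateText
    timeoutMs,                        // reused when converting AbortError to TimeoutError
    cleanup: () => clearTimeout(timer),
  };
}
```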
package/dist/lib/providers/anthropic.js (the same change is mirrored in package/dist/providers/anthropic.js):

@@ -6,6 +6,7 @@
  */
 import { AIProviderName } from "../core/types.js";
 import { logger } from "../utils/logger.js";
+import { createTimeoutController, TimeoutError, getDefaultTimeout } from "../utils/timeout.js";
 export class AnthropicProvider {
     name = AIProviderName.ANTHROPIC;
     apiKey;
@@ -29,7 +30,7 @@ export class AnthropicProvider {
     getModel() {
         return this.defaultModel;
     }
-    async makeRequest(endpoint, body, stream = false) {
+    async makeRequest(endpoint, body, stream = false, signal) {
         const url = `${this.baseURL}/v1/${endpoint}`;
         const headers = {
             "Content-Type": "application/json",
@@ -43,6 +44,7 @@ export class AnthropicProvider {
             method: "POST",
             headers,
             body: JSON.stringify(body),
+            signal, // Add abort signal for timeout support
         });
         if (!response.ok) {
             const errorText = await response.text();
@@ -52,13 +54,15 @@ export class AnthropicProvider {
         return response;
     }
     async generateText(optionsOrPrompt, schema) {
-
+        const functionTag = "AnthropicProvider.generateText";
+        const provider = "anthropic";
+        logger.debug(`[${functionTag}] Starting text generation`);
         // Parse parameters with backward compatibility
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are Claude, an AI assistant created by Anthropic. You are helpful, harmless, and honest.", } = options;
-        logger.debug(`[
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are Claude, an AI assistant created by Anthropic. You are helpful, harmless, and honest.", timeout = getDefaultTimeout(provider, 'generate'), } = options;
+        logger.debug(`[${functionTag}] Prompt: "${prompt.substring(0, 100)}...", Temperature: ${temperature}, Max tokens: ${maxTokens}, Timeout: ${timeout}`);
         const requestBody = {
             model: this.getModel(),
             max_tokens: maxTokens,
@@ -71,10 +75,14 @@ export class AnthropicProvider {
             temperature,
             system: systemPrompt,
         };
+        // Create timeout controller if timeout is specified
+        const timeoutController = createTimeoutController(timeout, provider, 'generate');
         try {
-            const response = await this.makeRequest("messages", requestBody);
+            const response = await this.makeRequest("messages", requestBody, false, timeoutController?.controller.signal);
             const data = await response.json();
-
+            // Clean up timeout if successful
+            timeoutController?.cleanup();
+            logger.debug(`[${functionTag}] Success. Generated ${data.usage.output_tokens} tokens`);
             const content = data.content.map((block) => block.text).join("");
             return {
                 content,
@@ -89,18 +97,42 @@ export class AnthropicProvider {
             };
         }
         catch (error) {
-
+            // Always cleanup timeout
+            timeoutController?.cleanup();
+            // Log timeout errors specifically
+            if (error instanceof TimeoutError) {
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: error.timeout,
+                    message: error.message,
+                });
+            }
+            else if (error?.name === 'AbortError') {
+                // Convert AbortError to TimeoutError
+                const timeoutError = new TimeoutError(`${provider} generate operation timed out after ${timeout}`, timeoutController?.timeoutMs || 0, provider, 'generate');
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: timeoutController?.timeoutMs,
+                    message: timeoutError.message,
+                });
+                throw timeoutError;
+            }
+            else {
+                logger.error(`[${functionTag}] Error:`, error);
+            }
             throw error;
         }
     }
     async streamText(optionsOrPrompt, schema) {
-
+        const functionTag = "AnthropicProvider.streamText";
+        const provider = "anthropic";
+        logger.debug(`[${functionTag}] Starting text streaming`);
         // Parse parameters with backward compatibility
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are Claude, an AI assistant created by Anthropic. You are helpful, harmless, and honest.", } = options;
-        logger.debug(`[
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are Claude, an AI assistant created by Anthropic. You are helpful, harmless, and honest.", timeout = getDefaultTimeout(provider, 'stream'), } = options;
+        logger.debug(`[${functionTag}] Streaming prompt: "${prompt.substring(0, 100)}...", Timeout: ${timeout}`);
         const requestBody = {
             model: this.getModel(),
             max_tokens: maxTokens,
@@ -114,30 +146,60 @@ export class AnthropicProvider {
             system: systemPrompt,
             stream: true,
         };
+        // Create timeout controller if timeout is specified
+        const timeoutController = createTimeoutController(timeout, provider, 'stream');
         try {
-            const response = await this.makeRequest("messages", requestBody, true);
+            const response = await this.makeRequest("messages", requestBody, true, timeoutController?.controller.signal);
             if (!response.body) {
                 throw new Error("No response body received");
             }
-            // Return a StreamTextResult-like object
+            // Return a StreamTextResult-like object with timeout signal
             return {
-                textStream: this.createAsyncIterable(response.body),
+                textStream: this.createAsyncIterable(response.body, timeoutController?.controller.signal),
                 text: "",
                 usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 },
                 finishReason: "end_turn",
+                // Store timeout controller for external cleanup if needed
+                _timeoutController: timeoutController,
             };
         }
         catch (error) {
-
+            // Cleanup timeout on error
+            timeoutController?.cleanup();
+            // Log timeout errors specifically
+            if (error instanceof TimeoutError) {
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: error.timeout,
+                    message: error.message,
+                });
+            }
+            else if (error?.name === 'AbortError') {
+                // Convert AbortError to TimeoutError
+                const timeoutError = new TimeoutError(`${provider} stream operation timed out after ${timeout}`, timeoutController?.timeoutMs || 0, provider, 'stream');
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: timeoutController?.timeoutMs,
+                    message: timeoutError.message,
+                });
+                throw timeoutError;
+            }
+            else {
+                logger.error(`[${functionTag}] Error:`, error);
+            }
             throw error;
         }
     }
-    async *createAsyncIterable(body) {
+    async *createAsyncIterable(body, signal) {
        const reader = body.getReader();
        const decoder = new TextDecoder();
        let buffer = "";
        try {
            while (true) {
+                // Check if aborted
+                if (signal?.aborted) {
+                    throw new Error('AbortError');
+                }
                const { done, value } = await reader.read();
                if (done) {
                    break;
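On the streaming path the providers return a `textStream` async iterable and surface timeouts either as a thrown `TimeoutError` or as an `AbortError` converted into one, as the Anthropic hunks above show. A hedged consumer-side sketch (the `provider` instance and the exact error-handling shape are illustrative):

```js
try {
  const stream = await provider.streamText({
    prompt: "Write a haiku about deadlines.",
    timeout: 10_000, // milliseconds; a duration string also appears to be accepted
  });
  for await (const chunk of stream.textStream) {
    process.stdout.write(chunk);
  }
} catch (err) {
  if (err?.name === "TimeoutError") {
    // err.timeout carries the configured limit in milliseconds
    console.error(`Stream timed out after ${err.timeout}ms`);
  } else {
    throw err;
  }
}
```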
package/dist/lib/providers/azureOpenAI.js (the same change is mirrored in package/dist/providers/azureOpenAI.js):

@@ -6,6 +6,7 @@
  */
 import { AIProviderName } from "../core/types.js";
 import { logger } from "../utils/logger.js";
+import { createTimeoutController, TimeoutError, getDefaultTimeout } from "../utils/timeout.js";
 export class AzureOpenAIProvider {
     name = AIProviderName.AZURE;
     apiKey;
@@ -44,7 +45,7 @@ export class AzureOpenAIProvider {
     getApiUrl(stream = false) {
         return `${this.endpoint}/openai/deployments/${this.deploymentId}/chat/completions?api-version=${this.apiVersion}`;
     }
-    async makeRequest(body, stream = false) {
+    async makeRequest(body, stream = false, signal) {
         const url = this.getApiUrl(stream);
         const headers = {
             "Content-Type": "application/json",
@@ -56,6 +57,7 @@ export class AzureOpenAIProvider {
             method: "POST",
             headers,
             body: JSON.stringify(body),
+            signal, // Add abort signal for timeout support
         });
         if (!response.ok) {
             const errorText = await response.text();
@@ -65,13 +67,15 @@ export class AzureOpenAIProvider {
         return response;
     }
     async generateText(optionsOrPrompt, schema) {
-
+        const functionTag = "AzureOpenAIProvider.generateText";
+        const provider = "azure";
+        logger.debug(`[${functionTag}] Starting text generation`);
         // Parse parameters with backward compatibility
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are a helpful AI assistant.", } = options;
-        logger.debug(`[
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are a helpful AI assistant.", timeout = getDefaultTimeout(provider, 'generate'), } = options;
+        logger.debug(`[${functionTag}] Prompt: "${prompt.substring(0, 100)}...", Temperature: ${temperature}, Max tokens: ${maxTokens}, Timeout: ${timeout}`);
         const messages = [];
         if (systemPrompt) {
             messages.push({
@@ -88,10 +92,14 @@ export class AzureOpenAIProvider {
             temperature,
             max_tokens: maxTokens,
         };
+        // Create timeout controller if timeout is specified
+        const timeoutController = createTimeoutController(timeout, provider, 'generate');
         try {
-            const response = await this.makeRequest(requestBody);
+            const response = await this.makeRequest(requestBody, false, timeoutController?.controller.signal);
             const data = await response.json();
-
+            // Clean up timeout if successful
+            timeoutController?.cleanup();
+            logger.debug(`[${functionTag}] Success. Generated ${data.usage.completion_tokens} tokens`);
             const content = data.choices[0]?.message?.content || "";
             return {
                 content,
@@ -106,18 +114,42 @@ export class AzureOpenAIProvider {
             };
         }
         catch (error) {
-
+            // Always cleanup timeout
+            timeoutController?.cleanup();
+            // Log timeout errors specifically
+            if (error instanceof TimeoutError) {
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: error.timeout,
+                    message: error.message,
+                });
+            }
+            else if (error?.name === 'AbortError') {
+                // Convert AbortError to TimeoutError
+                const timeoutError = new TimeoutError(`${provider} generate operation timed out after ${timeout}`, timeoutController?.timeoutMs || 0, provider, 'generate');
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: timeoutController?.timeoutMs,
+                    message: timeoutError.message,
+                });
+                throw timeoutError;
+            }
+            else {
+                logger.error(`[${functionTag}] Error:`, error);
+            }
             throw error;
         }
     }
     async streamText(optionsOrPrompt, schema) {
-
+        const functionTag = "AzureOpenAIProvider.streamText";
+        const provider = "azure";
+        logger.debug(`[${functionTag}] Starting text streaming`);
         // Parse parameters with backward compatibility
         const options = typeof optionsOrPrompt === "string"
             ? { prompt: optionsOrPrompt }
             : optionsOrPrompt;
-        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are a helpful AI assistant.", } = options;
-        logger.debug(`[
+        const { prompt, temperature = 0.7, maxTokens = 1000, systemPrompt = "You are a helpful AI assistant.", timeout = getDefaultTimeout(provider, 'stream'), } = options;
+        logger.debug(`[${functionTag}] Streaming prompt: "${prompt.substring(0, 100)}...", Timeout: ${timeout}`);
         const messages = [];
         if (systemPrompt) {
             messages.push({
@@ -135,30 +167,60 @@ export class AzureOpenAIProvider {
             max_tokens: maxTokens,
             stream: true,
         };
+        // Create timeout controller if timeout is specified
+        const timeoutController = createTimeoutController(timeout, provider, 'stream');
         try {
-            const response = await this.makeRequest(requestBody, true);
+            const response = await this.makeRequest(requestBody, true, timeoutController?.controller.signal);
             if (!response.body) {
                 throw new Error("No response body received");
             }
-            // Return a StreamTextResult-like object
+            // Return a StreamTextResult-like object with timeout signal
            return {
-                textStream: this.createAsyncIterable(response.body),
+                textStream: this.createAsyncIterable(response.body, timeoutController?.controller.signal),
                text: "",
                usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 },
                finishReason: "stop",
+                // Store timeout controller for external cleanup if needed
+                _timeoutController: timeoutController,
            };
        }
        catch (error) {
-
+            // Cleanup timeout on error
+            timeoutController?.cleanup();
+            // Log timeout errors specifically
+            if (error instanceof TimeoutError) {
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: error.timeout,
+                    message: error.message,
+                });
+            }
+            else if (error?.name === 'AbortError') {
+                // Convert AbortError to TimeoutError
+                const timeoutError = new TimeoutError(`${provider} stream operation timed out after ${timeout}`, timeoutController?.timeoutMs || 0, provider, 'stream');
+                logger.error(`[${functionTag}] Timeout error`, {
+                    provider,
+                    timeout: timeoutController?.timeoutMs,
+                    message: timeoutError.message,
+                });
+                throw timeoutError;
+            }
+            else {
+                logger.error(`[${functionTag}] Error:`, error);
+            }
            throw error;
        }
    }
-    async *createAsyncIterable(body) {
+    async *createAsyncIterable(body, signal) {
        const reader = body.getReader();
        const decoder = new TextDecoder();
        let buffer = "";
        try {
            while (true) {
+                // Check if aborted
+                if (signal?.aborted) {
+                    throw new Error('AbortError');
+                }
                const { done, value } = await reader.read();
                if (done) {
                    break;