@juspay/neurolink 9.10.0 → 9.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (174)
  1. package/CHANGELOG.md +12 -0
  2. package/dist/adapters/video/videoAnalyzer.d.ts +3 -3
  3. package/dist/adapters/video/videoAnalyzer.js +39 -25
  4. package/dist/agent/directTools.d.ts +3 -3
  5. package/dist/cli/commands/config.d.ts +9 -9
  6. package/dist/cli/loop/optionsSchema.d.ts +1 -1
  7. package/dist/constants/contextWindows.d.ts +6 -3
  8. package/dist/constants/contextWindows.js +30 -3
  9. package/dist/constants/index.d.ts +3 -3
  10. package/dist/constants/retry.d.ts +4 -4
  11. package/dist/constants/retry.js +1 -1
  12. package/dist/context/contextCompactor.d.ts +1 -1
  13. package/dist/context/contextCompactor.js +59 -1
  14. package/dist/context/summarizationEngine.d.ts +2 -2
  15. package/dist/context/summarizationEngine.js +44 -18
  16. package/dist/context/toolOutputLimits.d.ts +22 -13
  17. package/dist/context/toolOutputLimits.js +58 -64
  18. package/dist/core/baseProvider.d.ts +11 -2
  19. package/dist/core/baseProvider.js +30 -1
  20. package/dist/core/conversationMemoryManager.d.ts +13 -1
  21. package/dist/core/conversationMemoryManager.js +36 -5
  22. package/dist/core/modules/GenerationHandler.d.ts +6 -0
  23. package/dist/core/modules/GenerationHandler.js +192 -7
  24. package/dist/core/modules/MessageBuilder.js +42 -4
  25. package/dist/core/modules/TelemetryHandler.js +4 -1
  26. package/dist/core/redisConversationMemoryManager.d.ts +19 -3
  27. package/dist/core/redisConversationMemoryManager.js +253 -58
  28. package/dist/index.d.ts +2 -0
  29. package/dist/index.js +3 -0
  30. package/dist/lib/adapters/video/videoAnalyzer.d.ts +3 -3
  31. package/dist/lib/adapters/video/videoAnalyzer.js +39 -25
  32. package/dist/lib/agent/directTools.d.ts +7 -7
  33. package/dist/lib/constants/contextWindows.d.ts +6 -3
  34. package/dist/lib/constants/contextWindows.js +30 -3
  35. package/dist/lib/constants/index.d.ts +3 -3
  36. package/dist/lib/constants/retry.d.ts +4 -4
  37. package/dist/lib/constants/retry.js +1 -1
  38. package/dist/lib/context/contextCompactor.d.ts +1 -1
  39. package/dist/lib/context/contextCompactor.js +59 -1
  40. package/dist/lib/context/summarizationEngine.d.ts +2 -2
  41. package/dist/lib/context/summarizationEngine.js +44 -18
  42. package/dist/lib/context/toolOutputLimits.d.ts +22 -13
  43. package/dist/lib/context/toolOutputLimits.js +58 -64
  44. package/dist/lib/core/baseProvider.d.ts +11 -2
  45. package/dist/lib/core/baseProvider.js +30 -1
  46. package/dist/lib/core/conversationMemoryManager.d.ts +13 -1
  47. package/dist/lib/core/conversationMemoryManager.js +36 -5
  48. package/dist/lib/core/modules/GenerationHandler.d.ts +6 -0
  49. package/dist/lib/core/modules/GenerationHandler.js +192 -7
  50. package/dist/lib/core/modules/MessageBuilder.js +42 -4
  51. package/dist/lib/core/modules/TelemetryHandler.js +4 -1
  52. package/dist/lib/core/redisConversationMemoryManager.d.ts +19 -3
  53. package/dist/lib/core/redisConversationMemoryManager.js +253 -58
  54. package/dist/lib/files/fileTools.d.ts +3 -3
  55. package/dist/lib/index.d.ts +2 -0
  56. package/dist/lib/index.js +3 -0
  57. package/dist/lib/mcp/externalServerManager.js +36 -1
  58. package/dist/lib/memory/memoryRetrievalTools.d.ts +166 -0
  59. package/dist/lib/memory/memoryRetrievalTools.js +145 -0
  60. package/dist/lib/neurolink.d.ts +35 -1
  61. package/dist/lib/neurolink.js +471 -16
  62. package/dist/lib/providers/amazonBedrock.d.ts +1 -1
  63. package/dist/lib/providers/amazonBedrock.js +78 -45
  64. package/dist/lib/providers/amazonSagemaker.d.ts +1 -1
  65. package/dist/lib/providers/amazonSagemaker.js +1 -1
  66. package/dist/lib/providers/anthropic.d.ts +1 -1
  67. package/dist/lib/providers/anthropic.js +7 -7
  68. package/dist/lib/providers/anthropicBaseProvider.d.ts +1 -1
  69. package/dist/lib/providers/anthropicBaseProvider.js +7 -6
  70. package/dist/lib/providers/azureOpenai.d.ts +1 -1
  71. package/dist/lib/providers/azureOpenai.js +1 -1
  72. package/dist/lib/providers/googleAiStudio.d.ts +1 -1
  73. package/dist/lib/providers/googleAiStudio.js +5 -5
  74. package/dist/lib/providers/googleVertex.d.ts +1 -1
  75. package/dist/lib/providers/googleVertex.js +74 -17
  76. package/dist/lib/providers/huggingFace.d.ts +1 -1
  77. package/dist/lib/providers/huggingFace.js +1 -1
  78. package/dist/lib/providers/litellm.d.ts +1 -1
  79. package/dist/lib/providers/litellm.js +18 -16
  80. package/dist/lib/providers/mistral.d.ts +1 -1
  81. package/dist/lib/providers/mistral.js +1 -1
  82. package/dist/lib/providers/ollama.d.ts +1 -1
  83. package/dist/lib/providers/ollama.js +8 -7
  84. package/dist/lib/providers/openAI.d.ts +1 -1
  85. package/dist/lib/providers/openAI.js +6 -6
  86. package/dist/lib/providers/openRouter.d.ts +1 -1
  87. package/dist/lib/providers/openRouter.js +6 -2
  88. package/dist/lib/providers/openaiCompatible.d.ts +1 -1
  89. package/dist/lib/providers/openaiCompatible.js +1 -1
  90. package/dist/lib/proxy/proxyFetch.js +291 -65
  91. package/dist/lib/server/utils/validation.d.ts +4 -4
  92. package/dist/lib/services/server/ai/observability/instrumentation.js +12 -3
  93. package/dist/lib/telemetry/telemetryService.d.ts +2 -1
  94. package/dist/lib/telemetry/telemetryService.js +8 -1
  95. package/dist/lib/types/contextTypes.d.ts +26 -2
  96. package/dist/lib/types/conversation.d.ts +72 -40
  97. package/dist/lib/types/conversationMemoryInterface.d.ts +5 -1
  98. package/dist/lib/types/generateTypes.d.ts +26 -0
  99. package/dist/lib/types/modelTypes.d.ts +2 -2
  100. package/dist/lib/types/multimodal.d.ts +2 -0
  101. package/dist/lib/types/observability.d.ts +10 -0
  102. package/dist/lib/types/sdkTypes.d.ts +1 -1
  103. package/dist/lib/utils/conversationMemory.d.ts +4 -3
  104. package/dist/lib/utils/conversationMemory.js +44 -6
  105. package/dist/lib/utils/errorHandling.d.ts +5 -0
  106. package/dist/lib/utils/errorHandling.js +7 -2
  107. package/dist/lib/utils/logger.d.ts +8 -0
  108. package/dist/lib/utils/logger.js +56 -1
  109. package/dist/lib/utils/messageBuilder.js +74 -4
  110. package/dist/lib/utils/redis.js +6 -1
  111. package/dist/lib/utils/tokenEstimation.d.ts +2 -2
  112. package/dist/lib/utils/tokenEstimation.js +16 -1
  113. package/dist/lib/utils/videoAnalysisProcessor.d.ts +2 -1
  114. package/dist/lib/utils/videoAnalysisProcessor.js +7 -2
  115. package/dist/lib/workflow/config.d.ts +110 -110
  116. package/dist/mcp/externalServerManager.js +36 -1
  117. package/dist/memory/memoryRetrievalTools.d.ts +166 -0
  118. package/dist/memory/memoryRetrievalTools.js +144 -0
  119. package/dist/neurolink.d.ts +35 -1
  120. package/dist/neurolink.js +471 -16
  121. package/dist/providers/amazonBedrock.d.ts +1 -1
  122. package/dist/providers/amazonBedrock.js +78 -45
  123. package/dist/providers/amazonSagemaker.d.ts +1 -1
  124. package/dist/providers/amazonSagemaker.js +1 -1
  125. package/dist/providers/anthropic.d.ts +1 -1
  126. package/dist/providers/anthropic.js +7 -7
  127. package/dist/providers/anthropicBaseProvider.d.ts +1 -1
  128. package/dist/providers/anthropicBaseProvider.js +7 -6
  129. package/dist/providers/azureOpenai.d.ts +1 -1
  130. package/dist/providers/azureOpenai.js +1 -1
  131. package/dist/providers/googleAiStudio.d.ts +1 -1
  132. package/dist/providers/googleAiStudio.js +5 -5
  133. package/dist/providers/googleVertex.d.ts +1 -1
  134. package/dist/providers/googleVertex.js +74 -17
  135. package/dist/providers/huggingFace.d.ts +1 -1
  136. package/dist/providers/huggingFace.js +1 -1
  137. package/dist/providers/litellm.d.ts +1 -1
  138. package/dist/providers/litellm.js +18 -16
  139. package/dist/providers/mistral.d.ts +1 -1
  140. package/dist/providers/mistral.js +1 -1
  141. package/dist/providers/ollama.d.ts +1 -1
  142. package/dist/providers/ollama.js +8 -7
  143. package/dist/providers/openAI.d.ts +1 -1
  144. package/dist/providers/openAI.js +6 -6
  145. package/dist/providers/openRouter.d.ts +1 -1
  146. package/dist/providers/openRouter.js +6 -2
  147. package/dist/providers/openaiCompatible.d.ts +1 -1
  148. package/dist/providers/openaiCompatible.js +1 -1
  149. package/dist/proxy/proxyFetch.js +291 -65
  150. package/dist/services/server/ai/observability/instrumentation.js +12 -3
  151. package/dist/telemetry/telemetryService.d.ts +2 -1
  152. package/dist/telemetry/telemetryService.js +8 -1
  153. package/dist/types/contextTypes.d.ts +26 -2
  154. package/dist/types/conversation.d.ts +72 -40
  155. package/dist/types/conversationMemoryInterface.d.ts +5 -1
  156. package/dist/types/generateTypes.d.ts +26 -0
  157. package/dist/types/modelTypes.d.ts +10 -10
  158. package/dist/types/multimodal.d.ts +2 -0
  159. package/dist/types/observability.d.ts +10 -0
  160. package/dist/types/sdkTypes.d.ts +1 -1
  161. package/dist/utils/conversationMemory.d.ts +4 -3
  162. package/dist/utils/conversationMemory.js +44 -6
  163. package/dist/utils/errorHandling.d.ts +5 -0
  164. package/dist/utils/errorHandling.js +7 -2
  165. package/dist/utils/logger.d.ts +8 -0
  166. package/dist/utils/logger.js +56 -1
  167. package/dist/utils/messageBuilder.js +74 -4
  168. package/dist/utils/redis.js +6 -1
  169. package/dist/utils/tokenEstimation.d.ts +2 -2
  170. package/dist/utils/tokenEstimation.js +16 -1
  171. package/dist/utils/videoAnalysisProcessor.d.ts +2 -1
  172. package/dist/utils/videoAnalysisProcessor.js +7 -2
  173. package/dist/workflow/config.d.ts +12 -12
  174. package/package.json +1 -1
@@ -4,6 +4,7 @@ import { BaseProvider } from "../core/baseProvider.js";
  import { DEFAULT_MAX_STEPS } from "../core/constants.js";
  import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
  import { createProxyFetch } from "../proxy/proxyFetch.js";
+ import { AuthenticationError, InvalidModelError, NetworkError, ProviderError, RateLimitError, } from "../types/errors.js";
  import { isAbortError } from "../utils/errorHandling.js";
  import { logger } from "../utils/logger.js";
  import { getProviderModel } from "../utils/providerConfig.js";
@@ -73,37 +74,37 @@ export class LiteLLMProvider extends BaseProvider {
  getAISDKModel() {
  return this.model;
  }
- handleProviderError(error) {
+ formatProviderError(error) {
  if (error instanceof TimeoutError) {
- return new Error(`LiteLLM request timed out: ${error.message}`);
+ return new NetworkError(`Request timed out: ${error.message}`, this.providerName);
  }
  // Check for timeout by error name and message as fallback
  const errorRecord = error;
  if (errorRecord?.name === "TimeoutError" ||
  (typeof errorRecord?.message === "string" &&
- errorRecord.message.includes("Timeout"))) {
- return new Error(`LiteLLM request timed out: ${errorRecord?.message || "Unknown timeout"}`);
+ errorRecord.message.toLowerCase().includes("timeout"))) {
+ return new NetworkError(`Request timed out: ${errorRecord?.message || "Unknown timeout"}`, this.providerName);
  }
  if (typeof errorRecord?.message === "string") {
  if (errorRecord.message.includes("ECONNREFUSED") ||
  errorRecord.message.includes("Failed to fetch")) {
- return new Error("LiteLLM proxy server not available. Please start the LiteLLM proxy server at " +
- `${process.env.LITELLM_BASE_URL || "http://localhost:4000"}`);
+ return new NetworkError("LiteLLM proxy server not available. Please start the LiteLLM proxy server at " +
+ `${process.env.LITELLM_BASE_URL || "http://localhost:4000"}`, this.providerName);
  }
  if (errorRecord.message.includes("API_KEY_INVALID") ||
  errorRecord.message.includes("Invalid API key")) {
- return new Error("Invalid LiteLLM configuration. Please check your LITELLM_API_KEY environment variable.");
+ return new AuthenticationError("Invalid LiteLLM configuration. Please check your LITELLM_API_KEY environment variable.", this.providerName);
  }
- if (errorRecord.message.includes("rate limit")) {
- return new Error("LiteLLM rate limit exceeded. Please try again later.");
+ if (errorRecord.message.toLowerCase().includes("rate limit")) {
+ return new RateLimitError("LiteLLM rate limit exceeded. Please try again later.", this.providerName);
  }
- if (errorRecord.message.includes("model") &&
- errorRecord.message.includes("not found")) {
- return new Error(`Model '${this.modelName}' not available in LiteLLM proxy. ` +
- "Please check your LiteLLM configuration and ensure the model is configured.");
+ if (errorRecord.message.toLowerCase().includes("model") &&
+ errorRecord.message.toLowerCase().includes("not found")) {
+ return new InvalidModelError(`Model '${this.modelName}' not available in LiteLLM proxy. ` +
+ "Please check your LiteLLM configuration and ensure the model is configured.", this.providerName);
  }
  }
- return new Error(`LiteLLM error: ${errorRecord?.message || "Unknown error"}`);
+ return new ProviderError(`LiteLLM error: ${errorRecord?.message || "Unknown error"}`, this.providerName);
  }
  /**
  * LiteLLM supports tools for compatible models
@@ -248,7 +249,8 @@ export class LiteLLMProvider extends BaseProvider {
  })();
  // Create analytics promise that resolves after stream completion
  const analyticsPromise = streamAnalyticsCollector.createAnalytics(this.providerName, this.modelName, result, Date.now() - startTime, {
- requestId: `litellm-stream-${Date.now()}`,
+ requestId: options.requestId ??
+ `litellm-stream-${Date.now()}`,
  streamingMode: true,
  });
  return {
@@ -364,7 +366,7 @@ export class LiteLLMProvider extends BaseProvider {
  catch (error) {
  clearTimeout(timeoutId);
  if (isAbortError(error)) {
- throw new Error("Request timed out after 5 seconds");
+ throw new NetworkError("Request timed out after 5 seconds", this.providerName);
  }
  throw error;
  }
@@ -17,7 +17,7 @@ export declare class MistralProvider extends BaseProvider {
  * Returns the Vercel AI SDK model instance for Mistral
  */
  getAISDKModel(): LanguageModelV1;
- handleProviderError(error: unknown): Error;
+ formatProviderError(error: unknown): Error;
  /**
  * Validate provider configuration
  */
@@ -113,7 +113,7 @@ export class MistralProvider extends BaseProvider {
  getAISDKModel() {
  return this.model;
  }
- handleProviderError(error) {
+ formatProviderError(error) {
  if (error instanceof TimeoutError) {
  return new Error(`Mistral request timed out: ${error.message}`);
  }
@@ -114,7 +114,7 @@ export declare class OllamaProvider extends BaseProvider {
  */
  private createOllamaStream;
  private createOpenAIStream;
- protected handleProviderError(error: unknown): Error;
+ protected formatProviderError(error: unknown): Error;
  /**
  * Check if Ollama service is healthy and accessible
  */
@@ -6,6 +6,7 @@ import { createProxyFetch } from "../proxy/proxyFetch.js";
  import { logger } from "../utils/logger.js";
  import { buildMultimodalMessagesArray } from "../utils/messageBuilder.js";
  import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
+ import { InvalidModelError, NetworkError, ProviderError, } from "../types/errors.js";
  import { TimeoutError } from "../utils/timeout.js";
  // Model version constants (configurable via environment)
  const DEFAULT_OLLAMA_MODEL = "llama3.1:8b";
@@ -1400,22 +1401,22 @@ export class OllamaProvider extends BaseProvider {
  reader.releaseLock();
  }
  }
- handleProviderError(error) {
- if (error.name === "TimeoutError") {
- return new TimeoutError(`Ollama request timed out. The model might be loading or the request is too complex.`, this.defaultTimeout);
+ formatProviderError(error) {
+ if (error instanceof TimeoutError) {
+ return new TimeoutError(`Ollama request timed out. The model might be loading or the request is too complex.`, this.timeout);
  }
  if (error.message?.includes("ECONNREFUSED") ||
  error.message?.includes("fetch failed")) {
- return new Error(`āŒ Ollama Service Not Running\n\nCannot connect to Ollama at ${this.baseUrl}\n\nšŸ”§ Steps to Fix:\n1. Install Ollama: https://ollama.ai/\n2. Start Ollama service: 'ollama serve'\n3. Verify it's running: 'curl ${this.baseUrl}/api/version'\n4. Try again`);
+ return new NetworkError(`āŒ Ollama Service Not Running\n\nCannot connect to Ollama at ${this.baseUrl}\n\nšŸ”§ Steps to Fix:\n1. Install Ollama: https://ollama.ai/\n2. Start Ollama service: 'ollama serve'\n3. Verify it's running: 'curl ${this.baseUrl}/api/version'\n4. Try again`, this.providerName);
  }
  if (error.message?.includes("model") &&
  error.message?.includes("not found")) {
- return new Error(`āŒ Ollama Model Not Found\n\nModel '${this.modelName}' is not available locally.\n\nšŸ”§ Install Model:\n1. Run: ollama pull ${this.modelName}\n2. Or try a different model:\n - ollama pull ${FALLBACK_OLLAMA_MODEL}\n - ollama pull mistral:latest\n - ollama pull codellama:latest\n\nšŸ”§ List Available Models:\nollama list`);
+ return new InvalidModelError(`āŒ Ollama Model Not Found\n\nModel '${this.modelName}' is not available locally.\n\nšŸ”§ Install Model:\n1. Run: ollama pull ${this.modelName}\n2. Or try a different model:\n - ollama pull ${FALLBACK_OLLAMA_MODEL}\n - ollama pull mistral:latest\n - ollama pull codellama:latest\n\nšŸ”§ List Available Models:\nollama list`, this.providerName);
  }
  if (error.message?.includes("404")) {
- return new Error(`āŒ Ollama API Endpoint Not Found\n\nThe API endpoint might have changed or Ollama version is incompatible.\n\nšŸ”§ Check:\n1. Ollama version: 'ollama --version'\n2. Update Ollama to latest version\n3. Verify API is available: 'curl ${this.baseUrl}/api/version'`);
+ return new NetworkError(`āŒ Ollama API Endpoint Not Found\n\nThe API endpoint might have changed or Ollama version is incompatible.\n\nšŸ”§ Check:\n1. Ollama version: 'ollama --version'\n2. Update Ollama to latest version\n3. Verify API is available: 'curl ${this.baseUrl}/api/version'`, this.providerName);
  }
- return new Error(`āŒ Ollama Provider Error\n\n${error.message || "Unknown error occurred"}\n\nšŸ”§ Troubleshooting:\n1. Check if Ollama service is running\n2. Verify model is installed: 'ollama list'\n3. Check network connectivity to ${this.baseUrl}\n4. Review Ollama logs for details`);
+ return new ProviderError(`āŒ Ollama Provider Error\n\n${error.message || "Unknown error occurred"}\n\nšŸ”§ Troubleshooting:\n1. Check if Ollama service is running\n2. Verify model is installed: 'ollama list'\n3. Check network connectivity to ${this.baseUrl}\n4. Review Ollama logs for details`, this.providerName);
  }
  /**
  * Check if Ollama service is healthy and accessible
@@ -45,7 +45,7 @@ export declare class OpenAIProvider extends BaseProvider {
  * Ensures the tool has either valid Zod schema or valid JSON schema
  */
  private isValidToolParameters;
- handleProviderError(error: unknown): Error;
+ formatProviderError(error: unknown): Error;
  /**
  * executeGenerate method removed - generation is now handled by BaseProvider.
  * For details on the changes and migration steps, refer to the BaseProvider documentation
@@ -189,9 +189,9 @@ export class OpenAIProvider extends BaseProvider {
  }
  return true;
  }
- handleProviderError(error) {
+ formatProviderError(error) {
  if (error instanceof TimeoutError) {
- throw new NetworkError(error.message, this.providerName);
+ return new NetworkError(error.message, this.providerName);
  }
  const errorObj = error;
  const message = errorObj?.message && typeof errorObj.message === "string"
@@ -203,16 +203,16 @@ export class OpenAIProvider extends BaseProvider {
  if (message.includes("API_KEY_INVALID") ||
  message.includes("Invalid API key") ||
  errorType === "invalid_api_key") {
- throw new AuthenticationError("Invalid OpenAI API key. Please check your OPENAI_API_KEY environment variable.", this.providerName);
+ return new AuthenticationError("Invalid OpenAI API key. Please check your OPENAI_API_KEY environment variable.", this.providerName);
  }
  if (message.includes("rate limit") || errorType === "rate_limit_error") {
- throw new RateLimitError("OpenAI rate limit exceeded. Please try again later.", this.providerName);
+ return new RateLimitError("OpenAI rate limit exceeded. Please try again later.", this.providerName);
  }
  if (message.includes("model_not_found")) {
- throw new InvalidModelError(`Model not found: ${this.modelName}`, this.providerName);
+ return new InvalidModelError(`Model not found: ${this.modelName}`, this.providerName);
  }
  // Generic provider error
- throw new ProviderError(`OpenAI error: ${message}`, this.providerName);
+ return new ProviderError(`OpenAI error: ${message}`, this.providerName);
  }
  /**
  * executeGenerate method removed - generation is now handled by BaseProvider.
@@ -22,7 +22,7 @@ export declare class OpenRouterProvider extends BaseProvider {
  * Returns the Vercel AI SDK model instance for OpenRouter
  */
  protected getAISDKModel(): LanguageModelV1;
- handleProviderError(error: unknown): Error;
+ formatProviderError(error: unknown): Error;
  /**
  * OpenRouter supports tools for compatible models
  * Checks cached model capabilities or uses known patterns as fallback
@@ -89,7 +89,7 @@ export class OpenRouterProvider extends BaseProvider {
  getAISDKModel() {
  return this.model;
  }
- handleProviderError(error) {
+ formatProviderError(error) {
  if (error instanceof TimeoutError) {
  return new Error(`OpenRouter request timed out: ${error.message}`);
  }
@@ -251,7 +251,11 @@ export class OpenRouterProvider extends BaseProvider {
  chunkCount++;
  },
  onStepFinish: ({ toolCalls, toolResults }) => {
- logger.info("Tool execution completed", { toolResults, toolCalls });
+ logger.info("Tool execution completed", {
+ toolCallCount: toolCalls?.length || 0,
+ toolResultCount: toolResults?.length || 0,
+ toolNames: toolCalls?.map((tc) => tc.toolName),
+ });
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
  logger.warn("OpenRouterProvider: Failed to store tool executions", {
  provider: this.providerName,
@@ -20,7 +20,7 @@ export declare class OpenAICompatibleProvider extends BaseProvider {
  * Handles auto-discovery if no model was specified
  */
  protected getAISDKModel(): Promise<LanguageModelV1>;
- protected handleProviderError(error: unknown): Error;
+ protected formatProviderError(error: unknown): Error;
  /**
  * OpenAI Compatible endpoints support tools for compatible models
  */
@@ -110,7 +110,7 @@ export class OpenAICompatibleProvider extends BaseProvider {
  }
  return this.model;
  }
- handleProviderError(error) {
+ formatProviderError(error) {
  if (error instanceof TimeoutError) {
  return new Error(`OpenAI Compatible request timed out: ${error.message}`);
  }
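A recurring change in these provider hunks is the rename of handleProviderError to formatProviderError together with returning typed error classes (NetworkError, AuthenticationError, RateLimitError, InvalidModelError, ProviderError) instead of plain Error. The sketch below is a hypothetical consumer-side illustration, not part of this diff: it assumes the error classes are reachable through the package's public exports (the diff only shows them imported from "../types/errors.js" inside dist) and that failed calls surface these typed errors to the caller.

// Hypothetical TypeScript sketch: narrowing the typed provider errors with instanceof.
// Import path and re-export of the error classes are assumptions, not confirmed by this diff.
import { AuthenticationError, NetworkError, RateLimitError } from "@juspay/neurolink";

async function generateWithRetry(generate: () => Promise<string>): Promise<string> {
  try {
    return await generate();
  } catch (error) {
    if (error instanceof RateLimitError) {
      // Back off briefly and retry once on rate limits.
      await new Promise((resolve) => setTimeout(resolve, 5_000));
      return await generate();
    }
    if (error instanceof AuthenticationError || error instanceof NetworkError) {
      // Configuration or connectivity problems: rethrow unchanged for the caller to handle.
      throw error;
    }
    throw error;
  }
}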