@juspay/neurolink 9.54.6 → 9.54.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (408)
  1. package/CHANGELOG.md +2 -0
  2. package/dist/action/actionInputs.d.ts +1 -1
  3. package/dist/adapters/video/directorPipeline.js +6 -0
  4. package/dist/adapters/video/vertexVideoHandler.js +6 -0
  5. package/dist/agent/directTools.d.ts +3 -23
  6. package/dist/auth/AuthProviderFactory.d.ts +1 -3
  7. package/dist/auth/anthropicOAuth.d.ts +4 -7
  8. package/dist/auth/anthropicOAuth.js +23 -0
  9. package/dist/auth/errors.d.ts +1 -1
  10. package/dist/auth/index.d.ts +11 -0
  11. package/dist/auth/index.js +14 -0
  12. package/dist/auth/middleware/AuthMiddleware.d.ts +5 -60
  13. package/dist/auth/middleware/AuthMiddleware.js +3 -0
  14. package/dist/auth/middleware/rateLimitByUser.d.ts +4 -93
  15. package/dist/auth/middleware/rateLimitByUser.js +4 -0
  16. package/dist/auth/providers/BaseAuthProvider.d.ts +1 -1
  17. package/dist/auth/providers/CognitoProvider.js +3 -0
  18. package/dist/auth/providers/KeycloakProvider.js +3 -0
  19. package/dist/auth/providers/auth0.d.ts +1 -1
  20. package/dist/auth/sessionManager.d.ts +2 -0
  21. package/dist/auth/sessionManager.js +53 -11
  22. package/dist/auth/tokenStore.d.ts +2 -0
  23. package/dist/auth/tokenStore.js +45 -4
  24. package/dist/autoresearch/tools.d.ts +1 -16
  25. package/dist/browser/neurolink.min.js +353 -353
  26. package/dist/cli/commands/config.d.ts +3 -123
  27. package/dist/cli/commands/config.js +4 -2
  28. package/dist/cli/commands/evaluate.d.ts +1 -19
  29. package/dist/cli/commands/proxy.d.ts +1 -1
  30. package/dist/cli/commands/proxy.js +3 -0
  31. package/dist/cli/commands/rag.js +3 -0
  32. package/dist/cli/commands/setup-anthropic.d.ts +2 -6
  33. package/dist/cli/commands/setup-anthropic.js +1 -1
  34. package/dist/cli/commands/setup-azure.d.ts +2 -6
  35. package/dist/cli/commands/setup-azure.js +1 -1
  36. package/dist/cli/commands/setup-bedrock.d.ts +2 -6
  37. package/dist/cli/commands/setup-bedrock.js +1 -1
  38. package/dist/cli/commands/setup-gcp.d.ts +2 -6
  39. package/dist/cli/commands/setup-google-ai.d.ts +2 -6
  40. package/dist/cli/commands/setup-google-ai.js +1 -1
  41. package/dist/cli/commands/setup-huggingface.d.ts +1 -5
  42. package/dist/cli/commands/setup-mistral.d.ts +1 -5
  43. package/dist/cli/commands/setup-openai.d.ts +2 -6
  44. package/dist/cli/commands/setup-openai.js +1 -1
  45. package/dist/cli/commands/setup.d.ts +1 -8
  46. package/dist/cli/commands/task.js +1 -0
  47. package/dist/cli/commands/voiceServer.d.ts +1 -4
  48. package/dist/cli/loop/session.js +31 -10
  49. package/dist/cli/utils/interactiveSetup.d.ts +2 -15
  50. package/dist/cli/utils/videoFileUtils.d.ts +1 -15
  51. package/dist/client/aiSdkAdapter.d.ts +1 -1
  52. package/dist/client/aiSdkAdapter.js +1 -0
  53. package/dist/client/httpClient.d.ts +1 -0
  54. package/dist/client/httpClient.js +13 -0
  55. package/dist/client/sseClient.d.ts +1 -0
  56. package/dist/client/sseClient.js +29 -0
  57. package/dist/client/streamingClient.d.ts +2 -0
  58. package/dist/client/streamingClient.js +19 -0
  59. package/dist/client/wsClient.d.ts +6 -0
  60. package/dist/client/wsClient.js +90 -10
  61. package/dist/context/budgetChecker.js +3 -1
  62. package/dist/context/contextCompactor.js +163 -143
  63. package/dist/context/fileSummarizationService.d.ts +1 -9
  64. package/dist/context/summarizationEngine.js +29 -16
  65. package/dist/core/baseProvider.js +124 -153
  66. package/dist/core/infrastructure/baseRegistry.d.ts +1 -7
  67. package/dist/core/modules/GenerationHandler.d.ts +3 -2
  68. package/dist/core/modules/GenerationHandler.js +9 -1
  69. package/dist/core/modules/StreamHandler.js +9 -0
  70. package/dist/core/modules/ToolsManager.js +18 -2
  71. package/dist/evaluation/BatchEvaluator.d.ts +1 -97
  72. package/dist/evaluation/EvaluationAggregator.d.ts +1 -118
  73. package/dist/evaluation/EvaluatorFactory.d.ts +1 -13
  74. package/dist/evaluation/EvaluatorRegistry.d.ts +1 -50
  75. package/dist/evaluation/errors/EvaluationError.d.ts +2 -27
  76. package/dist/evaluation/hooks/langfuseAdapter.d.ts +1 -39
  77. package/dist/evaluation/hooks/observabilityHooks.d.ts +3 -55
  78. package/dist/evaluation/hooks/observabilityHooks.js +3 -0
  79. package/dist/evaluation/pipeline/strategies/batchStrategy.d.ts +7 -61
  80. package/dist/evaluation/pipeline/strategies/batchStrategy.js +7 -7
  81. package/dist/evaluation/ragasEvaluator.js +54 -37
  82. package/dist/evaluation/reporting/metricsCollector.d.ts +1 -60
  83. package/dist/evaluation/reporting/reportGenerator.d.ts +1 -17
  84. package/dist/evaluation/scorers/rule/contentSimilarityScorer.d.ts +1 -29
  85. package/dist/evaluation/scorers/rule/formatScorer.d.ts +1 -42
  86. package/dist/evaluation/scorers/rule/keywordCoverageScorer.d.ts +1 -19
  87. package/dist/evaluation/scorers/rule/lengthScorer.d.ts +1 -33
  88. package/dist/factories/providerFactory.d.ts +1 -16
  89. package/dist/factories/providerFactory.js +2 -0
  90. package/dist/image-gen/ImageGenService.d.ts +3 -0
  91. package/dist/image-gen/ImageGenService.js +3 -0
  92. package/dist/lib/action/actionInputs.d.ts +1 -1
  93. package/dist/lib/adapters/video/directorPipeline.js +6 -0
  94. package/dist/lib/adapters/video/vertexVideoHandler.js +6 -0
  95. package/dist/lib/agent/directTools.d.ts +3 -23
  96. package/dist/lib/auth/AuthProviderFactory.d.ts +1 -3
  97. package/dist/lib/auth/anthropicOAuth.d.ts +4 -7
  98. package/dist/lib/auth/anthropicOAuth.js +23 -0
  99. package/dist/lib/auth/errors.d.ts +1 -1
  100. package/dist/lib/auth/index.d.ts +11 -0
  101. package/dist/lib/auth/index.js +14 -0
  102. package/dist/lib/auth/middleware/AuthMiddleware.d.ts +5 -60
  103. package/dist/lib/auth/middleware/AuthMiddleware.js +3 -0
  104. package/dist/lib/auth/middleware/rateLimitByUser.d.ts +4 -93
  105. package/dist/lib/auth/middleware/rateLimitByUser.js +4 -0
  106. package/dist/lib/auth/providers/BaseAuthProvider.d.ts +1 -1
  107. package/dist/lib/auth/providers/CognitoProvider.js +3 -0
  108. package/dist/lib/auth/providers/KeycloakProvider.js +3 -0
  109. package/dist/lib/auth/providers/auth0.d.ts +1 -1
  110. package/dist/lib/auth/sessionManager.d.ts +2 -0
  111. package/dist/lib/auth/sessionManager.js +53 -11
  112. package/dist/lib/auth/tokenStore.d.ts +2 -0
  113. package/dist/lib/auth/tokenStore.js +45 -4
  114. package/dist/lib/autoresearch/tools.d.ts +1 -16
  115. package/dist/lib/client/aiSdkAdapter.d.ts +1 -1
  116. package/dist/lib/client/aiSdkAdapter.js +1 -0
  117. package/dist/lib/client/httpClient.d.ts +1 -0
  118. package/dist/lib/client/httpClient.js +13 -0
  119. package/dist/lib/client/sseClient.d.ts +1 -0
  120. package/dist/lib/client/sseClient.js +29 -0
  121. package/dist/lib/client/streamingClient.d.ts +2 -0
  122. package/dist/lib/client/streamingClient.js +19 -0
  123. package/dist/lib/client/wsClient.d.ts +6 -0
  124. package/dist/lib/client/wsClient.js +90 -10
  125. package/dist/lib/context/budgetChecker.js +3 -1
  126. package/dist/lib/context/contextCompactor.js +163 -143
  127. package/dist/lib/context/fileSummarizationService.d.ts +1 -9
  128. package/dist/lib/context/summarizationEngine.js +29 -16
  129. package/dist/lib/core/baseProvider.js +124 -153
  130. package/dist/lib/core/infrastructure/baseRegistry.d.ts +1 -7
  131. package/dist/lib/core/modules/GenerationHandler.d.ts +3 -2
  132. package/dist/lib/core/modules/GenerationHandler.js +9 -1
  133. package/dist/lib/core/modules/StreamHandler.js +9 -0
  134. package/dist/lib/core/modules/ToolsManager.js +18 -2
  135. package/dist/lib/evaluation/BatchEvaluator.d.ts +1 -97
  136. package/dist/lib/evaluation/EvaluationAggregator.d.ts +1 -118
  137. package/dist/lib/evaluation/EvaluatorFactory.d.ts +1 -13
  138. package/dist/lib/evaluation/EvaluatorRegistry.d.ts +1 -50
  139. package/dist/lib/evaluation/errors/EvaluationError.d.ts +2 -27
  140. package/dist/lib/evaluation/hooks/langfuseAdapter.d.ts +1 -39
  141. package/dist/lib/evaluation/hooks/observabilityHooks.d.ts +3 -55
  142. package/dist/lib/evaluation/hooks/observabilityHooks.js +3 -0
  143. package/dist/lib/evaluation/pipeline/strategies/batchStrategy.d.ts +7 -61
  144. package/dist/lib/evaluation/pipeline/strategies/batchStrategy.js +7 -7
  145. package/dist/lib/evaluation/ragasEvaluator.js +54 -37
  146. package/dist/lib/evaluation/reporting/metricsCollector.d.ts +1 -60
  147. package/dist/lib/evaluation/reporting/reportGenerator.d.ts +1 -17
  148. package/dist/lib/evaluation/scorers/rule/contentSimilarityScorer.d.ts +1 -29
  149. package/dist/lib/evaluation/scorers/rule/formatScorer.d.ts +1 -42
  150. package/dist/lib/evaluation/scorers/rule/keywordCoverageScorer.d.ts +1 -19
  151. package/dist/lib/evaluation/scorers/rule/lengthScorer.d.ts +1 -33
  152. package/dist/lib/factories/providerFactory.d.ts +1 -16
  153. package/dist/lib/factories/providerFactory.js +2 -0
  154. package/dist/lib/image-gen/ImageGenService.d.ts +3 -0
  155. package/dist/lib/image-gen/ImageGenService.js +3 -0
  156. package/dist/lib/mcp/batching/requestBatcher.js +99 -73
  157. package/dist/lib/mcp/httpRateLimiter.js +3 -1
  158. package/dist/lib/mcp/httpRetryHandler.js +3 -1
  159. package/dist/lib/mcp/mcpClientFactory.js +3 -1
  160. package/dist/lib/mcp/multiServerManager.d.ts +1 -14
  161. package/dist/lib/mcp/servers/aiProviders/aiAnalysisTools.js +5 -1
  162. package/dist/lib/mcp/servers/aiProviders/aiWorkflowTools.js +1 -0
  163. package/dist/lib/mcp/toolDiscoveryService.js +70 -57
  164. package/dist/lib/mcp/toolRegistry.js +11 -1
  165. package/dist/lib/memory/memoryRetrievalTools.js +182 -141
  166. package/dist/lib/neurolink.js +236 -40
  167. package/dist/lib/observability/exporterRegistry.d.ts +3 -21
  168. package/dist/lib/observability/exporters/sentryExporter.js +1 -0
  169. package/dist/lib/observability/metricsAggregator.d.ts +1 -31
  170. package/dist/lib/observability/tokenTracker.d.ts +7 -16
  171. package/dist/lib/observability/tokenTracker.js +6 -4
  172. package/dist/lib/observability/utils/spanSerializer.d.ts +5 -1
  173. package/dist/lib/observability/utils/spanSerializer.js +24 -4
  174. package/dist/lib/processors/base/BaseFileProcessor.js +66 -53
  175. package/dist/lib/processors/document/ExcelProcessor.d.ts +1 -1
  176. package/dist/lib/processors/errors/errorHelpers.d.ts +1 -31
  177. package/dist/lib/processors/errors/errorSerializer.d.ts +1 -45
  178. package/dist/lib/processors/registry/ProcessorRegistry.js +17 -6
  179. package/dist/lib/providers/amazonBedrock.js +189 -15
  180. package/dist/lib/providers/amazonSagemaker.js +25 -11
  181. package/dist/lib/providers/anthropic.js +13 -0
  182. package/dist/lib/providers/azureOpenai.js +2 -0
  183. package/dist/lib/providers/googleAiStudio.js +82 -0
  184. package/dist/lib/providers/googleVertex.js +52 -0
  185. package/dist/lib/providers/huggingFace.js +2 -0
  186. package/dist/lib/providers/litellm.js +2 -0
  187. package/dist/lib/providers/mistral.js +2 -0
  188. package/dist/lib/providers/ollama.js +84 -5
  189. package/dist/lib/providers/openAI.d.ts +2 -0
  190. package/dist/lib/providers/openAI.js +17 -6
  191. package/dist/lib/providers/openRouter.js +2 -0
  192. package/dist/lib/providers/openaiCompatible.js +2 -0
  193. package/dist/lib/providers/sagemaker/detection.d.ts +1 -33
  194. package/dist/lib/providers/sagemaker/diagnostics.d.ts +1 -25
  195. package/dist/lib/providers/sagemaker/language-model.d.ts +1 -1
  196. package/dist/lib/proxy/proxyConfig.js +4 -0
  197. package/dist/lib/proxy/proxyEnv.d.ts +1 -17
  198. package/dist/lib/proxy/proxyTracer.d.ts +1 -36
  199. package/dist/lib/proxy/proxyTracer.js +9 -0
  200. package/dist/lib/proxy/quietDetector.d.ts +1 -7
  201. package/dist/lib/proxy/rawStreamCapture.d.ts +1 -10
  202. package/dist/lib/proxy/requestLogger.d.ts +1 -21
  203. package/dist/lib/proxy/sseInterceptor.d.ts +1 -66
  204. package/dist/lib/proxy/sseInterceptor.js +6 -0
  205. package/dist/lib/proxy/updateChecker.d.ts +1 -6
  206. package/dist/lib/proxy/updateState.d.ts +1 -12
  207. package/dist/lib/rag/chunkers/BaseChunker.js +36 -22
  208. package/dist/lib/rag/chunking/jsonChunker.d.ts +1 -1
  209. package/dist/lib/rag/errors/RAGError.d.ts +1 -2
  210. package/dist/lib/rag/ragIntegration.js +45 -32
  211. package/dist/lib/rag/reranker/reranker.js +151 -122
  212. package/dist/lib/rag/retrieval/vectorQueryTool.js +79 -65
  213. package/dist/lib/sdk/toolRegistration.d.ts +10 -44
  214. package/dist/lib/sdk/toolRegistration.js +1 -1
  215. package/dist/lib/server/middleware/abortSignal.d.ts +1 -11
  216. package/dist/lib/server/middleware/auth.d.ts +1 -21
  217. package/dist/lib/server/middleware/auth.js +12 -0
  218. package/dist/lib/server/middleware/common.js +48 -32
  219. package/dist/lib/server/middleware/deprecation.d.ts +1 -20
  220. package/dist/lib/server/middleware/rateLimit.d.ts +1 -75
  221. package/dist/lib/server/middleware/validation.d.ts +3 -81
  222. package/dist/lib/server/middleware/validation.js +3 -0
  223. package/dist/lib/server/openapi/generator.d.ts +1 -47
  224. package/dist/lib/server/routes/agentRoutes.js +112 -57
  225. package/dist/lib/server/routes/claudeProxyRoutes.d.ts +1 -6
  226. package/dist/lib/server/routes/claudeProxyRoutes.js +127 -13
  227. package/dist/lib/server/routes/healthRoutes.js +58 -12
  228. package/dist/lib/server/routes/index.d.ts +1 -26
  229. package/dist/lib/server/routes/mcpRoutes.js +40 -7
  230. package/dist/lib/server/routes/memoryRoutes.js +22 -7
  231. package/dist/lib/server/routes/openApiRoutes.js +30 -6
  232. package/dist/lib/server/routes/toolRoutes.js +140 -68
  233. package/dist/lib/server/streaming/dataStream.d.ts +1 -35
  234. package/dist/lib/server/streaming/dataStream.js +15 -0
  235. package/dist/lib/services/server/ai/observability/instrumentation.js +114 -14
  236. package/dist/lib/session/globalSessionState.d.ts +1 -10
  237. package/dist/lib/tasks/tools/taskTools.d.ts +2 -2
  238. package/dist/lib/telemetry/traceContext.d.ts +9 -0
  239. package/dist/lib/telemetry/traceContext.js +19 -0
  240. package/dist/lib/telemetry/tracers.d.ts +2 -0
  241. package/dist/lib/telemetry/tracers.js +2 -0
  242. package/dist/lib/types/action.d.ts +2 -0
  243. package/dist/lib/types/artifact.d.ts +7 -0
  244. package/dist/lib/types/auth.d.ts +125 -18
  245. package/dist/lib/types/autoresearch.d.ts +12 -0
  246. package/dist/lib/types/cli.d.ts +415 -0
  247. package/dist/lib/types/client.d.ts +34 -0
  248. package/dist/lib/types/common.d.ts +12 -41
  249. package/dist/lib/types/context.d.ts +5 -0
  250. package/dist/lib/types/evaluation.d.ts +332 -1
  251. package/dist/lib/types/file.d.ts +4 -0
  252. package/dist/lib/types/mcp.d.ts +102 -2
  253. package/dist/lib/types/middleware.d.ts +116 -0
  254. package/dist/lib/types/multimodal.d.ts +65 -0
  255. package/dist/lib/types/observability.d.ts +81 -0
  256. package/dist/lib/types/processor.d.ts +47 -0
  257. package/dist/lib/types/providers.d.ts +120 -16
  258. package/dist/lib/types/proxy.d.ts +321 -1
  259. package/dist/lib/types/rag.d.ts +22 -0
  260. package/dist/lib/types/scorer.d.ts +141 -0
  261. package/dist/lib/types/server.d.ts +99 -0
  262. package/dist/lib/types/span.d.ts +2 -1
  263. package/dist/lib/types/span.js +1 -0
  264. package/dist/lib/types/tools.d.ts +44 -0
  265. package/dist/lib/types/tts.d.ts +6 -0
  266. package/dist/lib/types/utilities.d.ts +22 -0
  267. package/dist/lib/types/workflow.d.ts +18 -0
  268. package/dist/lib/utils/async/retry.d.ts +2 -8
  269. package/dist/lib/utils/async/retry.js +9 -9
  270. package/dist/lib/utils/imageCompressor.d.ts +1 -21
  271. package/dist/lib/utils/imageCompressor.js +5 -1
  272. package/dist/lib/utils/messageBuilder.d.ts +1 -1
  273. package/dist/lib/utils/redis.d.ts +1 -4
  274. package/dist/lib/utils/toolEndEmitter.d.ts +25 -0
  275. package/dist/lib/utils/toolEndEmitter.js +65 -0
  276. package/dist/lib/workflow/config.d.ts +7 -32
  277. package/dist/lib/workflow/core/ensembleExecutor.js +28 -0
  278. package/dist/lib/workflow/core/judgeScorer.js +23 -0
  279. package/dist/lib/workflow/core/responseConditioner.js +17 -0
  280. package/dist/lib/workflow/core/workflowRunner.d.ts +1 -19
  281. package/dist/lib/workflow/core/workflowRunner.js +202 -147
  282. package/dist/mcp/batching/requestBatcher.js +99 -73
  283. package/dist/mcp/httpRateLimiter.js +3 -1
  284. package/dist/mcp/httpRetryHandler.js +3 -1
  285. package/dist/mcp/mcpClientFactory.js +3 -1
  286. package/dist/mcp/multiServerManager.d.ts +1 -14
  287. package/dist/mcp/servers/aiProviders/aiAnalysisTools.js +5 -1
  288. package/dist/mcp/servers/aiProviders/aiWorkflowTools.js +1 -0
  289. package/dist/mcp/toolDiscoveryService.js +70 -57
  290. package/dist/mcp/toolRegistry.js +11 -1
  291. package/dist/memory/memoryRetrievalTools.js +182 -141
  292. package/dist/neurolink.js +236 -40
  293. package/dist/observability/exporterRegistry.d.ts +3 -21
  294. package/dist/observability/exporters/sentryExporter.js +1 -0
  295. package/dist/observability/metricsAggregator.d.ts +1 -31
  296. package/dist/observability/tokenTracker.d.ts +7 -16
  297. package/dist/observability/tokenTracker.js +6 -4
  298. package/dist/observability/utils/spanSerializer.d.ts +5 -1
  299. package/dist/observability/utils/spanSerializer.js +24 -4
  300. package/dist/processors/base/BaseFileProcessor.js +66 -53
  301. package/dist/processors/document/ExcelProcessor.d.ts +1 -1
  302. package/dist/processors/errors/errorHelpers.d.ts +1 -31
  303. package/dist/processors/errors/errorSerializer.d.ts +1 -45
  304. package/dist/processors/registry/ProcessorRegistry.js +17 -6
  305. package/dist/providers/amazonBedrock.js +189 -15
  306. package/dist/providers/amazonSagemaker.js +25 -11
  307. package/dist/providers/anthropic.js +13 -0
  308. package/dist/providers/azureOpenai.js +2 -0
  309. package/dist/providers/googleAiStudio.js +82 -0
  310. package/dist/providers/googleVertex.js +52 -0
  311. package/dist/providers/huggingFace.js +2 -0
  312. package/dist/providers/litellm.js +2 -0
  313. package/dist/providers/mistral.js +2 -0
  314. package/dist/providers/ollama.js +84 -5
  315. package/dist/providers/openAI.d.ts +2 -0
  316. package/dist/providers/openAI.js +17 -6
  317. package/dist/providers/openRouter.js +2 -0
  318. package/dist/providers/openaiCompatible.js +2 -0
  319. package/dist/providers/sagemaker/detection.d.ts +1 -33
  320. package/dist/providers/sagemaker/diagnostics.d.ts +1 -25
  321. package/dist/providers/sagemaker/language-model.d.ts +1 -1
  322. package/dist/proxy/proxyConfig.js +4 -0
  323. package/dist/proxy/proxyEnv.d.ts +1 -17
  324. package/dist/proxy/proxyTracer.d.ts +1 -36
  325. package/dist/proxy/proxyTracer.js +9 -0
  326. package/dist/proxy/quietDetector.d.ts +1 -7
  327. package/dist/proxy/rawStreamCapture.d.ts +1 -10
  328. package/dist/proxy/requestLogger.d.ts +1 -21
  329. package/dist/proxy/sseInterceptor.d.ts +1 -66
  330. package/dist/proxy/sseInterceptor.js +6 -0
  331. package/dist/proxy/updateChecker.d.ts +1 -6
  332. package/dist/proxy/updateState.d.ts +1 -12
  333. package/dist/rag/chunkers/BaseChunker.js +36 -22
  334. package/dist/rag/chunking/jsonChunker.d.ts +1 -1
  335. package/dist/rag/errors/RAGError.d.ts +1 -2
  336. package/dist/rag/ragIntegration.js +45 -32
  337. package/dist/rag/reranker/reranker.js +151 -122
  338. package/dist/rag/retrieval/vectorQueryTool.js +79 -65
  339. package/dist/sdk/toolRegistration.d.ts +10 -44
  340. package/dist/sdk/toolRegistration.js +1 -1
  341. package/dist/server/middleware/abortSignal.d.ts +1 -11
  342. package/dist/server/middleware/auth.d.ts +1 -21
  343. package/dist/server/middleware/auth.js +12 -0
  344. package/dist/server/middleware/common.js +48 -32
  345. package/dist/server/middleware/deprecation.d.ts +1 -20
  346. package/dist/server/middleware/rateLimit.d.ts +1 -75
  347. package/dist/server/middleware/validation.d.ts +3 -81
  348. package/dist/server/middleware/validation.js +3 -0
  349. package/dist/server/openapi/generator.d.ts +1 -47
  350. package/dist/server/routes/agentRoutes.js +112 -57
  351. package/dist/server/routes/claudeProxyRoutes.d.ts +1 -6
  352. package/dist/server/routes/claudeProxyRoutes.js +127 -13
  353. package/dist/server/routes/healthRoutes.js +58 -12
  354. package/dist/server/routes/index.d.ts +1 -26
  355. package/dist/server/routes/mcpRoutes.js +40 -7
  356. package/dist/server/routes/memoryRoutes.js +22 -7
  357. package/dist/server/routes/openApiRoutes.js +30 -6
  358. package/dist/server/routes/toolRoutes.js +140 -68
  359. package/dist/server/streaming/dataStream.d.ts +1 -35
  360. package/dist/server/streaming/dataStream.js +15 -0
  361. package/dist/services/server/ai/observability/instrumentation.js +114 -14
  362. package/dist/session/globalSessionState.d.ts +1 -10
  363. package/dist/tasks/tools/taskTools.d.ts +2 -2
  364. package/dist/telemetry/traceContext.d.ts +9 -0
  365. package/dist/telemetry/traceContext.js +18 -0
  366. package/dist/telemetry/tracers.d.ts +2 -0
  367. package/dist/telemetry/tracers.js +2 -0
  368. package/dist/types/action.d.ts +2 -0
  369. package/dist/types/artifact.d.ts +7 -0
  370. package/dist/types/auth.d.ts +125 -18
  371. package/dist/types/autoresearch.d.ts +12 -0
  372. package/dist/types/cli.d.ts +415 -0
  373. package/dist/types/client.d.ts +34 -0
  374. package/dist/types/common.d.ts +12 -41
  375. package/dist/types/context.d.ts +5 -0
  376. package/dist/types/evaluation.d.ts +332 -1
  377. package/dist/types/file.d.ts +4 -0
  378. package/dist/types/mcp.d.ts +102 -2
  379. package/dist/types/middleware.d.ts +116 -0
  380. package/dist/types/multimodal.d.ts +65 -0
  381. package/dist/types/observability.d.ts +81 -0
  382. package/dist/types/processor.d.ts +47 -0
  383. package/dist/types/providers.d.ts +120 -16
  384. package/dist/types/proxy.d.ts +321 -1
  385. package/dist/types/rag.d.ts +22 -0
  386. package/dist/types/scorer.d.ts +141 -0
  387. package/dist/types/server.d.ts +99 -0
  388. package/dist/types/span.d.ts +2 -1
  389. package/dist/types/span.js +1 -0
  390. package/dist/types/tools.d.ts +44 -0
  391. package/dist/types/tts.d.ts +6 -0
  392. package/dist/types/utilities.d.ts +22 -0
  393. package/dist/types/workflow.d.ts +18 -0
  394. package/dist/utils/async/retry.d.ts +2 -8
  395. package/dist/utils/async/retry.js +9 -9
  396. package/dist/utils/imageCompressor.d.ts +1 -21
  397. package/dist/utils/imageCompressor.js +5 -1
  398. package/dist/utils/messageBuilder.d.ts +1 -1
  399. package/dist/utils/redis.d.ts +1 -4
  400. package/dist/utils/toolEndEmitter.d.ts +25 -0
  401. package/dist/utils/toolEndEmitter.js +64 -0
  402. package/dist/workflow/config.d.ts +4 -29
  403. package/dist/workflow/core/ensembleExecutor.js +28 -0
  404. package/dist/workflow/core/judgeScorer.js +23 -0
  405. package/dist/workflow/core/responseConditioner.js +17 -0
  406. package/dist/workflow/core/workflowRunner.d.ts +1 -19
  407. package/dist/workflow/core/workflowRunner.js +202 -147
  408. package/package.json +2 -1
@@ -4,15 +4,17 @@ import path from "path";
4
4
  import { createAnalytics } from "../core/analytics.js";
5
5
  import { BaseProvider } from "../core/baseProvider.js";
6
6
  import { DEFAULT_MAX_STEPS } from "../core/constants.js";
7
- import { AuthenticationError, ProviderError } from "../types/index.js";
7
+ import { AuthenticationError, ProviderError, RateLimitError, } from "../types/index.js";
8
8
  import { isAbortError, withTimeout } from "../utils/errorHandling.js";
9
+ import { emitToolEndFromStepFinish } from "../utils/toolEndEmitter.js";
9
10
  import { logger } from "../utils/logger.js";
10
11
  import { calculateCost } from "../utils/pricing.js";
11
12
  import { buildMultimodalMessagesArray } from "../utils/messageBuilder.js";
12
13
  import { buildMultimodalOptions } from "../utils/multimodalOptionsBuilder.js";
13
14
  import { convertZodToJsonSchema } from "../utils/schemaConversion.js";
14
- import { trace, SpanKind, SpanStatusCode } from "@opentelemetry/api";
15
- const bedrockTracer = trace.getTracer("neurolink.bedrock");
15
+ import { SpanKind, SpanStatusCode } from "@opentelemetry/api";
16
+ import { tracers } from "../telemetry/index.js";
17
+ const bedrockTracer = tracers.provider;
16
18
  // Bedrock-specific types now imported from ../types/providerSpecific.js
17
19
  export class AmazonBedrockProvider extends BaseProvider {
18
20
  bedrockClient;
@@ -114,6 +116,7 @@ export class AmazonBedrockProvider extends BaseProvider {
114
116
  // Override the main generate method to implement conversation management
115
117
  async generate(optionsOrPrompt) {
116
118
  logger.debug("[AmazonBedrockProvider] generate() called with conversation management");
119
+ const generateStartTime = Date.now();
117
120
  const options = typeof optionsOrPrompt === "string"
118
121
  ? { prompt: optionsOrPrompt }
119
122
  : optionsOrPrompt;
@@ -159,10 +162,55 @@ export class AmazonBedrockProvider extends BaseProvider {
159
162
  }
160
163
  logger.debug(`[AmazonBedrockProvider] Starting conversation with ${this.conversationHistory.length} message(s)`);
161
164
  // Start conversation loop and return enhanced result
162
- const text = await this.conversationLoop(options);
165
+ let text;
166
+ let usage;
167
+ let finishReason;
168
+ try {
169
+ ({ text, usage, finishReason } = await this.conversationLoop(options));
170
+ }
171
+ catch (error) {
172
+ // Emit failure generation:end so Pipeline B records the failed generation
173
+ const failEmitter = this.neurolink?.getEventEmitter();
174
+ if (failEmitter) {
175
+ failEmitter.emit("generation:end", {
176
+ provider: this.providerName,
177
+ responseTime: Date.now() - generateStartTime,
178
+ timestamp: Date.now(),
179
+ result: {
180
+ content: "",
181
+ usage: { input: 0, output: 0, total: 0 },
182
+ model: this.modelName || this.getDefaultModel(),
183
+ provider: this.providerName,
184
+ finishReason: "error",
185
+ },
186
+ success: false,
187
+ error: error instanceof Error ? error.message : String(error),
188
+ });
189
+ }
190
+ throw error;
191
+ }
192
+ // Emit generation:end so Pipeline B (Langfuse) creates a GENERATION observation.
193
+ // Bedrock bypasses the Vercel AI SDK so experimental_telemetry is never injected;
194
+ // we emit the event manually to fill that gap.
195
+ const generateEmitter = this.neurolink?.getEventEmitter();
196
+ if (generateEmitter) {
197
+ generateEmitter.emit("generation:end", {
198
+ provider: this.providerName,
199
+ responseTime: Date.now() - generateStartTime,
200
+ timestamp: Date.now(),
201
+ result: {
202
+ content: text,
203
+ usage,
204
+ model: this.modelName || this.getDefaultModel(),
205
+ provider: this.providerName,
206
+ finishReason,
207
+ },
208
+ success: true,
209
+ });
210
+ }
163
211
  return {
164
212
  content: text, // CLI expects 'content' not 'text'
165
- usage: { total: 0, input: 0, output: 0 },
213
+ usage,
166
214
  model: this.modelName || this.getDefaultModel(),
167
215
  provider: this.getProviderName(),
168
216
  };
@@ -170,6 +218,9 @@ export class AmazonBedrockProvider extends BaseProvider {
170
218
  async conversationLoop(options) {
171
219
  const maxIterations = 10; // Prevent infinite loops
172
220
  let iteration = 0;
221
+ let totalInputTokens = 0;
222
+ let totalOutputTokens = 0;
223
+ let lastFinishReason;
173
224
  while (iteration < maxIterations) {
174
225
  iteration++;
175
226
  logger.debug(`[AmazonBedrockProvider] Conversation iteration ${iteration}`);
@@ -177,6 +228,13 @@ export class AmazonBedrockProvider extends BaseProvider {
177
228
  logger.debug(`[AmazonBedrockProvider] About to call Bedrock API`);
178
229
  const response = await this.callBedrock(options);
179
230
  logger.debug(`[AmazonBedrockProvider] Received Bedrock response`, JSON.stringify(response, null, 2));
231
+ // Accumulate real token counts and capture the stop reason so
232
+ // Pipeline B (Langfuse) gets correct usage and finishReason.
233
+ totalInputTokens += response.usage?.inputTokens ?? 0;
234
+ totalOutputTokens += response.usage?.outputTokens ?? 0;
235
+ if (response.stopReason) {
236
+ lastFinishReason = response.stopReason;
237
+ }
180
238
  const result = await this.handleBedrockResponse(response);
181
239
  logger.debug(`[AmazonBedrockProvider] Handle response result:`, result);
182
240
  if (result.shouldContinue) {
@@ -185,7 +243,15 @@ export class AmazonBedrockProvider extends BaseProvider {
185
243
  else {
186
244
  logger.debug(`[AmazonBedrockProvider] Conversation completed with final text`);
187
245
  logger.debug(`[AmazonBedrockProvider] Returning final text: "${result.text}"`);
188
- return result.text || "";
246
+ return {
247
+ text: result.text || "",
248
+ usage: {
249
+ input: totalInputTokens,
250
+ output: totalOutputTokens,
251
+ total: totalInputTokens + totalOutputTokens,
252
+ },
253
+ finishReason: lastFinishReason,
254
+ };
189
255
  }
190
256
  }
191
257
  catch (error) {
@@ -905,6 +971,12 @@ export class AmazonBedrockProvider extends BaseProvider {
905
971
  const startTime = Date.now();
906
972
  const maxIterations = options.maxSteps || DEFAULT_MAX_STEPS;
907
973
  let iteration = 0;
974
+ // Shared counters updated by both the first-iteration inline loop and
975
+ // the processStreamResponse loop. Read by the final generation:end emit
976
+ // so Pipeline B (Langfuse) gets real token counts from Bedrock streams.
977
+ let streamTotalInputTokens = 0;
978
+ let streamTotalOutputTokens = 0;
979
+ let streamLastStopReason;
908
980
  // The REAL issue: ReadableStream errors don't bubble up to the caller
909
981
  // So we need to make the first streaming call synchronously to test permissions
910
982
  try {
@@ -999,9 +1071,23 @@ export class AmazonBedrockProvider extends BaseProvider {
999
1071
  }
1000
1072
  if (chunk.messageStop) {
1001
1073
  firstStopReason = chunk.messageStop.stopReason || "end_turn";
1074
+ // Don't break — metadata chunk with usage comes after messageStop
1075
+ continue;
1076
+ }
1077
+ // Accumulate usage from Bedrock metadata chunk for Pipeline B.
1078
+ // The metadata chunk is emitted after messageStop with aggregate usage.
1079
+ if (chunk.metadata?.usage) {
1080
+ streamTotalInputTokens +=
1081
+ chunk.metadata.usage.inputTokens ?? 0;
1082
+ streamTotalOutputTokens +=
1083
+ chunk.metadata.usage.outputTokens ?? 0;
1084
+ // Stream is effectively complete after metadata chunk
1002
1085
  break;
1003
1086
  }
1004
1087
  }
1088
+ if (firstStopReason) {
1089
+ streamLastStopReason = firstStopReason;
1090
+ }
1005
1091
  // Add first assistant message to conversation history
1006
1092
  const firstAssistantMessage = {
1007
1093
  role: "assistant",
@@ -1033,7 +1119,15 @@ export class AmazonBedrockProvider extends BaseProvider {
1033
1119
  iteration++;
1034
1120
  logger.debug(`[AmazonBedrockProvider] Streaming iteration ${iteration}`);
1035
1121
  const commandInput = await this.prepareStreamCommand(options);
1036
- const { stopReason, assistantMessage } = await this.processStreamResponse(commandInput, controller);
1122
+ const { stopReason, assistantMessage, usage } = await this.processStreamResponse(commandInput, controller);
1123
+ // Accumulate real usage from Bedrock metadata chunks.
1124
+ if (usage) {
1125
+ streamTotalInputTokens += usage.input;
1126
+ streamTotalOutputTokens += usage.output;
1127
+ }
1128
+ if (stopReason) {
1129
+ streamLastStopReason = stopReason;
1130
+ }
1037
1131
  streamSpan.addEvent("stream.turn_complete", {
1038
1132
  iteration,
1039
1133
  stop_reason: stopReason,
@@ -1064,14 +1158,60 @@ export class AmazonBedrockProvider extends BaseProvider {
1064
1158
  }
1065
1159
  },
1066
1160
  });
1067
- // Create analytics promise (without token tracking for now due to AWS SDK limitations)
1068
- const analyticsPromise = Promise.resolve(createAnalytics(this.providerName, this.modelName || this.getDefaultModel(), { usage: { input: 0, output: 0, total: 0 } }, Date.now() - startTime, {
1069
- requestId: `bedrock-stream-${Date.now()}`,
1070
- streamingMode: true,
1071
- note: "Token usage not available from AWS SDK streaming responses",
1072
- }));
1161
+ // Emit generation:end after the stream completes so Pipeline B (Langfuse)
1162
+ // creates a GENERATION observation. Bedrock bypasses the Vercel AI SDK so
1163
+ // experimental_telemetry is never injected; we emit the event manually.
1164
+ const streamEmitter = this.neurolink?.getEventEmitter();
1165
+ const streamAsyncIterable = this.convertToAsyncIterable(stream);
1166
+ const self = this;
1167
+ // Defer analytics resolution until the stream completes so we have
1168
+ // real token counts aggregated from Bedrock metadata chunks.
1169
+ let resolveAnalytics;
1170
+ const analyticsPromise = new Promise((resolve) => {
1171
+ resolveAnalytics = resolve;
1172
+ });
1173
+ const wrappedStreamIterable = {
1174
+ async *[Symbol.asyncIterator]() {
1175
+ let streamErrored = false;
1176
+ try {
1177
+ yield* streamAsyncIterable;
1178
+ }
1179
+ catch (error) {
1180
+ streamErrored = true;
1181
+ throw error;
1182
+ }
1183
+ finally {
1184
+ const aggregatedUsage = {
1185
+ input: streamTotalInputTokens,
1186
+ output: streamTotalOutputTokens,
1187
+ total: streamTotalInputTokens + streamTotalOutputTokens,
1188
+ };
1189
+ // Resolve analytics with accumulated token counts from Bedrock
1190
+ // metadata chunks so Pipeline A also reports real usage.
1191
+ resolveAnalytics(createAnalytics(self.providerName, self.modelName || self.getDefaultModel(), { usage: aggregatedUsage }, Date.now() - startTime, {
1192
+ requestId: `bedrock-stream-${Date.now()}`,
1193
+ streamingMode: true,
1194
+ }));
1195
+ if (streamEmitter) {
1196
+ streamEmitter.emit("generation:end", {
1197
+ provider: self.providerName,
1198
+ responseTime: Date.now() - startTime,
1199
+ timestamp: Date.now(),
1200
+ result: {
1201
+ content: "",
1202
+ usage: aggregatedUsage,
1203
+ model: self.modelName || self.getDefaultModel(),
1204
+ provider: self.providerName,
1205
+ finishReason: streamErrored ? "error" : streamLastStopReason,
1206
+ },
1207
+ success: !streamErrored,
1208
+ });
1209
+ }
1210
+ }
1211
+ },
1212
+ };
1073
1213
  return {
1074
- stream: this.convertToAsyncIterable(stream),
1214
+ stream: wrappedStreamIterable,
1075
1215
  usage: { total: 0, input: 0, output: 0 },
1076
1216
  model: this.modelName || this.getDefaultModel(),
1077
1217
  provider: this.getProviderName(),
@@ -1174,6 +1314,7 @@ export class AmazonBedrockProvider extends BaseProvider {
1174
1314
  const currentMessageContent = [];
1175
1315
  let stopReason = "";
1176
1316
  let currentText = "";
1317
+ let streamUsage;
1177
1318
  // Process streaming chunks
1178
1319
  for await (const chunk of response.stream) {
1179
1320
  if (chunk.contentBlockStart) {
@@ -1250,6 +1391,20 @@ export class AmazonBedrockProvider extends BaseProvider {
1250
1391
  }
1251
1392
  if (chunk.messageStop) {
1252
1393
  stopReason = chunk.messageStop.stopReason || "end_turn";
1394
+ // Don't break — metadata chunk with usage arrives after messageStop
1395
+ continue;
1396
+ }
1397
+ // Bedrock ConverseStream emits a metadata chunk at the end with
1398
+ // aggregate usage. Capture it for Pipeline B telemetry.
1399
+ if (chunk.metadata?.usage) {
1400
+ const input = chunk.metadata.usage.inputTokens ?? 0;
1401
+ const output = chunk.metadata.usage.outputTokens ?? 0;
1402
+ streamUsage = {
1403
+ input,
1404
+ output,
1405
+ total: chunk.metadata.usage.totalTokens ?? input + output,
1406
+ };
1407
+ // Stream is effectively complete after metadata chunk
1253
1408
  break;
1254
1409
  }
1255
1410
  }
@@ -1259,7 +1414,7 @@ export class AmazonBedrockProvider extends BaseProvider {
1259
1414
  content: currentMessageContent,
1260
1415
  };
1261
1416
  this.conversationHistory.push(assistantMessage);
1262
- return { stopReason, assistantMessage };
1417
+ return { stopReason, assistantMessage, usage: streamUsage };
1263
1418
  }
1264
1419
  async handleStreamStopReason(stopReason, assistantMessage, controller, options) {
1265
1420
  if (stopReason === "end_turn" || stopReason === "stop_sequence") {
@@ -1365,6 +1520,18 @@ export class AmazonBedrockProvider extends BaseProvider {
1365
1520
  };
1366
1521
  this.conversationHistory.push(userMessageWithToolResults);
1367
1522
  logger.debug(`📤 [AmazonBedrockProvider] Added ${toolResults.length} tool results to conversation (1:1 mapping validated)`);
1523
+ // Emit tool:end for each completed tool result so Pipeline B
1524
+ // captures telemetry for Bedrock-driven tool calls (gap S2).
1525
+ emitToolEndFromStepFinish(this.neurolink?.getEventEmitter(), toolResultsForStorage.map((tr) => {
1526
+ const hasError = tr.result && typeof tr.result === "object" && "error" in tr.result;
1527
+ return {
1528
+ toolName: tr.toolName,
1529
+ result: tr.result,
1530
+ error: hasError
1531
+ ? String(tr.result.error)
1532
+ : undefined,
1533
+ };
1534
+ }));
1368
1535
  // Store tool execution for analytics and debugging (similar to Vertex onStepFinish)
1369
1536
  this.handleToolExecutionStorage(toolCalls, toolResultsForStorage, options, new Date()).catch((error) => {
1370
1537
  logger.warn("[AmazonBedrockProvider] Failed to store tool executions", {
@@ -1437,6 +1604,13 @@ export class AmazonBedrockProvider extends BaseProvider {
1437
1604
  if (message.includes("ValidationException")) {
1438
1605
  return new ProviderError(`Validation error: ${message}`, this.providerName);
1439
1606
  }
1607
+ // Check for AWS-specific throttling BEFORE generic mapping
1608
+ const errName = error?.name ?? "";
1609
+ const errCode = error?.code ?? "";
1610
+ if (errName === "ThrottlingException" ||
1611
+ errCode === "ThrottlingException") {
1612
+ return new RateLimitError(`Bedrock rate limit (throttled): ${error instanceof Error ? error.message : String(error)}`, "bedrock");
1613
+ }
1440
1614
  return new ProviderError(`AWS Bedrock error: ${message}`, this.providerName);
1441
1615
  }
1442
1616
  /**
@@ -6,6 +6,8 @@
6
6
  */
7
7
  import { BaseProvider } from "../core/baseProvider.js";
8
8
  import { logger } from "../utils/logger.js";
9
+ import { withSpan } from "../telemetry/withSpan.js";
10
+ import { tracers } from "../telemetry/tracers.js";
9
11
  // SageMaker-specific imports
10
12
  import { getDefaultSageMakerEndpoint, getSageMakerConfig, getSageMakerModel, getSageMakerModelConfig, } from "./sagemaker/config.js";
11
13
  import { handleSageMakerError, SageMakerError } from "./sagemaker/errors.js";
@@ -73,17 +75,29 @@ export class AmazonSageMakerProvider extends BaseProvider {
73
75
  return this.sagemakerModel;
74
76
  }
75
77
  async executeStream(_options, _analysisSchema) {
76
- try {
77
- // For now, throw an error indicating this is not yet implemented
78
- throw new SageMakerError("SageMaker streaming not yet fully implemented. Coming in next phase.", {
79
- code: "MODEL_ERROR",
80
- statusCode: 501,
81
- endpoint: this.modelConfig.endpointName,
82
- });
83
- }
84
- catch (error) {
85
- throw this.handleProviderError(error);
86
- }
78
+ return withSpan({
79
+ name: "neurolink.provider.sagemaker.stream",
80
+ tracer: tracers.stream,
81
+ attributes: {
82
+ "provider.name": "sagemaker",
83
+ "model.name": this.modelName,
84
+ "sagemaker.endpoint": this.modelConfig.endpointName,
85
+ "sagemaker.region": this.sagemakerConfig.region,
86
+ "sagemaker.not_implemented": true,
87
+ },
88
+ }, async () => {
89
+ try {
90
+ // For now, throw an error indicating this is not yet implemented
91
+ throw new SageMakerError("SageMaker streaming not yet fully implemented. Coming in next phase.", {
92
+ code: "MODEL_ERROR",
93
+ statusCode: 501,
94
+ endpoint: this.modelConfig.endpointName,
95
+ });
96
+ }
97
+ catch (error) {
98
+ throw this.handleProviderError(error);
99
+ }
100
+ });
87
101
  }
88
102
  formatProviderError(error) {
89
103
  if (error instanceof SageMakerError) {
@@ -17,6 +17,7 @@ import { calculateCost } from "../utils/pricing.js";
17
17
  import { createAnthropicConfig, getProviderModel, validateApiKey, } from "../utils/providerConfig.js";
18
18
  import { composeAbortSignals, createTimeoutController, TimeoutError, } from "../utils/timeout.js";
19
19
  import { resolveToolChoice } from "../utils/toolChoice.js";
20
+ import { emitToolEndFromStepFinish } from "../utils/toolEndEmitter.js";
20
21
  import { getModelId } from "./providerTypeUtils.js";
21
22
  /**
22
23
  * Beta headers for Claude Code integration.
@@ -804,6 +805,9 @@ export class AnthropicProvider extends BaseProvider {
804
805
  experimental_repairToolCall: this.getToolCallRepairFn(options),
805
806
  experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
806
807
  onStepFinish: ({ toolCalls, toolResults }) => {
808
+ // Emit tool:end for each completed tool result so Pipeline B
809
+ // captures telemetry for AI-SDK-driven tool calls (gap S2).
810
+ emitToolEndFromStepFinish(this.neurolink?.getEventEmitter(), toolResults);
807
811
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
808
812
  logger.warn("[AnthropicProvider] Failed to store tool executions", {
809
813
  provider: this.providerName,
@@ -814,6 +818,15 @@ export class AnthropicProvider extends BaseProvider {
814
818
  });
815
819
  }
816
820
  catch (streamError) {
821
+ streamSpan.setStatus({
822
+ code: SpanStatusCode.ERROR,
823
+ message: streamError instanceof Error
824
+ ? streamError.message
825
+ : String(streamError),
826
+ });
827
+ if (streamError instanceof Error) {
828
+ streamSpan.recordException(streamError);
829
+ }
817
830
  streamSpan.end();
818
831
  throw streamError;
819
832
  }
@@ -4,6 +4,7 @@ import { APIVersions } from "../constants/enums.js";
4
4
  import { BaseProvider } from "../core/baseProvider.js";
5
5
  import { DEFAULT_MAX_STEPS } from "../core/constants.js";
6
6
  import { createProxyFetch } from "../proxy/proxyFetch.js";
7
+ import { emitToolEndFromStepFinish } from "../utils/toolEndEmitter.js";
7
8
  import { logger } from "../utils/logger.js";
8
9
  import { createAzureAPIKeyConfig, createAzureEndpointConfig, validateApiKey, } from "../utils/providerConfig.js";
9
10
  import { composeAbortSignals, createTimeoutController, TimeoutError, } from "../utils/timeout.js";
@@ -126,6 +127,7 @@ export class AzureOpenAIProvider extends BaseProvider {
126
127
  experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
127
128
  experimental_repairToolCall: this.getToolCallRepairFn(options),
128
129
  onStepFinish: (event) => {
130
+ emitToolEndFromStepFinish(this.neurolink?.getEventEmitter(), event.toolResults);
129
131
  this.handleToolExecutionStorage([...event.toolCalls], [...event.toolResults], options, new Date()).catch((error) => {
130
132
  logger.warn("[AzureOpenaiProvider] Failed to store tool executions", {
131
133
  provider: this.providerName,
@@ -4,6 +4,7 @@ import { ErrorCategory, ErrorSeverity, GoogleAIModels, } from "../constants/enum
4
4
  import { BaseProvider } from "../core/baseProvider.js";
5
5
  import { DEFAULT_MAX_STEPS } from "../core/constants.js";
6
6
  import { streamAnalyticsCollector } from "../core/streamAnalytics.js";
7
+ import { SpanStatusCode } from "@opentelemetry/api";
7
8
  import { ATTR, tracers, withClientSpan } from "../telemetry/index.js";
8
9
  import { AuthenticationError, NetworkError, ProviderError, RateLimitError, } from "../types/index.js";
9
10
  import { ERROR_CODES, NeuroLinkError } from "../utils/errorHandling.js";
@@ -12,6 +13,7 @@ import { isGemini3Model } from "../utils/modelDetection.js";
12
13
  import { composeAbortSignals, createTimeoutController, TimeoutError, } from "../utils/timeout.js";
13
14
  import { estimateTokens } from "../utils/tokenEstimation.js";
14
15
  import { resolveToolChoice } from "../utils/toolChoice.js";
16
+ import { emitToolEndFromStepFinish } from "../utils/toolEndEmitter.js";
15
17
  import { buildNativeConfig, buildNativeToolDeclarations, collectStreamChunks, collectStreamChunksIncremental, computeMaxSteps, createTextChannel, executeNativeToolCalls, extractTextFromParts, handleMaxStepsTermination, pushModelResponseToHistory, sanitizeToolsForGemini, } from "./googleNativeGemini3.js";
16
18
  import { toAnalyticsStreamResult } from "./providerTypeUtils.js";
17
19
  // Google AI Live API types now imported from ../types/providerSpecific.js
@@ -521,6 +523,9 @@ export class GoogleAIStudioProvider extends BaseProvider {
521
523
  id: rawToolResult.toolCallId ?? toolResult.toolName,
522
524
  });
523
525
  }
526
+ // Emit tool:end for each completed tool result so Pipeline B
527
+ // captures telemetry for AI-SDK-driven tool calls (gap S2).
528
+ emitToolEndFromStepFinish(this.neurolink?.getEventEmitter(), toolResults);
524
529
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
525
530
  logger.warn("[GoogleAiStudioProvider] Failed to store tool executions", {
526
531
  provider: this.providerName,
@@ -730,9 +735,62 @@ export class GoogleAIStudioProvider extends BaseProvider {
730
735
  requestDuration: responseTime,
731
736
  timestamp: new Date().toISOString(),
732
737
  });
738
+ // Emit generation:end so Pipeline B (Langfuse) creates a GENERATION
739
+ // observation. The native @google/genai stream path bypasses the Vercel
740
+ // AI SDK so experimental_telemetry is never injected; we emit manually.
741
+ const nativeStreamEmitter = this.neurolink?.getEventEmitter();
742
+ if (nativeStreamEmitter) {
743
+ nativeStreamEmitter.emit("generation:end", {
744
+ provider: this.providerName,
745
+ responseTime,
746
+ timestamp: Date.now(),
747
+ result: {
748
+ content: "",
749
+ usage: {
750
+ input: totalInputTokens,
751
+ output: totalOutputTokens,
752
+ total: totalInputTokens + totalOutputTokens,
753
+ },
754
+ model: modelName,
755
+ provider: this.providerName,
756
+ finishReason: hitStepLimitWithoutFinalAnswer
757
+ ? "max_steps"
758
+ : "stop",
759
+ },
760
+ success: true,
761
+ });
762
+ }
733
763
  channel.close();
734
764
  }
735
765
  catch (err) {
766
+ // Propagate error to OTel span so traces show ERROR status
767
+ span.recordException(err instanceof Error ? err : new Error(String(err)));
768
+ span.setStatus({
769
+ code: SpanStatusCode.ERROR,
770
+ message: err instanceof Error ? err.message : String(err),
771
+ });
772
+ // Emit failure generation:end so Pipeline B records the failed stream
773
+ const errorEmitter = this.neurolink?.getEventEmitter();
774
+ if (errorEmitter) {
775
+ errorEmitter.emit("generation:end", {
776
+ provider: this.providerName,
777
+ responseTime: Date.now() - startTime,
778
+ timestamp: Date.now(),
779
+ result: {
780
+ content: "",
781
+ usage: {
782
+ input: totalInputTokens,
783
+ output: totalOutputTokens,
784
+ total: totalInputTokens + totalOutputTokens,
785
+ },
786
+ model: modelName,
787
+ provider: this.providerName,
788
+ finishReason: "error",
789
+ },
790
+ success: false,
791
+ error: err instanceof Error ? err.message : String(err),
792
+ });
793
+ }
736
794
  channel.error(err);
737
795
  analyticsReject(err);
738
796
  }
@@ -876,6 +934,29 @@ export class GoogleAIStudioProvider extends BaseProvider {
876
934
  span.setAttribute(ATTR.GEN_AI_INPUT_TOKENS, totalInputTokens);
877
935
  span.setAttribute(ATTR.GEN_AI_OUTPUT_TOKENS, totalOutputTokens);
878
936
  span.setAttribute(ATTR.GEN_AI_FINISH_REASON, step >= maxSteps ? "max_steps" : "stop");
937
+ // Emit generation:end so Pipeline B (Langfuse) creates a GENERATION
938
+ // observation. The native @google/genai path bypasses the Vercel AI SDK
939
+ // so experimental_telemetry is never injected; we emit the event manually.
940
+ const nativeGenerateEmitter = this.neurolink?.getEventEmitter();
941
+ if (nativeGenerateEmitter) {
942
+ nativeGenerateEmitter.emit("generation:end", {
943
+ provider: this.providerName,
944
+ responseTime,
945
+ timestamp: Date.now(),
946
+ result: {
947
+ content: finalText,
948
+ usage: {
949
+ input: totalInputTokens,
950
+ output: totalOutputTokens,
951
+ total: totalInputTokens + totalOutputTokens,
952
+ },
953
+ model: modelName,
954
+ provider: this.providerName,
955
+ finishReason: step >= maxSteps ? "max_steps" : "stop",
956
+ },
957
+ success: true,
958
+ });
959
+ }
879
960
  // Build EnhancedGenerateResult
880
961
  return {
881
962
  content: finalText,
@@ -964,6 +1045,7 @@ export class GoogleAIStudioProvider extends BaseProvider {
964
1045
  const model = this.modelName ||
965
1046
  process.env.GOOGLE_VOICE_AI_MODEL ||
966
1047
  "gemini-2.5-flash-preview-native-audio-dialog";
1048
+ // Simple async queue for yielding audio events to the outer AsyncIterable
967
1049
  const queue = [];
968
1050
  let resolveNext = null;
969
1051
  let done = false;
@@ -23,6 +23,7 @@ import { convertZodToJsonSchema, inlineJsonSchema, } from "../utils/schemaConver
23
23
  import { composeAbortSignals, createTimeoutController, TimeoutError, } from "../utils/timeout.js";
24
24
  import { estimateTokens } from "../utils/tokenEstimation.js";
25
25
  import { resolveToolChoice } from "../utils/toolChoice.js";
26
+ import { emitToolEndFromStepFinish } from "../utils/toolEndEmitter.js";
26
27
  import { buildNativeConfig, buildNativeToolDeclarations, collectStreamChunks, collectStreamChunksIncremental, computeMaxSteps as computeMaxStepsShared, createTextChannel, executeNativeToolCalls, extractTextFromParts, handleMaxStepsTermination, normalizeToolsForJsonSchemaProvider, pushModelResponseToHistory, sanitizeToolsForGemini, } from "./googleNativeGemini3.js";
27
28
  import { getModelId } from "./providerTypeUtils.js";
28
29
  // Import proper types for multimodal message handling
@@ -1075,6 +1076,9 @@ export class GoogleVertexProvider extends BaseProvider {
1075
1076
  id: toolResult.toolCallId ?? toolResult.toolName,
1076
1077
  });
1077
1078
  }
1079
+ // Emit tool:end for each completed tool result so Pipeline B
1080
+ // captures telemetry for AI-SDK-driven tool calls (gap S2).
1081
+ emitToolEndFromStepFinish(this.neurolink?.getEventEmitter(), toolResults);
1078
1082
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
1079
1083
  logger.warn("[GoogleVertexProvider] Failed to store tool executions", {
1080
1084
  provider: this.providerName,
@@ -1516,6 +1520,31 @@ export class GoogleVertexProvider extends BaseProvider {
1516
1520
  requestDuration: responseTime,
1517
1521
  timestamp: new Date().toISOString(),
1518
1522
  });
1523
+ // Emit generation:end so Pipeline B (Langfuse) creates a GENERATION
1524
+ // observation. The native @google/genai stream path on Vertex bypasses the
1525
+ // Vercel AI SDK so experimental_telemetry is never injected; we emit manually.
1526
+ const vertexStreamEmitter = this.neurolink?.getEventEmitter();
1527
+ if (vertexStreamEmitter) {
1528
+ vertexStreamEmitter.emit("generation:end", {
1529
+ provider: this.providerName,
1530
+ responseTime,
1531
+ timestamp: Date.now(),
1532
+ result: {
1533
+ content: "",
1534
+ usage: {
1535
+ input: totalInputTokens,
1536
+ output: totalOutputTokens,
1537
+ total: totalInputTokens + totalOutputTokens,
1538
+ },
1539
+ model: params.modelName,
1540
+ provider: this.providerName,
1541
+ finishReason: step >= params.maxSteps && !completedWithFinalAnswer
1542
+ ? "max_steps"
1543
+ : "stop",
1544
+ },
1545
+ success: true,
1546
+ });
1547
+ }
1519
1548
  params.channel.close();
1520
1549
  }
1521
1550
  catch (error) {
@@ -1661,6 +1690,29 @@ export class GoogleVertexProvider extends BaseProvider {
1661
1690
  span.setAttribute(ATTR.GEN_AI_INPUT_TOKENS, totalInputTokens);
1662
1691
  span.setAttribute(ATTR.GEN_AI_OUTPUT_TOKENS, totalOutputTokens);
1663
1692
  span.setAttribute(ATTR.GEN_AI_FINISH_REASON, step >= maxSteps ? "max_steps" : "stop");
1693
+ // Emit generation:end so Pipeline B (Langfuse) creates a GENERATION
1694
+ // observation. The native @google/genai path on Vertex bypasses the Vercel
1695
+ // AI SDK so experimental_telemetry is never injected; we emit manually.
1696
+ const vertexGenerateEmitter = this.neurolink?.getEventEmitter();
1697
+ if (vertexGenerateEmitter) {
1698
+ vertexGenerateEmitter.emit("generation:end", {
1699
+ provider: this.providerName,
1700
+ responseTime,
1701
+ timestamp: Date.now(),
1702
+ result: {
1703
+ content: finalText,
1704
+ usage: {
1705
+ input: totalInputTokens,
1706
+ output: totalOutputTokens,
1707
+ total: totalInputTokens + totalOutputTokens,
1708
+ },
1709
+ model: modelName,
1710
+ provider: this.providerName,
1711
+ finishReason: step >= maxSteps ? "max_steps" : "stop",
1712
+ },
1713
+ success: true,
1714
+ });
1715
+ }
1664
1716
  // Build EnhancedGenerateResult
1665
1717
  return {
1666
1718
  content: finalText,
@@ -3,6 +3,7 @@ import { NoOutputGeneratedError, stepCountIs, streamText, } from "ai";
3
3
  import { BaseProvider } from "../core/baseProvider.js";
4
4
  import { DEFAULT_MAX_STEPS } from "../core/constants.js";
5
5
  import { createProxyFetch } from "../proxy/proxyFetch.js";
6
+ import { emitToolEndFromStepFinish } from "../utils/toolEndEmitter.js";
6
7
  import { logger } from "../utils/logger.js";
7
8
  import { createHuggingFaceConfig, getProviderModel, validateApiKey, } from "../utils/providerConfig.js";
8
9
  import { composeAbortSignals, createTimeoutController, TimeoutError, } from "../utils/timeout.js";
@@ -141,6 +142,7 @@ export class HuggingFaceProvider extends BaseProvider {
141
142
  experimental_telemetry: this.telemetryHandler.getTelemetryConfig(options),
142
143
  experimental_repairToolCall: this.getToolCallRepairFn(options),
143
144
  onStepFinish: ({ toolCalls, toolResults }) => {
145
+ emitToolEndFromStepFinish(this.neurolink?.getEventEmitter(), toolResults);
144
146
  this.handleToolExecutionStorage(toolCalls, toolResults, options, new Date()).catch((error) => {
145
147
  logger.warn("[HuggingFaceProvider] Failed to store tool executions", {
146
148
  provider: this.providerName,