@juspay/neurolink 7.29.2 → 7.30.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (226)
  1. package/CHANGELOG.md +12 -0
  2. package/dist/cli/commands/config.d.ts +83 -83
  3. package/dist/cli/commands/mcp.js +39 -9
  4. package/dist/cli/commands/models.js +25 -21
  5. package/dist/cli/commands/ollama.js +2 -2
  6. package/dist/cli/factories/commandFactory.d.ts +8 -0
  7. package/dist/cli/factories/commandFactory.js +65 -65
  8. package/dist/cli/factories/ollamaCommandFactory.js +3 -1
  9. package/dist/cli/factories/sagemakerCommandFactory.js +3 -2
  10. package/dist/cli/index.d.ts +1 -1
  11. package/dist/cli/index.js +11 -11
  12. package/dist/cli/utils/envManager.js +5 -5
  13. package/dist/cli/utils/ollamaUtils.d.ts +12 -0
  14. package/dist/cli/utils/ollamaUtils.js +58 -42
  15. package/dist/config/configManager.js +5 -2
  16. package/dist/config/conversationMemoryConfig.js +5 -0
  17. package/dist/core/analytics.d.ts +2 -24
  18. package/dist/core/analytics.js +12 -17
  19. package/dist/core/baseProvider.d.ts +30 -1
  20. package/dist/core/baseProvider.js +180 -198
  21. package/dist/core/conversationMemoryManager.d.ts +9 -15
  22. package/dist/core/conversationMemoryManager.js +98 -57
  23. package/dist/core/dynamicModels.d.ts +4 -4
  24. package/dist/core/dynamicModels.js +7 -7
  25. package/dist/core/evaluation.d.ts +9 -9
  26. package/dist/core/evaluation.js +117 -65
  27. package/dist/core/evaluationProviders.d.ts +18 -2
  28. package/dist/core/evaluationProviders.js +15 -13
  29. package/dist/core/modelConfiguration.d.ts +63 -0
  30. package/dist/core/modelConfiguration.js +354 -290
  31. package/dist/core/streamAnalytics.d.ts +10 -5
  32. package/dist/core/streamAnalytics.js +10 -10
  33. package/dist/core/types.d.ts +22 -110
  34. package/dist/core/types.js +13 -0
  35. package/dist/factories/providerFactory.js +1 -1
  36. package/dist/index.d.ts +2 -1
  37. package/dist/lib/config/configManager.js +5 -2
  38. package/dist/lib/config/conversationMemoryConfig.js +5 -0
  39. package/dist/lib/core/analytics.d.ts +2 -24
  40. package/dist/lib/core/analytics.js +12 -17
  41. package/dist/lib/core/baseProvider.d.ts +30 -1
  42. package/dist/lib/core/baseProvider.js +180 -198
  43. package/dist/lib/core/conversationMemoryManager.d.ts +9 -15
  44. package/dist/lib/core/conversationMemoryManager.js +98 -57
  45. package/dist/lib/core/dynamicModels.js +7 -7
  46. package/dist/lib/core/evaluation.d.ts +9 -9
  47. package/dist/lib/core/evaluation.js +117 -65
  48. package/dist/lib/core/evaluationProviders.d.ts +18 -2
  49. package/dist/lib/core/evaluationProviders.js +15 -13
  50. package/dist/lib/core/modelConfiguration.d.ts +63 -0
  51. package/dist/lib/core/modelConfiguration.js +354 -290
  52. package/dist/lib/core/streamAnalytics.d.ts +10 -5
  53. package/dist/lib/core/streamAnalytics.js +10 -10
  54. package/dist/lib/core/types.d.ts +22 -110
  55. package/dist/lib/core/types.js +13 -0
  56. package/dist/lib/factories/providerFactory.js +1 -1
  57. package/dist/lib/index.d.ts +2 -1
  58. package/dist/lib/mcp/externalServerManager.js +15 -6
  59. package/dist/lib/mcp/factory.js +1 -1
  60. package/dist/lib/mcp/index.d.ts +1 -1
  61. package/dist/lib/mcp/index.js +1 -1
  62. package/dist/lib/mcp/mcpCircuitBreaker.js +5 -1
  63. package/dist/lib/mcp/mcpClientFactory.js +3 -0
  64. package/dist/lib/mcp/registry.d.ts +3 -3
  65. package/dist/lib/mcp/registry.js +3 -3
  66. package/dist/lib/mcp/servers/aiProviders/aiAnalysisTools.js +5 -5
  67. package/dist/lib/mcp/servers/aiProviders/aiWorkflowTools.js +6 -6
  68. package/dist/lib/mcp/servers/utilities/utilityServer.js +1 -1
  69. package/dist/lib/mcp/toolDiscoveryService.js +8 -2
  70. package/dist/lib/mcp/toolRegistry.js +4 -4
  71. package/dist/lib/middleware/builtin/analytics.js +4 -4
  72. package/dist/lib/middleware/builtin/guardrails.js +2 -2
  73. package/dist/lib/middleware/registry.js +11 -2
  74. package/dist/lib/models/modelRegistry.d.ts +1 -1
  75. package/dist/lib/models/modelRegistry.js +3 -3
  76. package/dist/lib/models/modelResolver.d.ts +1 -1
  77. package/dist/lib/models/modelResolver.js +2 -2
  78. package/dist/lib/neurolink.d.ts +116 -9
  79. package/dist/lib/neurolink.js +718 -956
  80. package/dist/lib/providers/amazonSagemaker.d.ts +1 -1
  81. package/dist/lib/providers/amazonSagemaker.js +12 -3
  82. package/dist/lib/providers/anthropic.d.ts +1 -1
  83. package/dist/lib/providers/anthropic.js +7 -6
  84. package/dist/lib/providers/anthropicBaseProvider.d.ts +1 -1
  85. package/dist/lib/providers/anthropicBaseProvider.js +4 -3
  86. package/dist/lib/providers/azureOpenai.d.ts +1 -1
  87. package/dist/lib/providers/azureOpenai.js +1 -1
  88. package/dist/lib/providers/googleAiStudio.d.ts +1 -1
  89. package/dist/lib/providers/googleAiStudio.js +2 -2
  90. package/dist/lib/providers/googleVertex.d.ts +40 -0
  91. package/dist/lib/providers/googleVertex.js +330 -274
  92. package/dist/lib/providers/huggingFace.js +1 -1
  93. package/dist/lib/providers/mistral.d.ts +1 -1
  94. package/dist/lib/providers/mistral.js +2 -2
  95. package/dist/lib/providers/ollama.d.ts +4 -0
  96. package/dist/lib/providers/ollama.js +38 -18
  97. package/dist/lib/providers/openAI.d.ts +1 -1
  98. package/dist/lib/providers/openAI.js +2 -2
  99. package/dist/lib/providers/sagemaker/adaptive-semaphore.js +7 -4
  100. package/dist/lib/providers/sagemaker/client.js +13 -3
  101. package/dist/lib/providers/sagemaker/config.js +5 -1
  102. package/dist/lib/providers/sagemaker/detection.js +19 -9
  103. package/dist/lib/providers/sagemaker/errors.d.ts +8 -1
  104. package/dist/lib/providers/sagemaker/errors.js +103 -20
  105. package/dist/lib/providers/sagemaker/language-model.d.ts +3 -3
  106. package/dist/lib/providers/sagemaker/language-model.js +4 -4
  107. package/dist/lib/providers/sagemaker/parsers.js +14 -6
  108. package/dist/lib/providers/sagemaker/streaming.js +14 -3
  109. package/dist/lib/providers/sagemaker/types.d.ts +1 -1
  110. package/dist/lib/proxy/awsProxyIntegration.js +1 -1
  111. package/dist/lib/sdk/toolRegistration.d.ts +1 -1
  112. package/dist/lib/types/cli.d.ts +80 -8
  113. package/dist/lib/types/contextTypes.js +2 -2
  114. package/dist/lib/types/conversationTypes.d.ts +10 -0
  115. package/dist/lib/types/generateTypes.d.ts +2 -5
  116. package/dist/lib/types/providers.d.ts +81 -19
  117. package/dist/lib/types/providers.js +6 -6
  118. package/dist/lib/types/streamTypes.d.ts +4 -6
  119. package/dist/lib/types/typeAliases.d.ts +1 -1
  120. package/dist/lib/utils/analyticsUtils.d.ts +33 -0
  121. package/dist/lib/utils/analyticsUtils.js +76 -0
  122. package/dist/lib/utils/conversationMemoryUtils.d.ts +1 -2
  123. package/dist/lib/utils/conversationMemoryUtils.js +6 -7
  124. package/dist/lib/utils/errorHandling.js +4 -1
  125. package/dist/lib/utils/evaluationUtils.d.ts +27 -0
  126. package/dist/lib/utils/evaluationUtils.js +131 -0
  127. package/dist/lib/utils/optionsUtils.js +10 -1
  128. package/dist/lib/utils/performance.d.ts +1 -1
  129. package/dist/lib/utils/performance.js +15 -3
  130. package/dist/lib/utils/providerHealth.d.ts +48 -0
  131. package/dist/lib/utils/providerHealth.js +199 -254
  132. package/dist/lib/utils/providerUtils.js +2 -2
  133. package/dist/lib/utils/timeout.js +8 -3
  134. package/dist/mcp/externalServerManager.js +15 -6
  135. package/dist/mcp/factory.js +1 -1
  136. package/dist/mcp/index.d.ts +1 -1
  137. package/dist/mcp/index.js +1 -1
  138. package/dist/mcp/mcpCircuitBreaker.js +5 -1
  139. package/dist/mcp/mcpClientFactory.js +3 -0
  140. package/dist/mcp/registry.d.ts +3 -3
  141. package/dist/mcp/registry.js +3 -3
  142. package/dist/mcp/servers/aiProviders/aiAnalysisTools.js +5 -5
  143. package/dist/mcp/servers/aiProviders/aiWorkflowTools.js +6 -6
  144. package/dist/mcp/servers/utilities/utilityServer.js +1 -1
  145. package/dist/mcp/toolDiscoveryService.js +8 -2
  146. package/dist/mcp/toolRegistry.js +4 -4
  147. package/dist/middleware/builtin/analytics.js +4 -4
  148. package/dist/middleware/builtin/guardrails.js +2 -2
  149. package/dist/middleware/registry.js +11 -2
  150. package/dist/models/modelRegistry.d.ts +1 -1
  151. package/dist/models/modelRegistry.js +3 -3
  152. package/dist/models/modelResolver.d.ts +1 -1
  153. package/dist/models/modelResolver.js +2 -2
  154. package/dist/neurolink.d.ts +116 -9
  155. package/dist/neurolink.js +718 -956
  156. package/dist/providers/amazonSagemaker.d.ts +1 -1
  157. package/dist/providers/amazonSagemaker.js +12 -3
  158. package/dist/providers/anthropic.d.ts +1 -1
  159. package/dist/providers/anthropic.js +7 -6
  160. package/dist/providers/anthropicBaseProvider.d.ts +1 -1
  161. package/dist/providers/anthropicBaseProvider.js +4 -3
  162. package/dist/providers/azureOpenai.d.ts +1 -1
  163. package/dist/providers/azureOpenai.js +1 -1
  164. package/dist/providers/googleAiStudio.d.ts +1 -1
  165. package/dist/providers/googleAiStudio.js +2 -2
  166. package/dist/providers/googleVertex.d.ts +40 -0
  167. package/dist/providers/googleVertex.js +330 -274
  168. package/dist/providers/huggingFace.js +1 -1
  169. package/dist/providers/mistral.d.ts +1 -1
  170. package/dist/providers/mistral.js +2 -2
  171. package/dist/providers/ollama.d.ts +4 -0
  172. package/dist/providers/ollama.js +38 -18
  173. package/dist/providers/openAI.d.ts +1 -1
  174. package/dist/providers/openAI.js +2 -2
  175. package/dist/providers/sagemaker/adaptive-semaphore.js +7 -4
  176. package/dist/providers/sagemaker/client.js +13 -3
  177. package/dist/providers/sagemaker/config.js +5 -1
  178. package/dist/providers/sagemaker/detection.js +19 -9
  179. package/dist/providers/sagemaker/errors.d.ts +8 -1
  180. package/dist/providers/sagemaker/errors.js +103 -20
  181. package/dist/providers/sagemaker/language-model.d.ts +3 -3
  182. package/dist/providers/sagemaker/language-model.js +4 -4
  183. package/dist/providers/sagemaker/parsers.js +14 -6
  184. package/dist/providers/sagemaker/streaming.js +14 -3
  185. package/dist/providers/sagemaker/types.d.ts +1 -1
  186. package/dist/proxy/awsProxyIntegration.js +1 -1
  187. package/dist/sdk/toolRegistration.d.ts +1 -1
  188. package/dist/types/cli.d.ts +80 -8
  189. package/dist/types/contextTypes.js +2 -2
  190. package/dist/types/conversationTypes.d.ts +10 -0
  191. package/dist/types/generateTypes.d.ts +2 -5
  192. package/dist/types/providers.d.ts +81 -19
  193. package/dist/types/providers.js +6 -6
  194. package/dist/types/streamTypes.d.ts +4 -6
  195. package/dist/types/typeAliases.d.ts +1 -1
  196. package/dist/utils/analyticsUtils.d.ts +33 -0
  197. package/dist/utils/analyticsUtils.js +76 -0
  198. package/dist/utils/conversationMemoryUtils.d.ts +1 -2
  199. package/dist/utils/conversationMemoryUtils.js +6 -7
  200. package/dist/utils/errorHandling.js +4 -1
  201. package/dist/utils/evaluationUtils.d.ts +27 -0
  202. package/dist/utils/evaluationUtils.js +131 -0
  203. package/dist/utils/optionsUtils.js +10 -1
  204. package/dist/utils/performance.d.ts +1 -1
  205. package/dist/utils/performance.js +15 -3
  206. package/dist/utils/providerHealth.d.ts +48 -0
  207. package/dist/utils/providerHealth.js +199 -254
  208. package/dist/utils/providerUtils.js +2 -2
  209. package/dist/utils/timeout.js +8 -3
  210. package/package.json +1 -1
  211. package/dist/context/ContextManager.d.ts +0 -28
  212. package/dist/context/ContextManager.js +0 -113
  213. package/dist/context/config.d.ts +0 -5
  214. package/dist/context/config.js +0 -42
  215. package/dist/context/types.d.ts +0 -20
  216. package/dist/context/types.js +0 -1
  217. package/dist/context/utils.d.ts +0 -7
  218. package/dist/context/utils.js +0 -8
  219. package/dist/lib/context/ContextManager.d.ts +0 -28
  220. package/dist/lib/context/ContextManager.js +0 -113
  221. package/dist/lib/context/config.d.ts +0 -5
  222. package/dist/lib/context/config.js +0 -42
  223. package/dist/lib/context/types.d.ts +0 -20
  224. package/dist/lib/context/types.js +0 -1
  225. package/dist/lib/context/utils.d.ts +0 -7
  226. package/dist/lib/context/utils.js +0 -8
@@ -1,5 +1,8 @@
1
1
  import { NeuroLink } from "../../lib/neurolink.js";
2
2
  import { configManager } from "../commands/config.js";
3
+ import { handleError } from "../index.js";
4
+ import { normalizeEvaluationData } from "../../lib/utils/evaluationUtils.js";
5
+ // Use TokenUsage from standard types - no local interface needed
3
6
  import { ContextFactory, } from "../../lib/types/contextTypes.js";
4
7
  import { ModelsCommandFactory } from "../commands/models.js";
5
8
  import { MCPCommandFactory } from "../commands/mcp.js";
@@ -9,6 +12,7 @@ import ora from "ora";
9
12
  import chalk from "chalk";
10
13
  import { logger } from "../../lib/utils/logger.js";
11
14
  import fs from "fs";
15
+ // Use specific command interfaces from cli.ts instead of universal interface
12
16
  /**
13
17
  * CLI Command Factory for generate commands
14
18
  */
@@ -209,7 +213,9 @@ export class CLICommandFactory {
209
213
  }
210
214
  }
211
215
  return {
212
- provider: argv.provider === "auto" ? undefined : argv.provider,
216
+ provider: argv.provider === "auto"
217
+ ? undefined
218
+ : argv.provider,
213
219
  model: argv.model,
214
220
  temperature: argv.temperature,
215
221
  maxTokens: argv.maxTokens,
@@ -339,7 +345,7 @@ export class CLICommandFactory {
339
345
  }
340
346
  analyticsText += "\n";
341
347
  // Token usage with fallback handling
342
- const normalizedTokens = this.normalizeTokenUsage(analytics.tokens);
348
+ const normalizedTokens = this.normalizeTokenUsage(analytics.tokenUsage);
343
349
  if (normalizedTokens) {
344
350
  analyticsText += ` Tokens: ${normalizedTokens.input} input + ${normalizedTokens.output} output = ${normalizedTokens.total} total\n`;
345
351
  }
@@ -350,8 +356,8 @@ export class CLICommandFactory {
350
356
  analyticsText += ` Cost: $${analytics.cost.toFixed(5)}\n`;
351
357
  }
352
358
  // Response time with fallback handling for requestDuration vs responseTime
353
- const duration = analytics.responseTime ||
354
- analytics.requestDuration ||
359
+ const duration = analytics.requestDuration ||
360
+ analytics.responseTime ||
355
361
  analytics.duration;
356
362
  if (duration && typeof duration === "number") {
357
363
  const timeInSeconds = (duration / 1000).toFixed(1);
@@ -603,21 +609,21 @@ export class CLICommandFactory {
603
609
  provider: "google-ai",
604
610
  status: "working",
605
611
  configured: true,
606
- responseTime: 150,
612
+ requestDuration: 150,
607
613
  model: "gemini-2.5-flash",
608
614
  },
609
615
  {
610
616
  provider: "openai",
611
617
  status: "working",
612
618
  configured: true,
613
- responseTime: 200,
619
+ requestDuration: 200,
614
620
  model: "gpt-4o-mini",
615
621
  },
616
622
  {
617
623
  provider: "anthropic",
618
624
  status: "working",
619
625
  configured: true,
620
- responseTime: 180,
626
+ requestDuration: 180,
621
627
  model: "claude-3-haiku",
622
628
  },
623
629
  { provider: "bedrock", status: "not configured", configured: false },
@@ -633,7 +639,9 @@ export class CLICommandFactory {
633
639
  : result.status === "failed"
634
640
  ? chalk.red("❌ Failed")
635
641
  : chalk.gray("⚪ Not configured");
636
- const time = result.responseTime ? ` (${result.responseTime}ms)` : "";
642
+ const time = result.requestDuration
643
+ ? ` (${result.requestDuration}ms)`
644
+ : "";
637
645
  const model = result.model ? ` [${result.model}]` : "";
638
646
  logger.always(`${result.provider}: ${status}${time}${model}`);
639
647
  }
@@ -734,33 +742,31 @@ export class CLICommandFactory {
734
742
  provider: options.provider || "auto",
735
743
  model: options.model || "test-model",
736
744
  usage: {
737
- inputTokens: 10,
738
- outputTokens: 15,
739
- totalTokens: 25,
745
+ input: 10,
746
+ output: 15,
747
+ total: 25,
740
748
  },
741
749
  responseTime: 150,
742
750
  analytics: options.enableAnalytics
743
751
  ? {
744
752
  provider: options.provider || "auto",
745
753
  model: options.model || "test-model",
746
- tokens: { input: 10, output: 15, total: 25 },
754
+ tokenUsage: { input: 10, output: 15, total: 25 },
747
755
  cost: 0.00025,
748
- responseTime: 150,
756
+ requestDuration: 150,
749
757
  context: contextMetadata,
750
758
  }
751
759
  : undefined,
752
760
  evaluation: options.enableEvaluation
753
- ? {
761
+ ? normalizeEvaluationData({
754
762
  relevance: 8,
755
763
  accuracy: 9,
756
764
  completeness: 8,
757
765
  overall: 8.3,
758
- isOffTopic: false,
759
- alertSeverity: "none",
760
766
  reasoning: "Test evaluation response",
761
767
  evaluationModel: "test-evaluator",
762
768
  evaluationTime: 50,
763
- }
769
+ })
764
770
  : undefined,
765
771
  };
766
772
  if (spinner) {
@@ -774,7 +780,6 @@ export class CLICommandFactory {
774
780
  logger.debug("Mode: DRY-RUN (no actual API calls made)");
775
781
  }
776
782
  process.exit(0);
777
- return;
778
783
  }
779
784
  const sdk = new NeuroLink();
780
785
  if (options.debug) {
@@ -827,18 +832,36 @@ export class CLICommandFactory {
827
832
  if (spinner) {
828
833
  spinner.fail();
829
834
  }
830
- logger.error(chalk.red(`❌ Generation failed: ${error.message}`));
831
- if (options.debug) {
832
- logger.error(chalk.gray(error.stack));
833
- }
834
- process.exit(1);
835
+ handleError(error, "Generation");
835
836
  }
836
837
  }
837
838
  /**
838
- * Execute the stream command
839
+ * Log debug information for stream result
839
840
  */
840
- static async executeStream(argv) {
841
- // Handle stdin input if no input provided
841
+ static async logStreamDebugInfo(stream) {
842
+ logger.debug("\n" + chalk.yellow("Debug Information (Streaming):"));
843
+ logger.debug("Provider:", stream.provider);
844
+ logger.debug("Model:", stream.model);
845
+ if (stream.analytics) {
846
+ const resolvedAnalytics = await (stream.analytics instanceof Promise
847
+ ? stream.analytics
848
+ : Promise.resolve(stream.analytics));
849
+ logger.debug("Analytics:", JSON.stringify(resolvedAnalytics, null, 2));
850
+ }
851
+ if (stream.evaluation) {
852
+ const resolvedEvaluation = await (stream.evaluation instanceof Promise
853
+ ? stream.evaluation
854
+ : Promise.resolve(stream.evaluation));
855
+ logger.debug("Evaluation:", JSON.stringify(resolvedEvaluation, null, 2));
856
+ }
857
+ if (stream.metadata) {
858
+ logger.debug("Metadata:", JSON.stringify(stream.metadata, null, 2));
859
+ }
860
+ }
861
+ /**
862
+ * Handle stdin input for stream command
863
+ */
864
+ static async handleStdinInput(argv) {
842
865
  if (!argv.input && !process.stdin.isTTY) {
843
866
  let stdinData = "";
844
867
  process.stdin.setEncoding("utf8");
@@ -853,6 +876,12 @@ export class CLICommandFactory {
853
876
  else if (!argv.input) {
854
877
  throw new Error('Input required. Use: neurolink stream "your prompt" or echo "prompt" | neurolink stream');
855
878
  }
879
+ }
880
+ /**
881
+ * Execute the stream command
882
+ */
883
+ static async executeStream(argv) {
884
+ await this.handleStdinInput(argv);
856
885
  const options = this.processOptions(argv);
857
886
  if (!options.quiet) {
858
887
  logger.always(chalk.blue("🔄 Streaming..."));
@@ -915,11 +944,11 @@ export class CLICommandFactory {
915
944
  model: options.model || "test-model",
916
945
  requestDuration: 300,
917
946
  tokenUsage: {
918
- inputTokens: 10,
919
- outputTokens: 15,
920
- totalTokens: 25,
947
+ input: 10,
948
+ output: 15,
949
+ total: 25,
921
950
  },
922
- timestamp: Date.now(),
951
+ timestamp: new Date().toISOString(),
923
952
  context: contextMetadata,
924
953
  };
925
954
  const mockGenerateResult = {
@@ -953,7 +982,6 @@ export class CLICommandFactory {
953
982
  logger.debug("Mode: DRY-RUN (no actual API calls made)");
954
983
  }
955
984
  process.exit(0);
956
- return;
957
985
  }
958
986
  const sdk = new NeuroLink();
959
987
  const stream = await sdk.stream({
@@ -1079,33 +1107,12 @@ export class CLICommandFactory {
1079
1107
  }
1080
1108
  // 🔧 NEW: Debug output for streaming (similar to generate command)
1081
1109
  if (options.debug) {
1082
- logger.debug("\n" + chalk.yellow("Debug Information (Streaming):"));
1083
- logger.debug("Provider:", stream.provider);
1084
- logger.debug("Model:", stream.model);
1085
- if (stream.analytics) {
1086
- const resolvedAnalytics = await (stream.analytics instanceof Promise
1087
- ? stream.analytics
1088
- : Promise.resolve(stream.analytics));
1089
- logger.debug("Analytics:", JSON.stringify(resolvedAnalytics, null, 2));
1090
- }
1091
- if (stream.evaluation) {
1092
- const resolvedEvaluation = await (stream.evaluation instanceof Promise
1093
- ? stream.evaluation
1094
- : Promise.resolve(stream.evaluation));
1095
- logger.debug("Evaluation:", JSON.stringify(resolvedEvaluation, null, 2));
1096
- }
1097
- if (stream.metadata) {
1098
- logger.debug("Metadata:", JSON.stringify(stream.metadata, null, 2));
1099
- }
1110
+ await this.logStreamDebugInfo(stream);
1100
1111
  }
1101
1112
  process.exit(0);
1102
1113
  }
1103
1114
  catch (error) {
1104
- logger.error(chalk.red(`❌ Streaming failed: ${error.message}`));
1105
- if (options.debug) {
1106
- logger.error(chalk.gray(error.stack));
1107
- }
1108
- process.exit(1);
1115
+ handleError(error, "Streaming");
1109
1116
  }
1110
1117
  }
1111
1118
  /**
@@ -1219,11 +1226,7 @@ export class CLICommandFactory {
1219
1226
  if (spinner) {
1220
1227
  spinner.fail();
1221
1228
  }
1222
- logger.error(chalk.red(`❌ Batch processing failed: ${error.message}`));
1223
- if (options.debug) {
1224
- logger.error(chalk.gray(error.stack));
1225
- }
1226
- process.exit(1);
1229
+ handleError(error, "Batch processing");
1227
1230
  }
1228
1231
  }
1229
1232
  /**
@@ -1252,8 +1255,7 @@ export class CLICommandFactory {
1252
1255
  this.handleOutput(config, options);
1253
1256
  }
1254
1257
  catch (error) {
1255
- logger.error(chalk.red(`❌ Configuration export failed: ${error.message}`));
1256
- process.exit(1);
1258
+ handleError(error, "Configuration export");
1257
1259
  }
1258
1260
  }
1259
1261
  /**
@@ -1277,8 +1279,7 @@ export class CLICommandFactory {
1277
1279
  }
1278
1280
  }
1279
1281
  catch (error) {
1280
- logger.error(chalk.red(`❌ Provider selection failed: ${error.message}`));
1281
- process.exit(1);
1282
+ handleError(error, "Provider selection");
1282
1283
  }
1283
1284
  }
1284
1285
  /**
@@ -1417,8 +1418,7 @@ export class CLICommandFactory {
1417
1418
  }
1418
1419
  }
1419
1420
  catch (error) {
1420
- logger.error(chalk.red(`❌ Completion generation failed: ${error.message}`));
1421
- process.exit(1);
1421
+ handleError(error, "Completion generation");
1422
1422
  }
1423
1423
  }
1424
1424
  }
@@ -183,7 +183,9 @@ export class OllamaCommandFactory {
183
183
  logger.always(chalk.green(`\n${modelsData.models.length} models available`));
184
184
  }
185
185
  }
186
- catch (_error) {
186
+ catch (error) {
187
+ const errorMessage = error instanceof Error ? error.message : String(error);
188
+ logger.debug("Error:", errorMessage);
187
189
  spinner.fail("Ollama service is not running");
188
190
  logger.always(chalk.yellow("\nStart Ollama with: ollama serve"));
189
191
  logger.always(chalk.blue("Or restart the Ollama app if using the desktop version"));
@@ -309,9 +309,10 @@ export class SageMakerCommandFactory {
309
309
  logger.always(chalk.yellow("No SageMaker endpoints found"));
310
310
  }
311
311
  }
312
- catch (_awsError) {
312
+ catch (error) {
313
313
  spinner.fail("Failed to list endpoints");
314
- logger.error(chalk.red("AWS SDK credentials error or insufficient permissions"));
314
+ const errorMessage = error instanceof Error ? error.message : String(error);
315
+ logger.error(chalk.red("Error:", errorMessage));
315
316
  logger.always(chalk.yellow("\nTo list endpoints, please:"));
316
317
  logger.always("1. Set AWS_ACCESS_KEY_ID environment variable");
317
318
  logger.always("2. Set AWS_SECRET_ACCESS_KEY environment variable");
@@ -5,4 +5,4 @@
5
5
  * Professional CLI experience with minimal maintenance overhead.
6
6
  * Features: Spinners, colors, batch processing, provider testing, rich help
7
7
  */
8
- export {};
8
+ export declare function handleError(_error: Error, context: string): void;
package/dist/cli/index.js CHANGED
@@ -33,14 +33,14 @@ try {
33
33
  const { config } = await import("dotenv");
34
34
  config(); // Load .env from current working directory
35
35
  }
36
- catch (error) {
36
+ catch {
37
37
  // dotenv is not available (dev dependency only) - this is fine for production
38
38
  // Environment variables should be set externally in production
39
39
  }
40
40
  // Utility Functions (Simple, Zero Maintenance)
41
- function handleError(error, context) {
42
- logger.error(chalk.red(`❌ ${context} failed: ${error.message}`));
43
- if (error instanceof AuthenticationError) {
41
+ export function handleError(_error, context) {
42
+ logger.error(chalk.red(`❌ ${context} failed: ${_error.message}`));
43
+ if (_error instanceof AuthenticationError) {
44
44
  logger.error(chalk.yellow("💡 Set Google AI Studio API key (RECOMMENDED): export GOOGLE_AI_API_KEY=AIza-..."));
45
45
  logger.error(chalk.yellow("💡 Or set OpenAI API key: export OPENAI_API_KEY=sk-..."));
46
46
  logger.error(chalk.yellow("💡 Or set AWS Bedrock credentials: export AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_REGION=us-east-1"));
@@ -48,14 +48,14 @@ function handleError(error, context) {
48
48
  logger.error(chalk.yellow("💡 Or set Anthropic API key: export ANTHROPIC_API_KEY=sk-ant-..."));
49
49
  logger.error(chalk.yellow("💡 Or set Azure OpenAI credentials: export AZURE_OPENAI_API_KEY=... AZURE_OPENAI_ENDPOINT=..."));
50
50
  }
51
- else if (error instanceof RateLimitError) {
51
+ else if (_error instanceof RateLimitError) {
52
52
  logger.error(chalk.yellow("💡 Try again in a few moments or use --provider vertex"));
53
53
  }
54
- else if (error instanceof AuthorizationError) {
54
+ else if (_error instanceof AuthorizationError) {
55
55
  logger.error(chalk.yellow("💡 Check your account permissions for the selected model/service."));
56
56
  logger.error(chalk.yellow("💡 For AWS Bedrock, ensure you have permissions for the specific model and consider using inference profile ARNs."));
57
57
  }
58
- else if (error instanceof NetworkError) {
58
+ else if (_error instanceof NetworkError) {
59
59
  logger.error(chalk.yellow("💡 Check your internet connection and the provider's status page."));
60
60
  }
61
61
  process.exit(1);
@@ -105,9 +105,9 @@ const cli = yargs(args)
105
105
  }
106
106
  };
107
107
  if (err) {
108
- // Error likely from an async command handler (e.g., via handleError)
109
- // handleError already prints and calls process.exit(1).
110
- // If we're here, it means handleError's process.exit might not have been caught by the top-level async IIFE.
108
+ // Error likely from an async command handler (e.g., via _handleError)
109
+ // _handleError already prints and calls process.exit(1).
110
+ // If we're here, it means _handleError's process.exit might not have been caught by the top-level async IIFE.
111
111
  // Or, it's a synchronous yargs error during parsing that yargs itself throws.
112
112
  const alreadyExitedByHandleError = err?.exitCode !== undefined;
113
113
  // A simple heuristic: if the error message doesn't look like one of our handled generic messages,
@@ -197,7 +197,7 @@ const cli = yargs(args)
197
197
  }
198
198
  catch (error) {
199
199
  // Global error handler - should not reach here due to fail() handler
200
- process.stderr.write(chalk.red(`Unexpected CLI error: ${error.message}\n`));
200
+ process.stderr.write(chalk.red(`Unexpected CLI _error: ${error.message}\n`));
201
201
  process.exit(1);
202
202
  }
203
203
  })();
@@ -61,7 +61,7 @@ export function parseEnvFile(content) {
61
61
  */
62
62
  export function generateEnvContent(envVars, existingContent) {
63
63
  const lines = [];
64
- const existingVars = existingContent ? parseEnvFile(existingContent) : {};
64
+ const _existingVars = existingContent ? parseEnvFile(existingContent) : {};
65
65
  const processedKeys = new Set();
66
66
  // If we have existing content, preserve its structure and comments
67
67
  if (existingContent) {
@@ -121,15 +121,15 @@ export function updateEnvFile(newVars, envPath = ".env", createBackup = true) {
121
121
  }
122
122
  // Read existing content
123
123
  let existingContent = "";
124
- let existingVars = {};
124
+ let _existingVars = {};
125
125
  if (fs.existsSync(envPath)) {
126
126
  existingContent = fs.readFileSync(envPath, "utf8");
127
- existingVars = parseEnvFile(existingContent);
127
+ _existingVars = parseEnvFile(existingContent);
128
128
  }
129
129
  // Categorize changes
130
130
  for (const [key, value] of Object.entries(newVars)) {
131
- if (Object.prototype.hasOwnProperty.call(existingVars, key)) {
132
- if (existingVars[key] !== value) {
131
+ if (Object.prototype.hasOwnProperty.call(_existingVars, key)) {
132
+ if (_existingVars[key] !== value) {
133
133
  result.updated.push(key);
134
134
  }
135
135
  else {
@@ -8,6 +8,18 @@ export declare class OllamaUtils {
8
8
  * Secure wrapper around spawnSync to prevent command injection.
9
9
  */
10
10
  static safeSpawn(command: AllowedCommand, args: string[], options?: SpawnSyncOptions): SpawnSyncReturns<string>;
11
+ /**
12
+ * Check if Ollama command line is available
13
+ */
14
+ private static isOllamaCommandReady;
15
+ /**
16
+ * Validate HTTP API response from Ollama
17
+ */
18
+ private static validateApiResponse;
19
+ /**
20
+ * Check if Ollama HTTP API is ready
21
+ */
22
+ private static isOllamaApiReady;
11
23
  /**
12
24
  * Wait for Ollama service to become ready with exponential backoff
13
25
  */
@@ -16,6 +16,55 @@ export class OllamaUtils {
16
16
  };
17
17
  return spawnSync(command, args, defaultOptions);
18
18
  }
19
+ /**
20
+ * Check if Ollama command line is available
21
+ */
22
+ static isOllamaCommandReady() {
23
+ const cmdCheck = this.safeSpawn("ollama", ["list"]);
24
+ return !cmdCheck.error && cmdCheck.status === 0;
25
+ }
26
+ /**
27
+ * Validate HTTP API response from Ollama
28
+ */
29
+ static validateApiResponse(output) {
30
+ const httpCodeMatch = output.match(/(\d{3})$/);
31
+ if (!httpCodeMatch || httpCodeMatch[1] !== "200") {
32
+ return false;
33
+ }
34
+ // Try to parse the JSON response (excluding HTTP code)
35
+ const jsonResponse = output.replace(/\d{3}$/, "");
36
+ try {
37
+ const parsedResponse = JSON.parse(jsonResponse);
38
+ return parsedResponse && typeof parsedResponse === "object";
39
+ }
40
+ catch {
41
+ // JSON parsing failed, but HTTP 200 is good enough
42
+ return true;
43
+ }
44
+ }
45
+ /**
46
+ * Check if Ollama HTTP API is ready
47
+ */
48
+ static isOllamaApiReady() {
49
+ try {
50
+ const apiCheck = this.safeSpawn("curl", [
51
+ "-s",
52
+ "--max-time",
53
+ "3",
54
+ "--fail", // Fail on HTTP error codes
55
+ "-w",
56
+ "%{http_code}",
57
+ "http://localhost:11434/api/tags",
58
+ ]);
59
+ if (apiCheck.error || apiCheck.status !== 0 || !apiCheck.stdout.trim()) {
60
+ return false;
61
+ }
62
+ return this.validateApiResponse(apiCheck.stdout.trim());
63
+ }
64
+ catch {
65
+ return false;
66
+ }
67
+ }
19
68
  /**
20
69
  * Wait for Ollama service to become ready with exponential backoff
21
70
  */
@@ -23,49 +72,16 @@ export class OllamaUtils {
23
72
  let delay = initialDelay;
24
73
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
25
74
  try {
26
- // Try both command line and HTTP API checks
27
- const cmdCheck = this.safeSpawn("ollama", ["list"]);
28
- if (!cmdCheck.error && cmdCheck.status === 0) {
29
- // Stronger HTTP API probe with response validation
30
- try {
31
- const apiCheck = this.safeSpawn("curl", [
32
- "-s",
33
- "--max-time",
34
- "3",
35
- "--fail", // Fail on HTTP error codes
36
- "-w",
37
- "%{http_code}",
38
- "http://localhost:11434/api/tags",
39
- ]);
40
- if (!apiCheck.error &&
41
- apiCheck.status === 0 &&
42
- apiCheck.stdout.trim()) {
43
- // Validate that we get a proper HTTP 200 response and JSON structure
44
- const output = apiCheck.stdout.trim();
45
- const httpCodeMatch = output.match(/(\d{3})$/);
46
- if (httpCodeMatch && httpCodeMatch[1] === "200") {
47
- // Try to parse the JSON response (excluding HTTP code)
48
- const jsonResponse = output.replace(/\d{3}$/, "");
49
- try {
50
- const parsedResponse = JSON.parse(jsonResponse);
51
- // Verify it has the expected structure
52
- if (parsedResponse && typeof parsedResponse === "object") {
53
- return true; // Strong verification passed
54
- }
55
- }
56
- catch {
57
- // JSON parsing failed, but HTTP 200 is good enough
58
- return true;
59
- }
60
- }
61
- }
62
- }
63
- catch {
64
- // If curl fails, fall back to command check only
65
- return true;
66
- }
67
- return true; // Command check passed
75
+ // Try command line check first
76
+ if (!this.isOllamaCommandReady()) {
77
+ continue;
78
+ }
79
+ // If command check passes, verify HTTP API
80
+ if (this.isOllamaApiReady()) {
81
+ return true;
68
82
  }
83
+ // Command check passed but API not ready, still consider ready
84
+ return true;
69
85
  }
70
86
  catch {
71
87
  // Service not ready yet
@@ -6,6 +6,7 @@ import { promises as fs } from "fs";
6
6
  import path from "path";
7
7
  import crypto from "crypto";
8
8
  import { logger } from "../utils/logger.js";
9
+ import { DEFAULT_CONFIG } from "./types.js";
9
10
  const { readFile, writeFile, readdir, mkdir, unlink, access } = fs;
10
11
  /**
11
12
  * Enhanced Config Manager with automatic backup/restore capabilities
@@ -237,7 +238,6 @@ export default ${JSON.stringify(currentConfig, null, 2)};`;
237
238
  * Generate default configuration
238
239
  */
239
240
  async generateDefaultConfig() {
240
- const { DEFAULT_CONFIG } = await import("./types.js");
241
241
  return {
242
242
  ...DEFAULT_CONFIG,
243
243
  lastUpdated: Date.now(),
@@ -266,7 +266,10 @@ export default ${JSON.stringify(currentConfig, null, 2)};`;
266
266
  throw new Error("Invalid config file format");
267
267
  }
268
268
  catch (error) {
269
- logger.info("Config file not found, generating default...");
269
+ logger.info("Config file not found or invalid, generating default...", {
270
+ error: error instanceof Error ? error.message : String(error),
271
+ configPath: this.configPath,
272
+ });
270
273
  return await this.generateDefaultConfig();
271
274
  }
272
275
  }
@@ -36,5 +36,10 @@ export function getConversationMemoryDefaults() {
36
36
  maxSessions: Number(process.env.NEUROLINK_MEMORY_MAX_SESSIONS) || DEFAULT_MAX_SESSIONS,
37
37
  maxTurnsPerSession: Number(process.env.NEUROLINK_MEMORY_MAX_TURNS_PER_SESSION) ||
38
38
  DEFAULT_MAX_TURNS_PER_SESSION,
39
+ enableSummarization: process.env.NEUROLINK_SUMMARIZATION_ENABLED === "true",
40
+ summarizationThresholdTurns: Number(process.env.NEUROLINK_SUMMARIZATION_THRESHOLD_TURNS) || 20,
41
+ summarizationTargetTurns: Number(process.env.NEUROLINK_SUMMARIZATION_TARGET_TURNS) || 10,
42
+ summarizationProvider: process.env.NEUROLINK_SUMMARIZATION_PROVIDER || "vertex",
43
+ summarizationModel: process.env.NEUROLINK_SUMMARIZATION_MODEL || "gemini-2.5-flash",
39
44
  };
40
45
  }
@@ -4,30 +4,8 @@
4
4
  * Provides lightweight analytics tracking for AI provider usage,
5
5
  * including tokens, costs, performance metrics, and custom context.
6
6
  */
7
- import type { JsonValue, UnknownRecord } from "../types/common.js";
8
- export interface AnalyticsData {
9
- provider: string;
10
- model: string;
11
- tokens: {
12
- input: number;
13
- output: number;
14
- total: number;
15
- };
16
- cost?: number;
17
- responseTime: number;
18
- context?: Record<string, JsonValue>;
19
- timestamp: string;
20
- evaluation?: {
21
- relevanceScore: number;
22
- accuracyScore: number;
23
- completenessScore: number;
24
- overall: number;
25
- evaluationProvider?: string;
26
- evaluationTime?: number;
27
- evaluationAttempt?: number;
28
- };
29
- costDetails?: UnknownRecord;
30
- }
7
+ import type { AnalyticsData } from "../types/providers.js";
8
+ export type { AnalyticsData, TokenUsage } from "../types/providers.js";
31
9
  /**
32
10
  * Create analytics data structure from AI response
33
11
  */