@juspay/neurolink 1.6.0 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176)
  1. package/CHANGELOG.md +193 -7
  2. package/README.md +100 -17
  3. package/dist/agent/direct-tools.d.ts +1203 -0
  4. package/dist/agent/direct-tools.js +387 -0
  5. package/dist/cli/commands/agent-generate.d.ts +2 -0
  6. package/dist/cli/commands/agent-generate.js +70 -0
  7. package/dist/cli/commands/config.d.ts +6 -6
  8. package/dist/cli/commands/config.js +326 -273
  9. package/dist/cli/commands/mcp.d.ts +2 -1
  10. package/dist/cli/commands/mcp.js +874 -146
  11. package/dist/cli/commands/ollama.d.ts +1 -1
  12. package/dist/cli/commands/ollama.js +153 -143
  13. package/dist/cli/index.js +589 -323
  14. package/dist/cli/utils/complete-setup.d.ts +19 -0
  15. package/dist/cli/utils/complete-setup.js +81 -0
  16. package/dist/cli/utils/env-manager.d.ts +44 -0
  17. package/dist/cli/utils/env-manager.js +226 -0
  18. package/dist/cli/utils/interactive-setup.d.ts +48 -0
  19. package/dist/cli/utils/interactive-setup.js +302 -0
  20. package/dist/core/dynamic-models.d.ts +208 -0
  21. package/dist/core/dynamic-models.js +250 -0
  22. package/dist/core/factory.d.ts +13 -6
  23. package/dist/core/factory.js +176 -61
  24. package/dist/core/types.d.ts +4 -2
  25. package/dist/core/types.js +4 -4
  26. package/dist/index.d.ts +16 -16
  27. package/dist/index.js +16 -16
  28. package/dist/lib/agent/direct-tools.d.ts +1203 -0
  29. package/dist/lib/agent/direct-tools.js +387 -0
  30. package/dist/lib/core/dynamic-models.d.ts +208 -0
  31. package/dist/lib/core/dynamic-models.js +250 -0
  32. package/dist/lib/core/factory.d.ts +13 -6
  33. package/dist/lib/core/factory.js +176 -61
  34. package/dist/lib/core/types.d.ts +4 -2
  35. package/dist/lib/core/types.js +4 -4
  36. package/dist/lib/index.d.ts +16 -16
  37. package/dist/lib/index.js +16 -16
  38. package/dist/lib/mcp/auto-discovery.d.ts +120 -0
  39. package/dist/lib/mcp/auto-discovery.js +793 -0
  40. package/dist/lib/mcp/client.d.ts +66 -0
  41. package/dist/lib/mcp/client.js +245 -0
  42. package/dist/lib/mcp/config.d.ts +31 -0
  43. package/dist/lib/mcp/config.js +74 -0
  44. package/dist/lib/mcp/context-manager.d.ts +4 -4
  45. package/dist/lib/mcp/context-manager.js +24 -18
  46. package/dist/lib/mcp/factory.d.ts +28 -11
  47. package/dist/lib/mcp/factory.js +36 -29
  48. package/dist/lib/mcp/function-calling.d.ts +51 -0
  49. package/dist/lib/mcp/function-calling.js +510 -0
  50. package/dist/lib/mcp/index.d.ts +190 -0
  51. package/dist/lib/mcp/index.js +156 -0
  52. package/dist/lib/mcp/initialize-tools.d.ts +28 -0
  53. package/dist/lib/mcp/initialize-tools.js +209 -0
  54. package/dist/lib/mcp/initialize.d.ts +17 -0
  55. package/dist/lib/mcp/initialize.js +51 -0
  56. package/dist/lib/mcp/logging.d.ts +71 -0
  57. package/dist/lib/mcp/logging.js +183 -0
  58. package/dist/lib/mcp/manager.d.ts +67 -0
  59. package/dist/lib/mcp/manager.js +176 -0
  60. package/dist/lib/mcp/neurolink-mcp-client.d.ts +96 -0
  61. package/dist/lib/mcp/neurolink-mcp-client.js +417 -0
  62. package/dist/lib/mcp/orchestrator.d.ts +3 -3
  63. package/dist/lib/mcp/orchestrator.js +46 -43
  64. package/dist/lib/mcp/registry.d.ts +2 -2
  65. package/dist/lib/mcp/registry.js +42 -33
  66. package/dist/lib/mcp/servers/ai-providers/ai-analysis-tools.d.ts +1 -1
  67. package/dist/lib/mcp/servers/ai-providers/ai-analysis-tools.js +204 -65
  68. package/dist/lib/mcp/servers/ai-providers/ai-core-server.js +142 -102
  69. package/dist/lib/mcp/servers/ai-providers/ai-workflow-tools.d.ts +6 -6
  70. package/dist/lib/mcp/servers/ai-providers/ai-workflow-tools.js +197 -142
  71. package/dist/lib/mcp/servers/utilities/utility-server.d.ts +8 -0
  72. package/dist/lib/mcp/servers/utilities/utility-server.js +326 -0
  73. package/dist/lib/mcp/tool-integration.d.ts +67 -0
  74. package/dist/lib/mcp/tool-integration.js +179 -0
  75. package/dist/lib/mcp/unified-registry.d.ts +269 -0
  76. package/dist/lib/mcp/unified-registry.js +1411 -0
  77. package/dist/lib/neurolink.d.ts +68 -6
  78. package/dist/lib/neurolink.js +304 -42
  79. package/dist/lib/providers/agent-enhanced-provider.d.ts +59 -0
  80. package/dist/lib/providers/agent-enhanced-provider.js +242 -0
  81. package/dist/lib/providers/amazonBedrock.d.ts +3 -3
  82. package/dist/lib/providers/amazonBedrock.js +54 -50
  83. package/dist/lib/providers/anthropic.d.ts +2 -2
  84. package/dist/lib/providers/anthropic.js +92 -84
  85. package/dist/lib/providers/azureOpenAI.d.ts +2 -2
  86. package/dist/lib/providers/azureOpenAI.js +97 -86
  87. package/dist/lib/providers/function-calling-provider.d.ts +70 -0
  88. package/dist/lib/providers/function-calling-provider.js +359 -0
  89. package/dist/lib/providers/googleAIStudio.d.ts +10 -5
  90. package/dist/lib/providers/googleAIStudio.js +60 -38
  91. package/dist/lib/providers/googleVertexAI.d.ts +3 -3
  92. package/dist/lib/providers/googleVertexAI.js +96 -86
  93. package/dist/lib/providers/huggingFace.d.ts +3 -3
  94. package/dist/lib/providers/huggingFace.js +70 -63
  95. package/dist/lib/providers/index.d.ts +11 -11
  96. package/dist/lib/providers/index.js +18 -18
  97. package/dist/lib/providers/mcp-provider.d.ts +62 -0
  98. package/dist/lib/providers/mcp-provider.js +183 -0
  99. package/dist/lib/providers/mistralAI.d.ts +3 -3
  100. package/dist/lib/providers/mistralAI.js +42 -36
  101. package/dist/lib/providers/ollama.d.ts +4 -4
  102. package/dist/lib/providers/ollama.js +113 -98
  103. package/dist/lib/providers/openAI.d.ts +7 -3
  104. package/dist/lib/providers/openAI.js +45 -33
  105. package/dist/lib/utils/logger.js +2 -2
  106. package/dist/lib/utils/providerUtils.js +53 -31
  107. package/dist/mcp/auto-discovery.d.ts +120 -0
  108. package/dist/mcp/auto-discovery.js +794 -0
  109. package/dist/mcp/client.d.ts +66 -0
  110. package/dist/mcp/client.js +245 -0
  111. package/dist/mcp/config.d.ts +31 -0
  112. package/dist/mcp/config.js +74 -0
  113. package/dist/mcp/context-manager.d.ts +4 -4
  114. package/dist/mcp/context-manager.js +24 -18
  115. package/dist/mcp/factory.d.ts +28 -11
  116. package/dist/mcp/factory.js +36 -29
  117. package/dist/mcp/function-calling.d.ts +51 -0
  118. package/dist/mcp/function-calling.js +510 -0
  119. package/dist/mcp/index.d.ts +190 -0
  120. package/dist/mcp/index.js +156 -0
  121. package/dist/mcp/initialize-tools.d.ts +28 -0
  122. package/dist/mcp/initialize-tools.js +210 -0
  123. package/dist/mcp/initialize.d.ts +17 -0
  124. package/dist/mcp/initialize.js +51 -0
  125. package/dist/mcp/logging.d.ts +71 -0
  126. package/dist/mcp/logging.js +183 -0
  127. package/dist/mcp/manager.d.ts +67 -0
  128. package/dist/mcp/manager.js +176 -0
  129. package/dist/mcp/neurolink-mcp-client.d.ts +96 -0
  130. package/dist/mcp/neurolink-mcp-client.js +417 -0
  131. package/dist/mcp/orchestrator.d.ts +3 -3
  132. package/dist/mcp/orchestrator.js +46 -43
  133. package/dist/mcp/registry.d.ts +2 -2
  134. package/dist/mcp/registry.js +42 -33
  135. package/dist/mcp/servers/ai-providers/ai-analysis-tools.d.ts +1 -1
  136. package/dist/mcp/servers/ai-providers/ai-analysis-tools.js +204 -65
  137. package/dist/mcp/servers/ai-providers/ai-core-server.js +142 -102
  138. package/dist/mcp/servers/ai-providers/ai-workflow-tools.d.ts +6 -6
  139. package/dist/mcp/servers/ai-providers/ai-workflow-tools.js +197 -142
  140. package/dist/mcp/servers/utilities/utility-server.d.ts +8 -0
  141. package/dist/mcp/servers/utilities/utility-server.js +326 -0
  142. package/dist/mcp/tool-integration.d.ts +67 -0
  143. package/dist/mcp/tool-integration.js +179 -0
  144. package/dist/mcp/unified-registry.d.ts +269 -0
  145. package/dist/mcp/unified-registry.js +1411 -0
  146. package/dist/neurolink.d.ts +68 -6
  147. package/dist/neurolink.js +304 -42
  148. package/dist/providers/agent-enhanced-provider.d.ts +59 -0
  149. package/dist/providers/agent-enhanced-provider.js +242 -0
  150. package/dist/providers/amazonBedrock.d.ts +3 -3
  151. package/dist/providers/amazonBedrock.js +54 -50
  152. package/dist/providers/anthropic.d.ts +2 -2
  153. package/dist/providers/anthropic.js +92 -84
  154. package/dist/providers/azureOpenAI.d.ts +2 -2
  155. package/dist/providers/azureOpenAI.js +97 -86
  156. package/dist/providers/function-calling-provider.d.ts +70 -0
  157. package/dist/providers/function-calling-provider.js +359 -0
  158. package/dist/providers/googleAIStudio.d.ts +10 -5
  159. package/dist/providers/googleAIStudio.js +60 -38
  160. package/dist/providers/googleVertexAI.d.ts +3 -3
  161. package/dist/providers/googleVertexAI.js +96 -86
  162. package/dist/providers/huggingFace.d.ts +3 -3
  163. package/dist/providers/huggingFace.js +70 -63
  164. package/dist/providers/index.d.ts +11 -11
  165. package/dist/providers/index.js +18 -18
  166. package/dist/providers/mcp-provider.d.ts +62 -0
  167. package/dist/providers/mcp-provider.js +183 -0
  168. package/dist/providers/mistralAI.d.ts +3 -3
  169. package/dist/providers/mistralAI.js +42 -36
  170. package/dist/providers/ollama.d.ts +4 -4
  171. package/dist/providers/ollama.js +113 -98
  172. package/dist/providers/openAI.d.ts +7 -3
  173. package/dist/providers/openAI.js +45 -33
  174. package/dist/utils/logger.js +2 -2
  175. package/dist/utils/providerUtils.js +53 -31
  176. package/package.json +175 -161
@@ -1,57 +1,65 @@
- import { openai } from '@ai-sdk/openai';
- import { streamText, generateText, Output } from 'ai';
- import { logger } from '../utils/logger.js';
+ import { openai } from "@ai-sdk/openai";
+ import { streamText, generateText, Output, } from "ai";
+ import { logger } from "../utils/logger.js";
  // Default system context
  const DEFAULT_SYSTEM_CONTEXT = {
- systemPrompt: 'You are a helpful AI assistant.'
+ systemPrompt: "You are a helpful AI assistant.",
  };
  // Configuration helpers
  const getOpenAIApiKey = () => {
  const apiKey = process.env.OPENAI_API_KEY;
  if (!apiKey) {
- throw new Error('OPENAI_API_KEY environment variable is not set');
+ throw new Error("OPENAI_API_KEY environment variable is not set");
  }
  return apiKey;
  };
  const getOpenAIModel = () => {
- return process.env.OPENAI_MODEL || 'gpt-4o';
+ return process.env.OPENAI_MODEL || "gpt-4o";
  };
  // OpenAI class with enhanced error handling
  export class OpenAI {
  modelName;
  model;
  constructor(modelName) {
- const functionTag = 'OpenAI.constructor';
+ const functionTag = "OpenAI.constructor";
  this.modelName = modelName || getOpenAIModel();
  try {
- logger.debug(`[${functionTag}] Function called`, { modelName: this.modelName });
+ logger.debug(`[${functionTag}] Function called`, {
+ modelName: this.modelName,
+ });
  // Set OpenAI API key as environment variable
  process.env.OPENAI_API_KEY = getOpenAIApiKey();
  this.model = openai(this.modelName);
  logger.debug(`[${functionTag}] Function result`, {
  modelName: this.modelName,
- success: true
+ success: true,
  });
  }
  catch (err) {
  logger.debug(`[${functionTag}] Exception`, {
- message: 'Error in initializing OpenAI',
+ message: "Error in initializing OpenAI",
  modelName: this.modelName,
- err: String(err)
+ err: String(err),
  });
  throw err;
  }
  }
+ /**
+ * Get the underlying model for function calling
+ */
+ getModel() {
+ return this.model;
+ }
  async streamText(optionsOrPrompt, analysisSchema) {
- const functionTag = 'OpenAI.streamText';
- const provider = 'openai';
+ const functionTag = "OpenAI.streamText";
+ const provider = "openai";
  let chunkCount = 0;
  try {
  // Parse parameters - support both string and options object
- const options = typeof optionsOrPrompt === 'string'
+ const options = typeof optionsOrPrompt === "string"
  ? { prompt: optionsOrPrompt }
  : optionsOrPrompt;
- const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema } = options;
+ const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, } = options;
  // Use schema from options or fallback parameter
  const finalSchema = schema || analysisSchema;
  logger.debug(`[${functionTag}] Stream text started`, {
@@ -59,7 +67,7 @@ export class OpenAI {
  modelName: this.modelName,
  promptLength: prompt.length,
  temperature,
- maxTokens
+ maxTokens,
  });
  const streamOptions = {
  model: this.model,
@@ -77,7 +85,7 @@ export class OpenAI {
  error: errorMessage,
  stack: errorStack,
  promptLength: prompt.length,
- chunkCount
+ chunkCount,
  });
  },
  onFinish: (event) => {
@@ -88,7 +96,7 @@ export class OpenAI {
  usage: event.usage,
  totalChunks: chunkCount,
  promptLength: prompt.length,
- responseLength: event.text?.length || 0
+ responseLength: event.text?.length || 0,
  });
  },
  onChunk: (event) => {
@@ -98,12 +106,14 @@ export class OpenAI {
  modelName: this.modelName,
  chunkNumber: chunkCount,
  chunkLength: event.chunk.text?.length || 0,
- chunkType: event.chunk.type
+ chunkType: event.chunk.type,
  });
- }
+ },
  };
  if (finalSchema) {
- streamOptions.experimental_output = Output.object({ schema: finalSchema });
+ streamOptions.experimental_output = Output.object({
+ schema: finalSchema,
+ });
  }
  const result = streamText(streamOptions);
  return result;
@@ -112,21 +122,21 @@ export class OpenAI {
  logger.debug(`[${functionTag}] Exception`, {
  provider,
  modelName: this.modelName,
- message: 'Error in streaming text',
- err: String(err)
+ message: "Error in streaming text",
+ err: String(err),
  });
  throw err; // Re-throw error to trigger fallback
  }
  }
  async generateText(optionsOrPrompt, analysisSchema) {
- const functionTag = 'OpenAI.generateText';
- const provider = 'openai';
+ const functionTag = "OpenAI.generateText";
+ const provider = "openai";
  try {
  // Parse parameters - support both string and options object
- const options = typeof optionsOrPrompt === 'string'
+ const options = typeof optionsOrPrompt === "string"
  ? { prompt: optionsOrPrompt }
  : optionsOrPrompt;
- const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema } = options;
+ const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, } = options;
  // Use schema from options or fallback parameter
  const finalSchema = schema || analysisSchema;
  logger.debug(`[${functionTag}] Generate text started`, {
@@ -134,17 +144,19 @@ export class OpenAI {
  modelName: this.modelName,
  promptLength: prompt.length,
  temperature,
- maxTokens
+ maxTokens,
  });
  const generateOptions = {
  model: this.model,
  prompt: prompt,
  system: systemPrompt,
  temperature,
- maxTokens
+ maxTokens,
  };
  if (finalSchema) {
- generateOptions.experimental_output = Output.object({ schema: finalSchema });
+ generateOptions.experimental_output = Output.object({
+ schema: finalSchema,
+ });
  }
  const result = await generateText(generateOptions);
  logger.debug(`[${functionTag}] Generate text completed`, {
@@ -152,7 +164,7 @@ export class OpenAI {
  modelName: this.modelName,
  usage: result.usage,
  finishReason: result.finishReason,
- responseLength: result.text?.length || 0
+ responseLength: result.text?.length || 0,
  });
  return result;
  }
@@ -160,8 +172,8 @@ export class OpenAI {
  logger.debug(`[${functionTag}] Exception`, {
  provider,
  modelName: this.modelName,
- message: 'Error in generating text',
- err: String(err)
+ message: "Error in generating text",
+ err: String(err),
  });
  throw err; // Re-throw error to trigger fallback
  }
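
The hunks above appear to be the compiled OpenAI provider (matching the dist/providers/openAI.js entry in the file list). Beyond the quote-style reformatting, they confirm two pieces of API surface: generateText and streamText accept either a bare prompt string or an options object, and the new getModel() accessor exposes the underlying AI SDK model for function calling. A minimal usage sketch based only on the signatures visible in this diff; the deep import path is an assumption, not a documented entry point, and the values are illustrative:

// Sketch only - derived from the signatures in the hunks above.
// The deep import path is an assumption; it is not a documented entry point.
import { OpenAI } from "@juspay/neurolink/dist/providers/openAI.js";

// Requires OPENAI_API_KEY (the constructor throws if it is missing).
// The model defaults to process.env.OPENAI_MODEL || "gpt-4o".
const provider = new OpenAI();

// String form: just a prompt.
const quick = await provider.generateText("Summarize MCP in one sentence.");
console.log(quick.text);

// Options form: the fields destructured in the diff (schema is optional).
const detailed = await provider.generateText({
  prompt: "List three MCP use cases.",
  temperature: 0.2,
  maxTokens: 200,
  systemPrompt: "You are a terse technical writer.",
});
console.log(detailed.usage, detailed.finishReason);

// New in this range: access the underlying AI SDK model for function calling.
const model = provider.getModel();
console.log(typeof model);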
@@ -5,7 +5,7 @@
  */
  export const logger = {
  debug: (...args) => {
- if (process.env.NEUROLINK_DEBUG === 'true') {
+ if (process.env.NEUROLINK_DEBUG === "true") {
  console.log(...args);
  }
  },
@@ -21,5 +21,5 @@ export const logger = {
  },
  always: (...args) => {
  console.log(...args);
- }
+ },
  };
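
The logger hunks (seemingly dist/utils/logger.js) are pure formatting, but they spell out the gating contract: logger.debug only writes when NEUROLINK_DEBUG is the literal string "true", while logger.always writes unconditionally. A quick sketch of that behaviour, with an assumed deep import path:

// Sketch of the gating shown above; the deep import path is an assumption.
import { logger } from "@juspay/neurolink/dist/utils/logger.js";

logger.debug("hidden by default");               // silent unless NEUROLINK_DEBUG === "true"

process.env.NEUROLINK_DEBUG = "true";            // must be the exact string "true"
logger.debug("[demo] now visible", { ok: 1 });   // forwarded to console.log

logger.always("printed regardless of NEUROLINK_DEBUG");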
@@ -1,7 +1,7 @@
  /**
  * Utility functions for AI provider management
  */
- import { logger } from './logger.js';
+ import { logger } from "./logger.js";
  /**
  * Get the best available provider based on preferences and availability
  * @param requestedProvider - Optional preferred provider name
@@ -12,9 +12,19 @@ export function getBestProvider(requestedProvider) {
  if (requestedProvider) {
  return requestedProvider;
  }
- // Default fallback order based on environment variables - OpenAI first since it's most reliable
+ // Default fallback order - Google AI (Gemini) first as primary provider
  // Ollama last since it requires local setup
- const providers = ['openai', 'anthropic', 'google-ai', 'mistral', 'vertex', 'azure', 'huggingface', 'bedrock', 'ollama'];
+ const providers = [
+ "google-ai",
+ "anthropic",
+ "openai",
+ "mistral",
+ "vertex",
+ "azure",
+ "huggingface",
+ "bedrock",
+ "ollama",
+ ];
  // Check which providers have their required environment variables
  for (const provider of providers) {
  if (isProviderConfigured(provider)) {
@@ -23,8 +33,8 @@ export function getBestProvider(requestedProvider) {
  }
  }
  // Default to bedrock if nothing is configured
- logger.warn('[getBestProvider] No providers configured, defaulting to bedrock');
- return 'bedrock';
+ logger.warn("[getBestProvider] No providers configured, defaulting to bedrock");
+ return "bedrock";
  }
  /**
  * Check if a provider has the minimum required configuration
@@ -33,39 +43,41 @@
  */
  function isProviderConfigured(provider) {
  switch (provider.toLowerCase()) {
- case 'bedrock':
- case 'amazon':
- case 'aws':
+ case "bedrock":
+ case "amazon":
+ case "aws":
  return !!(process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY);
- case 'vertex':
- case 'google':
- case 'gemini':
- return !!(process.env.GOOGLE_VERTEX_PROJECT || process.env.GOOGLE_APPLICATION_CREDENTIALS);
- case 'openai':
- case 'gpt':
+ case "vertex":
+ case "google":
+ case "gemini":
+ return !!(process.env.GOOGLE_VERTEX_PROJECT ||
+ process.env.GOOGLE_APPLICATION_CREDENTIALS);
+ case "openai":
+ case "gpt":
  return !!process.env.OPENAI_API_KEY;
- case 'anthropic':
- case 'claude':
+ case "anthropic":
+ case "claude":
  return !!process.env.ANTHROPIC_API_KEY;
- case 'azure':
- case 'azure-openai':
+ case "azure":
+ case "azure-openai":
  return !!process.env.AZURE_OPENAI_API_KEY;
- case 'google-ai':
- case 'google-studio':
- return !!(process.env.GOOGLE_AI_API_KEY || process.env.GOOGLE_GENERATIVE_AI_API_KEY);
- case 'huggingface':
- case 'hugging-face':
- case 'hf':
+ case "google-ai":
+ case "google-studio":
+ return !!(process.env.GOOGLE_AI_API_KEY ||
+ process.env.GOOGLE_GENERATIVE_AI_API_KEY);
+ case "huggingface":
+ case "hugging-face":
+ case "hf":
  return !!(process.env.HUGGINGFACE_API_KEY || process.env.HF_TOKEN);
- case 'ollama':
- case 'local':
- case 'local-ollama':
+ case "ollama":
+ case "local":
+ case "local-ollama":
  // For Ollama, we check if the service is potentially available
  // This is a basic check - actual connectivity will be verified during usage
  return true; // Ollama doesn't require environment variables, just local service
- case 'mistral':
- case 'mistral-ai':
- case 'mistralai':
+ case "mistral":
+ case "mistral-ai":
+ case "mistralai":
  return !!process.env.MISTRAL_API_KEY;
  default:
  return false;
@@ -76,7 +88,17 @@ function isProviderConfigured(provider) {
  * @returns Array of available provider names
  */
  export function getAvailableProviders() {
- return ['bedrock', 'vertex', 'openai', 'anthropic', 'azure', 'google-ai', 'huggingface', 'ollama', 'mistral'];
+ return [
+ "bedrock",
+ "vertex",
+ "openai",
+ "anthropic",
+ "azure",
+ "google-ai",
+ "huggingface",
+ "ollama",
+ "mistral",
+ ];
  }
  /**
  * Validate provider name
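
The behavioural change in these providerUtils hunks is the fallback order inside getBestProvider(): with no provider requested, 1.9.0 walks the list starting at "google-ai" (1.6.0 started at "openai"), returns the first entry whose environment variables pass isProviderConfigured(), and still warns and returns "bedrock" when nothing matches. A hedged sketch of how that selection plays out, again assuming a deep import from the dist output:

// Sketch of the selection logic shown above; the deep import path is an assumption.
import { getBestProvider, getAvailableProviders } from "@juspay/neurolink/dist/utils/providerUtils.js";

// An explicitly requested provider is returned as-is, configured or not.
console.log(getBestProvider("anthropic")); // "anthropic"

// With only a Google AI Studio key set, 1.9.0 resolves to "google-ai" first;
// the 1.6.0 order would have checked "openai" and "anthropic" before it.
process.env.GOOGLE_AI_API_KEY = "example-key"; // illustrative value
console.log(getBestProvider()); // "google-ai"

// The static list of provider names the package advertises.
console.log(getAvailableProviders());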