@juspay/neurolink 1.5.3 → 1.9.0

This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (176)
  1. package/CHANGELOG.md +241 -1
  2. package/README.md +113 -20
  3. package/dist/agent/direct-tools.d.ts +1203 -0
  4. package/dist/agent/direct-tools.js +387 -0
  5. package/dist/cli/commands/agent-generate.d.ts +2 -0
  6. package/dist/cli/commands/agent-generate.js +70 -0
  7. package/dist/cli/commands/config.d.ts +76 -9
  8. package/dist/cli/commands/config.js +358 -233
  9. package/dist/cli/commands/mcp.d.ts +2 -1
  10. package/dist/cli/commands/mcp.js +874 -146
  11. package/dist/cli/commands/ollama.d.ts +8 -0
  12. package/dist/cli/commands/ollama.js +333 -0
  13. package/dist/cli/index.js +591 -327
  14. package/dist/cli/utils/complete-setup.d.ts +19 -0
  15. package/dist/cli/utils/complete-setup.js +81 -0
  16. package/dist/cli/utils/env-manager.d.ts +44 -0
  17. package/dist/cli/utils/env-manager.js +226 -0
  18. package/dist/cli/utils/interactive-setup.d.ts +48 -0
  19. package/dist/cli/utils/interactive-setup.js +302 -0
  20. package/dist/core/dynamic-models.d.ts +208 -0
  21. package/dist/core/dynamic-models.js +250 -0
  22. package/dist/core/factory.d.ts +13 -6
  23. package/dist/core/factory.js +180 -50
  24. package/dist/core/types.d.ts +8 -3
  25. package/dist/core/types.js +7 -4
  26. package/dist/index.d.ts +16 -16
  27. package/dist/index.js +16 -16
  28. package/dist/lib/agent/direct-tools.d.ts +1203 -0
  29. package/dist/lib/agent/direct-tools.js +387 -0
  30. package/dist/lib/core/dynamic-models.d.ts +208 -0
  31. package/dist/lib/core/dynamic-models.js +250 -0
  32. package/dist/lib/core/factory.d.ts +13 -6
  33. package/dist/lib/core/factory.js +180 -50
  34. package/dist/lib/core/types.d.ts +8 -3
  35. package/dist/lib/core/types.js +7 -4
  36. package/dist/lib/index.d.ts +16 -16
  37. package/dist/lib/index.js +16 -16
  38. package/dist/lib/mcp/auto-discovery.d.ts +120 -0
  39. package/dist/lib/mcp/auto-discovery.js +793 -0
  40. package/dist/lib/mcp/client.d.ts +66 -0
  41. package/dist/lib/mcp/client.js +245 -0
  42. package/dist/lib/mcp/config.d.ts +31 -0
  43. package/dist/lib/mcp/config.js +74 -0
  44. package/dist/lib/mcp/context-manager.d.ts +4 -4
  45. package/dist/lib/mcp/context-manager.js +24 -18
  46. package/dist/lib/mcp/factory.d.ts +28 -11
  47. package/dist/lib/mcp/factory.js +36 -29
  48. package/dist/lib/mcp/function-calling.d.ts +51 -0
  49. package/dist/lib/mcp/function-calling.js +510 -0
  50. package/dist/lib/mcp/index.d.ts +190 -0
  51. package/dist/lib/mcp/index.js +156 -0
  52. package/dist/lib/mcp/initialize-tools.d.ts +28 -0
  53. package/dist/lib/mcp/initialize-tools.js +209 -0
  54. package/dist/lib/mcp/initialize.d.ts +17 -0
  55. package/dist/lib/mcp/initialize.js +51 -0
  56. package/dist/lib/mcp/logging.d.ts +71 -0
  57. package/dist/lib/mcp/logging.js +183 -0
  58. package/dist/lib/mcp/manager.d.ts +67 -0
  59. package/dist/lib/mcp/manager.js +176 -0
  60. package/dist/lib/mcp/neurolink-mcp-client.d.ts +96 -0
  61. package/dist/lib/mcp/neurolink-mcp-client.js +417 -0
  62. package/dist/lib/mcp/orchestrator.d.ts +3 -3
  63. package/dist/lib/mcp/orchestrator.js +46 -43
  64. package/dist/lib/mcp/registry.d.ts +2 -2
  65. package/dist/lib/mcp/registry.js +42 -33
  66. package/dist/lib/mcp/servers/ai-providers/ai-analysis-tools.d.ts +1 -1
  67. package/dist/lib/mcp/servers/ai-providers/ai-analysis-tools.js +205 -66
  68. package/dist/lib/mcp/servers/ai-providers/ai-core-server.js +143 -99
  69. package/dist/lib/mcp/servers/ai-providers/ai-workflow-tools.d.ts +6 -6
  70. package/dist/lib/mcp/servers/ai-providers/ai-workflow-tools.js +404 -251
  71. package/dist/lib/mcp/servers/utilities/utility-server.d.ts +8 -0
  72. package/dist/lib/mcp/servers/utilities/utility-server.js +326 -0
  73. package/dist/lib/mcp/tool-integration.d.ts +67 -0
  74. package/dist/lib/mcp/tool-integration.js +179 -0
  75. package/dist/lib/mcp/unified-registry.d.ts +269 -0
  76. package/dist/lib/mcp/unified-registry.js +1411 -0
  77. package/dist/lib/neurolink.d.ts +68 -6
  78. package/dist/lib/neurolink.js +314 -42
  79. package/dist/lib/providers/agent-enhanced-provider.d.ts +59 -0
  80. package/dist/lib/providers/agent-enhanced-provider.js +242 -0
  81. package/dist/lib/providers/amazonBedrock.d.ts +3 -3
  82. package/dist/lib/providers/amazonBedrock.js +54 -50
  83. package/dist/lib/providers/anthropic.d.ts +2 -2
  84. package/dist/lib/providers/anthropic.js +92 -84
  85. package/dist/lib/providers/azureOpenAI.d.ts +2 -2
  86. package/dist/lib/providers/azureOpenAI.js +97 -86
  87. package/dist/lib/providers/function-calling-provider.d.ts +70 -0
  88. package/dist/lib/providers/function-calling-provider.js +359 -0
  89. package/dist/lib/providers/googleAIStudio.d.ts +10 -5
  90. package/dist/lib/providers/googleAIStudio.js +60 -38
  91. package/dist/lib/providers/googleVertexAI.d.ts +3 -3
  92. package/dist/lib/providers/googleVertexAI.js +96 -86
  93. package/dist/lib/providers/huggingFace.d.ts +31 -0
  94. package/dist/lib/providers/huggingFace.js +362 -0
  95. package/dist/lib/providers/index.d.ts +14 -8
  96. package/dist/lib/providers/index.js +18 -12
  97. package/dist/lib/providers/mcp-provider.d.ts +62 -0
  98. package/dist/lib/providers/mcp-provider.js +183 -0
  99. package/dist/lib/providers/mistralAI.d.ts +32 -0
  100. package/dist/lib/providers/mistralAI.js +223 -0
  101. package/dist/lib/providers/ollama.d.ts +51 -0
  102. package/dist/lib/providers/ollama.js +508 -0
  103. package/dist/lib/providers/openAI.d.ts +7 -3
  104. package/dist/lib/providers/openAI.js +45 -33
  105. package/dist/lib/utils/logger.js +2 -2
  106. package/dist/lib/utils/providerUtils.js +59 -22
  107. package/dist/mcp/auto-discovery.d.ts +120 -0
  108. package/dist/mcp/auto-discovery.js +794 -0
  109. package/dist/mcp/client.d.ts +66 -0
  110. package/dist/mcp/client.js +245 -0
  111. package/dist/mcp/config.d.ts +31 -0
  112. package/dist/mcp/config.js +74 -0
  113. package/dist/mcp/context-manager.d.ts +4 -4
  114. package/dist/mcp/context-manager.js +24 -18
  115. package/dist/mcp/factory.d.ts +28 -11
  116. package/dist/mcp/factory.js +36 -29
  117. package/dist/mcp/function-calling.d.ts +51 -0
  118. package/dist/mcp/function-calling.js +510 -0
  119. package/dist/mcp/index.d.ts +190 -0
  120. package/dist/mcp/index.js +156 -0
  121. package/dist/mcp/initialize-tools.d.ts +28 -0
  122. package/dist/mcp/initialize-tools.js +210 -0
  123. package/dist/mcp/initialize.d.ts +17 -0
  124. package/dist/mcp/initialize.js +51 -0
  125. package/dist/mcp/logging.d.ts +71 -0
  126. package/dist/mcp/logging.js +183 -0
  127. package/dist/mcp/manager.d.ts +67 -0
  128. package/dist/mcp/manager.js +176 -0
  129. package/dist/mcp/neurolink-mcp-client.d.ts +96 -0
  130. package/dist/mcp/neurolink-mcp-client.js +417 -0
  131. package/dist/mcp/orchestrator.d.ts +3 -3
  132. package/dist/mcp/orchestrator.js +46 -43
  133. package/dist/mcp/registry.d.ts +2 -2
  134. package/dist/mcp/registry.js +42 -33
  135. package/dist/mcp/servers/ai-providers/ai-analysis-tools.d.ts +1 -1
  136. package/dist/mcp/servers/ai-providers/ai-analysis-tools.js +205 -66
  137. package/dist/mcp/servers/ai-providers/ai-core-server.js +143 -99
  138. package/dist/mcp/servers/ai-providers/ai-workflow-tools.d.ts +6 -6
  139. package/dist/mcp/servers/ai-providers/ai-workflow-tools.js +404 -253
  140. package/dist/mcp/servers/utilities/utility-server.d.ts +8 -0
  141. package/dist/mcp/servers/utilities/utility-server.js +326 -0
  142. package/dist/mcp/tool-integration.d.ts +67 -0
  143. package/dist/mcp/tool-integration.js +179 -0
  144. package/dist/mcp/unified-registry.d.ts +269 -0
  145. package/dist/mcp/unified-registry.js +1411 -0
  146. package/dist/neurolink.d.ts +68 -6
  147. package/dist/neurolink.js +314 -42
  148. package/dist/providers/agent-enhanced-provider.d.ts +59 -0
  149. package/dist/providers/agent-enhanced-provider.js +242 -0
  150. package/dist/providers/amazonBedrock.d.ts +3 -3
  151. package/dist/providers/amazonBedrock.js +54 -50
  152. package/dist/providers/anthropic.d.ts +2 -2
  153. package/dist/providers/anthropic.js +92 -84
  154. package/dist/providers/azureOpenAI.d.ts +2 -2
  155. package/dist/providers/azureOpenAI.js +97 -86
  156. package/dist/providers/function-calling-provider.d.ts +70 -0
  157. package/dist/providers/function-calling-provider.js +359 -0
  158. package/dist/providers/googleAIStudio.d.ts +10 -5
  159. package/dist/providers/googleAIStudio.js +60 -38
  160. package/dist/providers/googleVertexAI.d.ts +3 -3
  161. package/dist/providers/googleVertexAI.js +96 -86
  162. package/dist/providers/huggingFace.d.ts +31 -0
  163. package/dist/providers/huggingFace.js +362 -0
  164. package/dist/providers/index.d.ts +14 -8
  165. package/dist/providers/index.js +18 -12
  166. package/dist/providers/mcp-provider.d.ts +62 -0
  167. package/dist/providers/mcp-provider.js +183 -0
  168. package/dist/providers/mistralAI.d.ts +32 -0
  169. package/dist/providers/mistralAI.js +223 -0
  170. package/dist/providers/ollama.d.ts +51 -0
  171. package/dist/providers/ollama.js +508 -0
  172. package/dist/providers/openAI.d.ts +7 -3
  173. package/dist/providers/openAI.js +45 -33
  174. package/dist/utils/logger.js +2 -2
  175. package/dist/utils/providerUtils.js +59 -22
  176. package/package.json +28 -4
package/dist/providers/openAI.js CHANGED
@@ -1,57 +1,65 @@
-import { openai } from '@ai-sdk/openai';
-import { streamText, generateText, Output } from 'ai';
-import { logger } from '../utils/logger.js';
+import { openai } from "@ai-sdk/openai";
+import { streamText, generateText, Output, } from "ai";
+import { logger } from "../utils/logger.js";
 // Default system context
 const DEFAULT_SYSTEM_CONTEXT = {
-    systemPrompt: 'You are a helpful AI assistant.'
+    systemPrompt: "You are a helpful AI assistant.",
 };
 // Configuration helpers
 const getOpenAIApiKey = () => {
     const apiKey = process.env.OPENAI_API_KEY;
     if (!apiKey) {
-        throw new Error('OPENAI_API_KEY environment variable is not set');
+        throw new Error("OPENAI_API_KEY environment variable is not set");
     }
     return apiKey;
 };
 const getOpenAIModel = () => {
-    return process.env.OPENAI_MODEL || 'gpt-4o';
+    return process.env.OPENAI_MODEL || "gpt-4o";
 };
 // OpenAI class with enhanced error handling
 export class OpenAI {
     modelName;
     model;
     constructor(modelName) {
-        const functionTag = 'OpenAI.constructor';
+        const functionTag = "OpenAI.constructor";
         this.modelName = modelName || getOpenAIModel();
         try {
-            logger.debug(`[${functionTag}] Function called`, { modelName: this.modelName });
+            logger.debug(`[${functionTag}] Function called`, {
+                modelName: this.modelName,
+            });
             // Set OpenAI API key as environment variable
             process.env.OPENAI_API_KEY = getOpenAIApiKey();
             this.model = openai(this.modelName);
             logger.debug(`[${functionTag}] Function result`, {
                 modelName: this.modelName,
-                success: true
+                success: true,
             });
         }
         catch (err) {
             logger.debug(`[${functionTag}] Exception`, {
-                message: 'Error in initializing OpenAI',
+                message: "Error in initializing OpenAI",
                 modelName: this.modelName,
-                err: String(err)
+                err: String(err),
             });
             throw err;
         }
     }
+    /**
+     * Get the underlying model for function calling
+     */
+    getModel() {
+        return this.model;
+    }
     async streamText(optionsOrPrompt, analysisSchema) {
-        const functionTag = 'OpenAI.streamText';
-        const provider = 'openai';
+        const functionTag = "OpenAI.streamText";
+        const provider = "openai";
         let chunkCount = 0;
         try {
             // Parse parameters - support both string and options object
-            const options = typeof optionsOrPrompt === 'string'
+            const options = typeof optionsOrPrompt === "string"
                 ? { prompt: optionsOrPrompt }
                 : optionsOrPrompt;
-            const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema } = options;
+            const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, } = options;
             // Use schema from options or fallback parameter
             const finalSchema = schema || analysisSchema;
             logger.debug(`[${functionTag}] Stream text started`, {
@@ -59,7 +67,7 @@ export class OpenAI {
                 modelName: this.modelName,
                 promptLength: prompt.length,
                 temperature,
-                maxTokens
+                maxTokens,
             });
             const streamOptions = {
                 model: this.model,
@@ -77,7 +85,7 @@ export class OpenAI {
                         error: errorMessage,
                         stack: errorStack,
                         promptLength: prompt.length,
-                        chunkCount
+                        chunkCount,
                     });
                 },
                 onFinish: (event) => {
@@ -88,7 +96,7 @@ export class OpenAI {
                         usage: event.usage,
                         totalChunks: chunkCount,
                         promptLength: prompt.length,
-                        responseLength: event.text?.length || 0
+                        responseLength: event.text?.length || 0,
                     });
                 },
                 onChunk: (event) => {
@@ -98,12 +106,14 @@ export class OpenAI {
                         modelName: this.modelName,
                         chunkNumber: chunkCount,
                         chunkLength: event.chunk.text?.length || 0,
-                        chunkType: event.chunk.type
+                        chunkType: event.chunk.type,
                     });
-                }
+                },
             };
             if (finalSchema) {
-                streamOptions.experimental_output = Output.object({ schema: finalSchema });
+                streamOptions.experimental_output = Output.object({
+                    schema: finalSchema,
+                });
             }
             const result = streamText(streamOptions);
             return result;
@@ -112,21 +122,21 @@ export class OpenAI {
             logger.debug(`[${functionTag}] Exception`, {
                 provider,
                 modelName: this.modelName,
-                message: 'Error in streaming text',
-                err: String(err)
+                message: "Error in streaming text",
+                err: String(err),
             });
             throw err; // Re-throw error to trigger fallback
         }
     }
     async generateText(optionsOrPrompt, analysisSchema) {
-        const functionTag = 'OpenAI.generateText';
-        const provider = 'openai';
+        const functionTag = "OpenAI.generateText";
+        const provider = "openai";
         try {
             // Parse parameters - support both string and options object
-            const options = typeof optionsOrPrompt === 'string'
+            const options = typeof optionsOrPrompt === "string"
                 ? { prompt: optionsOrPrompt }
                 : optionsOrPrompt;
-            const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema } = options;
+            const { prompt, temperature = 0.7, maxTokens = 500, systemPrompt = DEFAULT_SYSTEM_CONTEXT.systemPrompt, schema, } = options;
             // Use schema from options or fallback parameter
             const finalSchema = schema || analysisSchema;
             logger.debug(`[${functionTag}] Generate text started`, {
@@ -134,17 +144,19 @@ export class OpenAI {
                 modelName: this.modelName,
                 promptLength: prompt.length,
                 temperature,
-                maxTokens
+                maxTokens,
             });
             const generateOptions = {
                 model: this.model,
                 prompt: prompt,
                 system: systemPrompt,
                 temperature,
-                maxTokens
+                maxTokens,
             };
             if (finalSchema) {
-                generateOptions.experimental_output = Output.object({ schema: finalSchema });
+                generateOptions.experimental_output = Output.object({
+                    schema: finalSchema,
+                });
             }
             const result = await generateText(generateOptions);
             logger.debug(`[${functionTag}] Generate text completed`, {
@@ -152,7 +164,7 @@ export class OpenAI {
                 modelName: this.modelName,
                 usage: result.usage,
                 finishReason: result.finishReason,
-                responseLength: result.text?.length || 0
+                responseLength: result.text?.length || 0,
             });
             return result;
         }
@@ -160,8 +172,8 @@ export class OpenAI {
             logger.debug(`[${functionTag}] Exception`, {
                 provider,
                 modelName: this.modelName,
-                message: 'Error in generating text',
-                err: String(err)
+                message: "Error in generating text",
+                err: String(err),
             });
             throw err; // Re-throw error to trigger fallback
         }
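
For context, the provider surface shown above accepts either a bare prompt string or an options object (prompt, temperature, maxTokens, systemPrompt, schema), and 1.9.0 adds getModel() for function calling. A minimal usage sketch, assuming the compiled class can be imported from the dist path listed in the file table and that OPENAI_API_KEY is set; this is illustrative, not a documented entry point:

// Illustrative sketch only; the deep import path is an assumption based on the
// dist layout above, not a documented entry point.
import { OpenAI } from "@juspay/neurolink/dist/providers/openAI.js";

const provider = new OpenAI(); // defaults to OPENAI_MODEL or "gpt-4o"; throws if OPENAI_API_KEY is unset

// String form, still supported by the optionsOrPrompt parsing in the diff
const short = await provider.generateText("Reply with a single-word greeting.");
console.log(short.text);

// Options-object form, overriding the defaults shown in the diff
const detailed = await provider.generateText({
    prompt: "Summarize what an MCP server does in two sentences.",
    temperature: 0.2,
    maxTokens: 200,
    systemPrompt: "You are a concise technical writer.",
});
console.log(detailed.text, detailed.usage);

// New in 1.9.0: expose the underlying Vercel AI SDK model for function calling
const model = provider.getModel();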
package/dist/utils/logger.js CHANGED
@@ -5,7 +5,7 @@
  */
 export const logger = {
     debug: (...args) => {
-        if (process.env.NEUROLINK_DEBUG === 'true') {
+        if (process.env.NEUROLINK_DEBUG === "true") {
             console.log(...args);
         }
     },
@@ -21,5 +21,5 @@ export const logger = {
     },
     always: (...args) => {
         console.log(...args);
-    }
+    },
 };
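
The logger keeps its debug gate on the NEUROLINK_DEBUG environment variable; the hunks above only change quote style and trailing commas. A small sketch of the gate, with the same assumed deep import path convention as above:

// Illustrative sketch; the import path is an assumption based on the dist layout.
import { logger } from "@juspay/neurolink/dist/utils/logger.js";

process.env.NEUROLINK_DEBUG = "true"; // checked on every debug() call
logger.debug("[example] printed only while NEUROLINK_DEBUG === \"true\"");
logger.always("[example] printed unconditionally");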
package/dist/utils/providerUtils.js CHANGED
@@ -1,7 +1,7 @@
 /**
  * Utility functions for AI provider management
  */
-import { logger } from './logger.js';
+import { logger } from "./logger.js";
 /**
  * Get the best available provider based on preferences and availability
  * @param requestedProvider - Optional preferred provider name
@@ -12,8 +12,19 @@ export function getBestProvider(requestedProvider) {
     if (requestedProvider) {
         return requestedProvider;
     }
-    // Default fallback order based on environment variables - OpenAI first since it's most reliable
-    const providers = ['openai', 'vertex', 'bedrock', 'anthropic', 'azure', 'google-ai'];
+    // Default fallback order - Google AI (Gemini) first as primary provider
+    // Ollama last since it requires local setup
+    const providers = [
+        "google-ai",
+        "anthropic",
+        "openai",
+        "mistral",
+        "vertex",
+        "azure",
+        "huggingface",
+        "bedrock",
+        "ollama",
+    ];
     // Check which providers have their required environment variables
     for (const provider of providers) {
         if (isProviderConfigured(provider)) {
@@ -22,8 +33,8 @@ export function getBestProvider(requestedProvider) {
         }
     }
     // Default to bedrock if nothing is configured
-    logger.warn('[getBestProvider] No providers configured, defaulting to bedrock');
-    return 'bedrock';
+    logger.warn("[getBestProvider] No providers configured, defaulting to bedrock");
+    return "bedrock";
 }
 /**
  * Check if a provider has the minimum required configuration
@@ -32,26 +43,42 @@
  */
 function isProviderConfigured(provider) {
     switch (provider.toLowerCase()) {
-        case 'bedrock':
-        case 'amazon':
-        case 'aws':
+        case "bedrock":
+        case "amazon":
+        case "aws":
             return !!(process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY);
-        case 'vertex':
-        case 'google':
-        case 'gemini':
-            return !!(process.env.GOOGLE_VERTEX_PROJECT || process.env.GOOGLE_APPLICATION_CREDENTIALS);
-        case 'openai':
-        case 'gpt':
+        case "vertex":
+        case "google":
+        case "gemini":
+            return !!(process.env.GOOGLE_VERTEX_PROJECT ||
+                process.env.GOOGLE_APPLICATION_CREDENTIALS);
+        case "openai":
+        case "gpt":
             return !!process.env.OPENAI_API_KEY;
-        case 'anthropic':
-        case 'claude':
+        case "anthropic":
+        case "claude":
             return !!process.env.ANTHROPIC_API_KEY;
-        case 'azure':
-        case 'azure-openai':
+        case "azure":
+        case "azure-openai":
             return !!process.env.AZURE_OPENAI_API_KEY;
-        case 'google-ai':
-        case 'google-studio':
-            return !!(process.env.GOOGLE_AI_API_KEY || process.env.GOOGLE_GENERATIVE_AI_API_KEY);
+        case "google-ai":
+        case "google-studio":
+            return !!(process.env.GOOGLE_AI_API_KEY ||
+                process.env.GOOGLE_GENERATIVE_AI_API_KEY);
+        case "huggingface":
+        case "hugging-face":
+        case "hf":
+            return !!(process.env.HUGGINGFACE_API_KEY || process.env.HF_TOKEN);
+        case "ollama":
+        case "local":
+        case "local-ollama":
+            // For Ollama, we check if the service is potentially available
+            // This is a basic check - actual connectivity will be verified during usage
+            return true; // Ollama doesn't require environment variables, just local service
+        case "mistral":
+        case "mistral-ai":
+        case "mistralai":
+            return !!process.env.MISTRAL_API_KEY;
         default:
             return false;
     }
@@ -61,7 +88,17 @@ function isProviderConfigured(provider) {
  * @returns Array of available provider names
  */
 export function getAvailableProviders() {
-    return ['bedrock', 'vertex', 'openai', 'anthropic', 'azure', 'google-ai'];
+    return [
+        "bedrock",
+        "vertex",
+        "openai",
+        "anthropic",
+        "azure",
+        "google-ai",
+        "huggingface",
+        "ollama",
+        "mistral",
+    ];
 }
 /**
  * Validate provider name
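
The fallback-order change above is the behavioral core of this file: Google AI now leads, Mistral, Hugging Face, and Ollama are recognized, and Ollama is always treated as configured because it only needs a local service. A hedged sketch of how getBestProvider() resolves under the new order; the deep import path and the environment values are assumptions for illustration:

// Illustrative sketch; the deep import path and key values are assumptions.
import { getBestProvider, getAvailableProviders } from "@juspay/neurolink/dist/utils/providerUtils.js";

// An explicitly requested provider is returned as-is, configured or not.
console.log(getBestProvider("bedrock")); // "bedrock"

// Otherwise the new order applies: google-ai, anthropic, openai, mistral,
// vertex, azure, huggingface, bedrock, ollama.
process.env.ANTHROPIC_API_KEY = "placeholder-key";
process.env.GOOGLE_AI_API_KEY = "placeholder-key";
console.log(getBestProvider()); // "google-ai" - the first configured entry in the list

console.log(getAvailableProviders());
// ["bedrock", "vertex", "openai", "anthropic", "azure", "google-ai", "huggingface", "ollama", "mistral"]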
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@juspay/neurolink",
-  "version": "1.5.3",
-  "description": "Universal AI Development Platform with external MCP server integration, multi-provider support, and professional CLI. Connect to 65+ MCP servers for filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with OpenAI, Anthropic, Google Vertex AI, and AWS Bedrock.",
+  "version": "1.9.0",
+  "description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
   "author": {
     "name": "Juspay Technologies",
     "email": "support@juspay.in",
@@ -60,37 +60,59 @@
   },
   "dependencies": {
     "@ai-sdk/amazon-bedrock": "^1.0.0",
+    "@ai-sdk/anthropic": "^1.2.12",
     "@ai-sdk/google": "^1.2.19",
     "@ai-sdk/google-vertex": "^2.2.0",
+    "@ai-sdk/mistral": "^1.0.0",
     "@ai-sdk/openai": "^1.0.0",
+    "@google/generative-ai": "^0.24.1",
+    "@huggingface/inference": "^2.8.0",
+    "@modelcontextprotocol/sdk": "^1.13.0",
     "ai": "^4.0.0",
     "chalk": "^5.3.0",
+    "cors": "^2.8.5",
     "dotenv": "^16.5.0",
+    "express": "^5.1.0",
     "inquirer": "^9.2.15",
     "ora": "^7.0.1",
     "playwright": "^1.52.0",
+    "uuid": "^11.1.0",
     "yargs": "^17.7.2",
-    "zod": "^3.22.0"
+    "zod": "^3.22.0",
+    "zod-to-json-schema": "^3.24.5"
   },
   "devDependencies": {
     "@changesets/cli": "^2.26.2",
+    "@eslint/js": "^9.0.0",
+    "@semantic-release/changelog": "^6.0.3",
+    "@semantic-release/commit-analyzer": "^13.0.0",
+    "@semantic-release/git": "^10.0.1",
+    "@semantic-release/github": "^11.0.0",
+    "@semantic-release/npm": "^12.0.1",
+    "@semantic-release/release-notes-generator": "^14.0.1",
     "@sveltejs/adapter-auto": "^6.0.0",
     "@sveltejs/kit": "^2.16.0",
     "@sveltejs/package": "^2.0.0",
     "@sveltejs/vite-plugin-svelte": "^5.0.0",
+    "@types/cors": "^2.8.19",
+    "@types/express": "^5.0.3",
     "@types/inquirer": "^9.0.7",
     "@types/node": "^20.0.0",
     "@types/yargs": "^17.0.33",
+    "@typescript-eslint/eslint-plugin": "^8.0.0",
+    "@typescript-eslint/parser": "^8.0.0",
     "eslint": "^9.0.0",
     "prettier": "^3.0.0",
     "publint": "^0.3.2",
     "puppeteer": "^24.10.0",
+    "semantic-release": "^24.0.0",
     "svelte": "^5.0.0",
     "svelte-check": "^4.0.0",
     "tslib": "^2.4.1",
     "typescript": "^5.0.0",
     "vite": "^6.2.6",
-    "vitest": "^2.0.0"
+    "vitest": "^2.0.0",
+    "why-is-node-running": "^3.2.2"
   },
   "keywords": [
     "ai",
@@ -144,6 +166,8 @@
     "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
     "test": "vitest",
     "test:run": "vitest run",
+    "test:dynamic-models": "node test-dynamic-models.js",
+    "model-server": "node scripts/model-server.js",
     "lint": "prettier --check . && eslint .",
     "format": "prettier --write .",
     "changeset": "changeset",