@juspay/neurolink 7.19.0 → 7.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/cli/index.js +9 -1
- package/dist/lib/providers/litellm.js +4 -11
- package/dist/providers/litellm.js +4 -11
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,15 @@
|
|
|
1
|
+
## [7.21.0](https://github.com/juspay/neurolink/compare/v7.20.0...v7.21.0) (2025-08-19)
|
|
2
|
+
|
|
3
|
+
### Features
|
|
4
|
+
|
|
5
|
+
- **(provider):** add env-based fallback for available models (BZ-43348) ([4b6cee3](https://github.com/juspay/neurolink/commit/4b6cee3c19b2b2512b8d236a49b29e2091343195))
|
|
6
|
+
|
|
7
|
+
## [7.20.0](https://github.com/juspay/neurolink/compare/v7.19.0...v7.20.0) (2025-08-19)
|
|
8
|
+
|
|
9
|
+
### Features
|
|
10
|
+
|
|
11
|
+
- **(cli):** add --version flag to display package version ([632eb7c](https://github.com/juspay/neurolink/commit/632eb7ca93024dd055dc626951c5a05153d4eda7))
|
|
12
|
+
|
|
1
13
|
## [7.19.0](https://github.com/juspay/neurolink/compare/v7.18.0...v7.19.0) (2025-08-19)
|
|
2
14
|
|
|
3
15
|
### Features
|
package/dist/cli/index.js
CHANGED
|
@@ -8,10 +8,18 @@
|
|
|
8
8
|
import yargs from "yargs";
|
|
9
9
|
import { hideBin } from "yargs/helpers";
|
|
10
10
|
import chalk from "chalk";
|
|
11
|
+
import _fs from "fs";
|
|
12
|
+
import path from "path";
|
|
13
|
+
import { fileURLToPath } from "url";
|
|
11
14
|
import { addOllamaCommands } from "./commands/ollama.js";
|
|
12
15
|
import { addSageMakerCommands } from "./commands/sagemaker.js";
|
|
13
16
|
import { CLICommandFactory } from "./factories/commandFactory.js";
|
|
14
17
|
import { logger } from "../lib/utils/logger.js";
|
|
18
|
+
// Get version from package.json
|
|
19
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
20
|
+
const __dirname = path.dirname(__filename);
|
|
21
|
+
const packageJson = JSON.parse(_fs.readFileSync(path.resolve(__dirname, "../../package.json"), "utf-8"));
|
|
22
|
+
const cliVersion = packageJson.version;
|
|
15
23
|
// Load environment variables from .env file
|
|
16
24
|
try {
|
|
17
25
|
// Try to import and configure dotenv
|
|
@@ -110,7 +118,7 @@ const args = hideBin(process.argv);
|
|
|
110
118
|
const cli = yargs(args)
|
|
111
119
|
.scriptName("neurolink")
|
|
112
120
|
.usage("Usage: $0 <command> [options]")
|
|
113
|
-
.version()
|
|
121
|
+
.version(cliVersion)
|
|
114
122
|
.help()
|
|
115
123
|
.alias("h", "help")
|
|
116
124
|
.alias("V", "version")
|
|
package/dist/lib/providers/litellm.js
CHANGED
|
@@ -194,18 +194,11 @@ export class LiteLLMProvider extends BaseProvider {
|
|
|
194
194
|
});
|
|
195
195
|
}
|
|
196
196
|
// Fallback to hardcoded list if API fetch fails
|
|
197
|
-
const fallbackModels = [
|
|
198
|
-
"openai/gpt-4o",
|
|
199
|
-
"
|
|
200
|
-
"openai/gpt-3.5-turbo",
|
|
201
|
-
"anthropic/claude-3-5-sonnet-20241022",
|
|
202
|
-
"anthropic/claude-3-haiku-20240307",
|
|
203
|
-
"google/gemini-2.0-flash",
|
|
204
|
-
"google/gemini-1.5-pro",
|
|
205
|
-
"mistral/mistral-large-latest",
|
|
206
|
-
"mistral/mistral-medium-latest",
|
|
197
|
+
const fallbackModels = process.env.LITELLM_FALLBACK_MODELS?.split(",").map((m) => m.trim()) || [
|
|
198
|
+
"openai/gpt-4o", // minimal safe baseline
|
|
199
|
+
"anthropic/claude-3-haiku",
|
|
207
200
|
"meta-llama/llama-3.1-8b-instruct",
|
|
208
|
-
"
|
|
201
|
+
"google/gemini-2.5-flash",
|
|
209
202
|
];
|
|
210
203
|
logger.debug(`[${functionTag}] Using fallback model list`, {
|
|
211
204
|
modelCount: fallbackModels.length,
|
|
package/dist/providers/litellm.js
CHANGED
|
@@ -194,18 +194,11 @@ export class LiteLLMProvider extends BaseProvider {
|
|
|
194
194
|
});
|
|
195
195
|
}
|
|
196
196
|
// Fallback to hardcoded list if API fetch fails
|
|
197
|
-
const fallbackModels = [
|
|
198
|
-
"openai/gpt-4o",
|
|
199
|
-
"
|
|
200
|
-
"openai/gpt-3.5-turbo",
|
|
201
|
-
"anthropic/claude-3-5-sonnet-20241022",
|
|
202
|
-
"anthropic/claude-3-haiku-20240307",
|
|
203
|
-
"google/gemini-2.0-flash",
|
|
204
|
-
"google/gemini-1.5-pro",
|
|
205
|
-
"mistral/mistral-large-latest",
|
|
206
|
-
"mistral/mistral-medium-latest",
|
|
197
|
+
const fallbackModels = process.env.LITELLM_FALLBACK_MODELS?.split(",").map((m) => m.trim()) || [
|
|
198
|
+
"openai/gpt-4o", // minimal safe baseline
|
|
199
|
+
"anthropic/claude-3-haiku",
|
|
207
200
|
"meta-llama/llama-3.1-8b-instruct",
|
|
208
|
-
"
|
|
201
|
+
"google/gemini-2.5-flash",
|
|
209
202
|
];
|
|
210
203
|
logger.debug(`[${functionTag}] Using fallback model list`, {
|
|
211
204
|
modelCount: fallbackModels.length,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@juspay/neurolink",
|
|
3
|
-
"version": "7.19.0",
|
|
3
|
+
"version": "7.21.0",
|
|
4
4
|
"description": "Universal AI Development Platform with working MCP integration, multi-provider support, and professional CLI. Built-in tools operational, 58+ external MCP servers discoverable. Connect to filesystem, GitHub, database operations, and more. Build, test, and deploy AI applications with 9 major providers: OpenAI, Anthropic, Google AI, AWS Bedrock, Azure, Hugging Face, Ollama, and Mistral AI.",
|
|
5
5
|
"author": {
|
|
6
6
|
"name": "Juspay Technologies",
|