@juspay/neurolink 7.25.0 → 7.26.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/cli/factories/commandFactory.d.ts +8 -0
- package/dist/cli/factories/commandFactory.js +14 -0
- package/dist/cli/factories/ollamaCommandFactory.d.ts +38 -0
- package/dist/cli/factories/ollamaCommandFactory.js +375 -0
- package/dist/cli/factories/sagemakerCommandFactory.d.ts +62 -0
- package/dist/cli/factories/sagemakerCommandFactory.js +783 -0
- package/dist/cli/index.js +5 -7
- package/dist/core/baseProvider.js +42 -0
- package/dist/core/evaluationProviders.d.ts +26 -0
- package/dist/core/evaluationProviders.js +160 -0
- package/dist/lib/core/baseProvider.js +42 -0
- package/dist/lib/core/evaluationProviders.d.ts +26 -0
- package/dist/lib/core/evaluationProviders.js +160 -0
- package/package.json +1 -1
- package/dist/cli/commands/ollama.d.ts +0 -3
- package/dist/cli/commands/ollama.js +0 -339
- package/dist/cli/commands/sagemaker.d.ts +0 -11
- package/dist/cli/commands/sagemaker.js +0 -779
|
@@ -1,339 +0,0 @@
|
|
|
1
|
-
import { execFileSync, execSync } from "child_process";
import chalk from "chalk";
import ora from "ora";
import inquirer from "inquirer";
import { logger } from "../../lib/utils/logger.js";
|
|
6
|
-
/**
 * Registers the `ollama <command>` group on the CLI: model management
 * (list-models / pull / remove), service lifecycle (status / start / stop),
 * and an interactive setup wizard. Requires one sub-command.
 */
export function addOllamaCommands(cli) {
    // Both `pull` and `remove` take an identical required <model> positional;
    // build the yargs option spec from one factory instead of repeating it.
    const modelOption = (describe) => ({
        model: {
            describe,
            type: "string",
            demandOption: true,
        },
    });
    cli.command("ollama <command>", "Manage Ollama local AI models", (yargs) => yargs
        .command("list-models", "List installed Ollama models", {}, listModelsHandler)
        .command("pull <model>", "Download an Ollama model", modelOption("Model name to download"), pullModelHandler)
        .command("remove <model>", "Remove an Ollama model", modelOption("Model name to remove"), removeModelHandler)
        .command("status", "Check Ollama service status", {}, statusHandler)
        .command("start", "Start Ollama service", {}, startHandler)
        .command("stop", "Stop Ollama service", {}, stopHandler)
        .command("setup", "Interactive Ollama setup", {}, setupHandler)
        .demandCommand(1, "Please specify a command"), () => { });
}
|
|
31
|
-
/**
 * `ollama list-models`: prints the locally installed Ollama models, or a
 * hint about pulling one when the list is empty. Exits with code 1 when
 * the `ollama` binary cannot be invoked.
 */
async function listModelsHandler() {
    const spinner = ora("Fetching installed models...").start();
    let output;
    try {
        output = execSync("ollama list", { encoding: "utf8" });
    }
    catch (error) {
        spinner.fail("Failed to list models. Is Ollama installed?");
        const errorMessage = error instanceof Error ? error.message : String(error);
        logger.error(chalk.red("Error:", errorMessage));
        logger.always(chalk.blue("\nTip: Install Ollama from https://ollama.ai"));
        process.exit(1);
    }
    spinner.succeed("Installed models:");
    // Empty output means no models are installed yet — point at `pull`.
    const message = output.trim()
        ? output
        : chalk.yellow('No models installed. Use "neurolink ollama pull <model>" to download a model.');
    logger.always(message);
}
|
|
51
|
-
/**
 * `ollama pull <model>`: downloads the named model, streaming Ollama's own
 * progress output to the terminal (`stdio: "inherit"`). Exits with code 1
 * when the download fails.
 *
 * @param argv - yargs argv containing the required `model` string.
 */
async function pullModelHandler(argv) {
    const { model } = argv;
    logger.always(chalk.blue(`Downloading model: ${model}`));
    logger.always(chalk.gray("This may take several minutes..."));
    try {
        // Security fix: the model name is user input. Passing it as a discrete
        // argv entry via execFileSync (no shell) prevents command injection
        // that the previous `execSync(`ollama pull ${model}`)` allowed, e.g.
        // a "model" of `x; rm -rf ~`.
        execFileSync("ollama", ["pull", model], { stdio: "inherit" });
        logger.always(chalk.green(`\n✅ Successfully downloaded ${model}`));
        logger.always(chalk.blue(`\nTest it with: npx @juspay/neurolink generate "Hello!" --provider ollama --model ${model}`));
    }
    catch (error) {
        logger.error(chalk.red(`\n❌ Failed to download ${model}`));
        const errorMessage = error instanceof Error ? error.message : String(error);
        logger.error(chalk.red("Error:", errorMessage));
        process.exit(1);
    }
}
|
|
67
|
-
/**
 * `ollama remove <model>`: interactively confirms, then deletes the named
 * model via `ollama rm`. A declined confirmation returns without error;
 * a failed removal exits with code 1.
 *
 * @param argv - yargs argv containing the required `model` string.
 */
async function removeModelHandler(argv) {
    const { model } = argv;
    // Confirm removal (defaults to "no" — deletion is destructive).
    const { confirm } = await inquirer.prompt([
        {
            type: "confirm",
            name: "confirm",
            message: `Are you sure you want to remove model "${model}"?`,
            default: false,
        },
    ]);
    if (!confirm) {
        logger.always(chalk.yellow("Removal cancelled."));
        return;
    }
    const spinner = ora(`Removing model ${model}...`).start();
    try {
        // Security fix: pass the user-supplied model name as a literal argv
        // entry (no shell) so metacharacters cannot inject extra commands,
        // unlike the previous `execSync(`ollama rm ${model}`)`.
        execFileSync("ollama", ["rm", model], { encoding: "utf8" });
        spinner.succeed(`Successfully removed ${model}`);
    }
    catch (error) {
        spinner.fail(`Failed to remove ${model}`);
        const errorMessage = error instanceof Error ? error.message : String(error);
        logger.error(chalk.red("Error:", errorMessage));
        process.exit(1);
    }
}
|
|
94
|
-
/**
 * `ollama status`: probes the Ollama service and reports whether it is up.
 * When the local HTTP API is reachable, also prints how many models are
 * available. Exits with code 1 when the service is down.
 */
async function statusHandler() {
    const spinner = ora("Checking Ollama service status...").start();
    // Guard clause: a trivial CLI call doubles as a liveness probe.
    try {
        execSync("ollama list", { encoding: "utf8" });
    }
    catch (error) {
        spinner.fail("Ollama service is not running");
        logger.debug && logger.debug("Ollama status check failed:", error);
        logger.always(chalk.yellow("\nStart Ollama with: ollama serve"));
        logger.always(chalk.blue("Or restart the Ollama app if using the desktop version"));
        process.exit(1);
    }
    spinner.succeed("Ollama service is running");
    // Best-effort enrichment via the local REST API.
    try {
        const response = execSync("curl -s http://localhost:11434/api/tags", {
            encoding: "utf8",
        });
        const data = JSON.parse(response);
        if (data.models && data.models.length > 0) {
            logger.always(chalk.green(`\n${data.models.length} models available`));
        }
    }
    catch (error) {
        // Curl might not be available, that's ok. Error is ignored.
        logger.debug &&
            logger.debug("Optional curl command failed in statusHandler:", error);
    }
}
|
|
124
|
-
/**
 * `ollama start`: starts the Ollama service if it is not already running,
 * using a platform-appropriate strategy (macOS app / systemd / Windows
 * `start`), with a detached `ollama serve` as the Unix fallback.
 * Exits with code 1 when every strategy fails.
 */
async function startHandler() {
    logger.always(chalk.blue("Starting Ollama service..."));
    try {
        // If a trivial CLI call succeeds, the service is already up.
        let alreadyRunning = true;
        try {
            execSync("ollama list", { encoding: "utf8" });
        }
        catch {
            alreadyRunning = false;
        }
        if (alreadyRunning) {
            logger.always(chalk.yellow("Ollama service is already running!"));
            return;
        }
        // Unix fallback: launch `ollama serve` detached in the background.
        const launchDetached = () => {
            execSync("ollama serve > /dev/null 2>&1 &", { stdio: "ignore" });
            logger.always(chalk.green("✅ Ollama service started"));
        };
        switch (process.platform) {
            case "darwin":
                logger.always(chalk.gray("Starting Ollama on macOS..."));
                try {
                    execSync("open -a Ollama");
                    logger.always(chalk.green("✅ Ollama app started"));
                }
                catch {
                    launchDetached();
                }
                break;
            case "linux":
                logger.always(chalk.gray("Starting Ollama service on Linux..."));
                try {
                    execSync("systemctl start ollama", { encoding: "utf8" });
                    logger.always(chalk.green("✅ Ollama service started"));
                }
                catch {
                    launchDetached();
                }
                break;
            default:
                // Windows (and anything else): rely on `start`.
                logger.always(chalk.gray("Starting Ollama on Windows..."));
                execSync("start ollama serve", { stdio: "ignore" });
                logger.always(chalk.green("✅ Ollama service started"));
        }
        logger.always(chalk.blue("\nWait a few seconds for the service to initialize..."));
    }
    catch (error) {
        logger.error(chalk.red("Failed to start Ollama service"));
        const errorMessage = error instanceof Error ? error.message : String(error);
        logger.error(chalk.red("Error:", errorMessage));
        logger.always(chalk.blue("\nTry starting Ollama manually or check installation"));
        process.exit(1);
    }
}
|
|
179
|
-
/**
 * `ollama stop`: stops the Ollama service using a platform-specific primary
 * command with an optional fallback (macOS: pkill → killall; Linux:
 * systemctl → pkill; otherwise Windows taskkill). Failure is reported but
 * does not exit the process — the service may simply not be running.
 */
async function stopHandler() {
    const spinner = ora("Stopping Ollama service...").start();
    try {
        // [primary, fallback] per platform; Windows has no fallback.
        const commands = {
            darwin: ["pkill ollama", "killall Ollama"],
            linux: ["systemctl stop ollama", "pkill ollama"],
        }[process.platform] || ["taskkill /F /IM ollama.exe"];
        const [primary, fallback] = commands;
        try {
            execSync(primary, { encoding: "utf8" });
        }
        catch (primaryError) {
            if (!fallback) {
                throw primaryError;
            }
            execSync(fallback, { encoding: "utf8" });
        }
        spinner.succeed("Ollama service stopped");
    }
    catch (err) {
        spinner.fail("Failed to stop Ollama service");
        logger.error(chalk.red("It may not be running or requires manual stop"));
        logger.error(chalk.red(`Error details: ${err}`));
    }
}
|
|
212
|
-
/**
 * `ollama setup`: interactive wizard that (1) checks the Ollama install,
 * (2) offers to start the service if it is down, (3) optionally downloads
 * a model chosen from a curated list or entered manually, and (4) prints
 * next-step usage hints. Delegates to startHandler/pullModelHandler.
 */
async function setupHandler() {
    logger.always(chalk.blue("🦙 Welcome to Ollama Setup!\n"));
    // Check if Ollama is installed
    const checkSpinner = ora("Checking Ollama installation...").start();
    let isInstalled = false;
    try {
        // `--version` succeeds iff the binary is on PATH.
        execSync("ollama --version", { encoding: "utf8" });
        isInstalled = true;
        checkSpinner.succeed("Ollama is installed");
    }
    catch {
        checkSpinner.fail("Ollama is not installed");
    }
    if (!isInstalled) {
        // Print platform-specific install instructions, then let the user
        // bail out (default) or continue anyway.
        logger.always(chalk.yellow("\nOllama needs to be installed first."));
        logger.always(chalk.blue("\nInstallation instructions:"));
        if (process.platform === "darwin") {
            logger.always("\nFor macOS:");
            logger.always(chalk.gray(" brew install ollama"));
            logger.always(chalk.gray(" # or download from https://ollama.ai"));
        }
        else if (process.platform === "linux") {
            logger.always("\nFor Linux:");
            logger.always(chalk.gray(" curl -fsSL https://ollama.ai/install.sh | sh"));
        }
        else {
            logger.always("\nFor Windows:");
            logger.always(chalk.gray(" Download from https://ollama.ai"));
        }
        const { proceedAnyway } = await inquirer.prompt([
            {
                type: "confirm",
                name: "proceedAnyway",
                message: "Would you like to continue with setup anyway?",
                default: false,
            },
        ]);
        if (!proceedAnyway) {
            logger.always(chalk.blue("\nInstall Ollama and run setup again!"));
            return;
        }
    }
    // Check if service is running
    let serviceRunning = false;
    try {
        execSync("ollama list", { encoding: "utf8" });
        serviceRunning = true;
        logger.always(chalk.green("\n✅ Ollama service is running"));
    }
    catch {
        logger.always(chalk.yellow("\n⚠️ Ollama service is not running"));
        const { startService } = await inquirer.prompt([
            {
                type: "confirm",
                name: "startService",
                message: "Would you like to start the Ollama service?",
                default: true,
            },
        ]);
        if (startService) {
            await startHandler();
            // NOTE(review): assumed to have started — startHandler exits the
            // process on failure, so reaching here implies success.
            serviceRunning = true;
        }
    }
    if (serviceRunning) {
        // List available models
        logger.always(chalk.blue("\n📦 Popular Ollama models:"));
        logger.always(" • llama2 (7B) - General purpose");
        logger.always(" • codellama (7B) - Code generation");
        logger.always(" • mistral (7B) - Fast and efficient");
        logger.always(" • tinyllama (1B) - Lightweight");
        logger.always(" • phi (2.7B) - Microsoft's compact model");
        const { downloadModel } = await inquirer.prompt([
            {
                type: "confirm",
                name: "downloadModel",
                message: "Would you like to download a model?",
                default: true,
            },
        ]);
        if (downloadModel) {
            const { selectedModel } = await inquirer.prompt([
                {
                    type: "list",
                    name: "selectedModel",
                    message: "Select a model to download:",
                    choices: [
                        {
                            name: "llama2 (7B) - Recommended for general use",
                            value: "llama2",
                        },
                        {
                            name: "codellama (7B) - Best for code generation",
                            value: "codellama",
                        },
                        { name: "mistral (7B) - Fast and efficient", value: "mistral" },
                        { name: "tinyllama (1B) - Lightweight, fast", value: "tinyllama" },
                        { name: "phi (2.7B) - Microsoft's compact model", value: "phi" },
                        { name: "Other (enter manually)", value: "other" },
                    ],
                },
            ]);
            let modelToDownload = selectedModel;
            // "Other" switches to free-text entry of any model name.
            if (selectedModel === "other") {
                const { customModel } = await inquirer.prompt([
                    {
                        type: "input",
                        name: "customModel",
                        message: "Enter the model name:",
                        validate: (input) => input.trim().length > 0 || "Model name is required",
                    },
                ]);
                modelToDownload = customModel;
            }
            // Reuse the pull handler; it streams progress and exits on failure.
            await pullModelHandler({ model: modelToDownload });
        }
    }
    // Final instructions
    logger.always(chalk.green("\n✅ Setup complete!\n"));
    logger.always(chalk.blue("Next steps:"));
    logger.always("1. List models: " + chalk.gray("neurolink ollama list-models"));
    logger.always("2. Generate text: " +
        chalk.gray('neurolink generate "Hello!" --provider ollama'));
    logger.always("3. Use specific model: " +
        chalk.gray('neurolink generate "Hello!" --provider ollama --model codellama'));
    logger.always(chalk.gray("\nFor more information, see: https://docs.neurolink.ai/providers/ollama"));
}
|
|
339
|
-
export default addOllamaCommands;
|
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
/**
 * SageMaker CLI Commands
 *
 * Provides comprehensive command-line interface for Amazon SageMaker operations
 * including configuration management, endpoint testing, and model deployment.
 */
import type { Argv } from "yargs";
/**
 * Add SageMaker commands to the CLI
 *
 * @param cli - The yargs CLI builder to register the SageMaker command group on.
 */
export declare function addSageMakerCommands(cli: Argv): void;
|