@juspay/neurolink 7.28.0 → 7.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/cli/commands/ollama.d.ts +3 -0
- package/dist/cli/commands/ollama.js +288 -0
- package/dist/cli/factories/ollamaCommandFactory.d.ts +4 -0
- package/dist/cli/factories/ollamaCommandFactory.js +86 -93
- package/dist/cli/utils/ollamaUtils.d.ts +24 -0
- package/dist/cli/utils/ollamaUtils.js +161 -0
- package/dist/core/baseProvider.js +4 -2
- package/dist/index.d.ts +2 -3
- package/dist/index.js +1 -2
- package/dist/lib/core/baseProvider.js +4 -2
- package/dist/lib/core/dynamicModels.d.ts +6 -6
- package/dist/lib/index.d.ts +2 -3
- package/dist/lib/index.js +1 -2
- package/dist/lib/middleware/builtin/analytics.js +13 -14
- package/dist/lib/middleware/builtin/guardrails.d.ts +20 -0
- package/dist/lib/middleware/builtin/guardrails.js +87 -0
- package/dist/lib/middleware/factory.d.ts +29 -14
- package/dist/lib/middleware/factory.js +136 -110
- package/dist/lib/middleware/index.d.ts +3 -49
- package/dist/lib/middleware/index.js +4 -58
- package/dist/lib/middleware/registry.d.ts +1 -3
- package/dist/lib/middleware/registry.js +4 -5
- package/dist/lib/middleware/types.d.ts +3 -1
- package/dist/middleware/builtin/analytics.js +13 -14
- package/dist/middleware/builtin/guardrails.d.ts +20 -0
- package/dist/middleware/builtin/guardrails.js +87 -0
- package/dist/middleware/factory.d.ts +29 -14
- package/dist/middleware/factory.js +136 -110
- package/dist/middleware/index.d.ts +3 -49
- package/dist/middleware/index.js +4 -58
- package/dist/middleware/registry.d.ts +1 -3
- package/dist/middleware/registry.js +4 -5
- package/dist/middleware/types.d.ts +3 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,16 @@
+## [7.29.0](https://github.com/juspay/neurolink/compare/v7.28.1...v7.29.0) (2025-08-26)
+
+### Features
+
+- **(guardrails):** added guardrails as a middleware ([ac60f6b](https://github.com/juspay/neurolink/commit/ac60f6b143a58f86e17481ddb3e067e5307391cf))
+
+## [7.28.1](https://github.com/juspay/neurolink/compare/v7.28.0...v7.28.1) (2025-08-26)
+
+### Bug Fixes
+
+- **(cli):** resolve ESM interop and spawn synchronization issues ([4983221](https://github.com/juspay/neurolink/commit/49832210cd56df14e7cb77925fcc89c1cc72c046))
+- **(security):** prevent command injection in ollama pull ([27e6088](https://github.com/juspay/neurolink/commit/27e6088aa9e2d7dddaa1839d777e6b642e095549))
+
 ## [7.28.0](https://github.com/juspay/neurolink/compare/v7.27.0...v7.28.0) (2025-08-25)

 ### Features
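Note on the `(security)` fix above: throughout the diff below, shell-interpolated `execSync` strings are replaced with `spawnSync` calls that take an argument array. A minimal sketch of why that closes the injection hole, using a hypothetical attacker-controlled model name (not code from this package):

```ts
import { execSync, spawnSync } from "child_process";

const model = 'llama2; rm -rf "$HOME"'; // hypothetical malicious input

// Vulnerable: execSync hands the whole string to a shell, so the ";"
// terminates "ollama pull llama2" and runs a second command.
// execSync(`ollama pull ${model}`);

// Safe: with an argument array and no shell, the entire string reaches the
// ollama binary as a single literal argument and cannot start new commands.
const res = spawnSync("ollama", ["pull", model], { encoding: "utf8" });
```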
package/dist/cli/commands/ollama.js
ADDED
@@ -0,0 +1,288 @@
+import { spawnSync } from "child_process";
+import chalk from "chalk";
+import ora from "ora";
+import inquirer from "inquirer";
+import { logger } from "../../lib/utils/logger.js";
+import { OllamaUtils } from "../utils/ollamaUtils.js";
+export function addOllamaCommands(cli) {
+    cli.command("ollama <command>", "Manage Ollama local AI models", (yargs) => {
+        return yargs
+            .command("list-models", "List installed Ollama models", {}, listModelsHandler)
+            .command("pull <model>", "Download an Ollama model", {
+            model: {
+                describe: "Model name to download",
+                type: "string",
+                demandOption: true,
+            },
+        }, pullModelHandler)
+            .command("remove <model>", "Remove an Ollama model", {
+            model: {
+                describe: "Model name to remove",
+                type: "string",
+                demandOption: true,
+            },
+        }, removeModelHandler)
+            .command("status", "Check Ollama service status", {}, statusHandler)
+            .command("start", "Start Ollama service", {}, startHandler)
+            .command("stop", "Stop Ollama service", {}, stopHandler)
+            .command("setup", "Interactive Ollama setup", {}, setupHandler)
+            .demandCommand(1, "Please specify a command");
+    }, () => { });
+}
+async function listModelsHandler() {
+    const spinner = ora("Fetching installed models...").start();
+    try {
+        const res = spawnSync("ollama", ["list"], { encoding: "utf8" });
+        if (res.error) {
+            throw res.error;
+        }
+        spinner.succeed("Installed models:");
+        const output = res.stdout?.toString().trim();
+        if (output) {
+            logger.always(output);
+        }
+        else {
+            logger.always(chalk.yellow('No models installed. Use "neurolink ollama pull <model>" to download a model.'));
+        }
+    }
+    catch (error) {
+        spinner.fail("Failed to list models. Is Ollama installed?");
+        const errorMessage = error instanceof Error ? error.message : String(error);
+        logger.error(chalk.red("Error:", errorMessage));
+        logger.always(chalk.blue("\nTip: Install Ollama from https://ollama.ai"));
+        process.exit(1);
+    }
+}
+async function pullModelHandler(argv) {
+    const { model } = argv;
+    logger.always(chalk.blue(`Downloading model: ${model}`));
+    logger.always(chalk.gray("This may take several minutes..."));
+    try {
+        const res = spawnSync("ollama", ["pull", model], { stdio: "inherit" });
+        if (res.error) {
+            throw res.error;
+        }
+        if (res.status !== 0) {
+            throw new Error(`ollama pull exited with code ${res.status}`);
+        }
+        logger.always(chalk.green(`\n✅ Successfully downloaded ${model}`));
+        logger.always(chalk.blue(`\nTest it with: npx @juspay/neurolink generate "Hello!" --provider ollama --model ${model}`));
+    }
+    catch (error) {
+        logger.error(chalk.red(`\n❌ Failed to download ${model}`));
+        const errorMessage = error instanceof Error ? error.message : String(error);
+        logger.error(chalk.red("Error:", errorMessage));
+        process.exit(1);
+    }
+}
+async function removeModelHandler(argv) {
+    const { model } = argv;
+    const { confirm } = await inquirer.prompt([
+        {
+            type: "confirm",
+            name: "confirm",
+            message: `Are you sure you want to remove model "${model}"?`,
+            default: false,
+        },
+    ]);
+    if (!confirm) {
+        logger.always(chalk.yellow("Removal cancelled."));
+        return;
+    }
+    const spinner = ora(`Removing model ${model}...`).start();
+    try {
+        const res = spawnSync("ollama", ["rm", model], { encoding: "utf8" });
+        if (res.error) {
+            throw res.error;
+        }
+        if (res.status !== 0) {
+            throw new Error(`ollama rm exited with ${res.status}`);
+        }
+        spinner.succeed(`Successfully removed ${model}`);
+    }
+    catch (error) {
+        spinner.fail(`Failed to remove ${model}`);
+        const errorMessage = error instanceof Error ? error.message : String(error);
+        logger.error(chalk.red("Error:", errorMessage));
+        process.exit(1);
+    }
+}
+async function statusHandler() {
+    const spinner = ora("Checking Ollama service status...").start();
+    try {
+        const res = spawnSync("ollama", ["list"], { encoding: "utf8" });
+        if (res.error) {
+            throw res.error;
+        }
+        if (res.status !== 0) {
+            throw new Error("Ollama not running");
+        }
+        spinner.succeed("Ollama service is running");
+    }
+    catch (error) {
+        spinner.fail("Ollama service is not running");
+        logger.debug("Ollama status check failed:", error);
+        logger.always(chalk.yellow("\nStart Ollama with: ollama serve"));
+        process.exit(1);
+    }
+}
+async function startHandler() {
+    await OllamaUtils.startOllamaService();
+}
+async function stopHandler() {
+    const spinner = ora("Stopping Ollama service...").start();
+    try {
+        if (process.platform === "darwin") {
+            try {
+                spawnSync("pkill", ["ollama"], { encoding: "utf8" });
+            }
+            catch {
+                spawnSync("killall", ["Ollama"], { encoding: "utf8" });
+            }
+        }
+        else if (process.platform === "linux") {
+            try {
+                spawnSync("systemctl", ["stop", "ollama"], { encoding: "utf8" });
+            }
+            catch {
+                spawnSync("pkill", ["ollama"], { encoding: "utf8" });
+            }
+        }
+        else {
+            spawnSync("taskkill", ["/F", "/IM", "ollama.exe"], { encoding: "utf8" });
+        }
+        spinner.succeed("Ollama service stopped");
+    }
+    catch (err) {
+        spinner.fail("Failed to stop Ollama service");
+        logger.error(chalk.red("It may not be running or requires manual stop"));
+        logger.error(chalk.red(`Error details: ${err}`));
+    }
+}
+async function setupHandler() {
+    logger.always(chalk.blue("🦙 Welcome to Ollama Setup!\n"));
+    // Check installation
+    const checkSpinner = ora("Checking Ollama installation...").start();
+    let isInstalled = false;
+    try {
+        spawnSync("ollama", ["--version"], { encoding: "utf8" });
+        isInstalled = true;
+        checkSpinner.succeed("Ollama is installed");
+    }
+    catch {
+        checkSpinner.fail("Ollama is not installed");
+    }
+    if (!isInstalled) {
+        logger.always(chalk.yellow("\nOllama needs to be installed first."));
+        logger.always(chalk.blue("\nInstallation instructions:"));
+        if (process.platform === "darwin") {
+            logger.always("\nFor macOS:");
+            logger.always(chalk.gray(" brew install ollama"));
+            logger.always(chalk.gray(" # or download from https://ollama.ai"));
+        }
+        else if (process.platform === "linux") {
+            logger.always("\nFor Linux:");
+            logger.always(chalk.gray(" curl -fsSL https://ollama.ai/install.sh | sh"));
+        }
+        else {
+            logger.always("\nFor Windows:");
+            logger.always(chalk.gray(" Download from https://ollama.ai"));
+        }
+        const { proceedAnyway } = await inquirer.prompt([
+            {
+                type: "confirm",
+                name: "proceedAnyway",
+                message: "Would you like to continue with setup anyway?",
+                default: false,
+            },
+        ]);
+        if (!proceedAnyway) {
+            logger.always(chalk.blue("\nInstall Ollama and run setup again!"));
+            return;
+        }
+    }
+    // Check if service is running
+    let serviceRunning = false;
+    try {
+        spawnSync("ollama", ["list"], { encoding: "utf8" });
+        serviceRunning = true;
+        logger.always(chalk.green("\n✅ Ollama service is running"));
+    }
+    catch {
+        logger.always(chalk.yellow("\n⚠️ Ollama service is not running"));
+        const { startService } = await inquirer.prompt([
+            {
+                type: "confirm",
+                name: "startService",
+                message: "Would you like to start the Ollama service?",
+                default: true,
+            },
+        ]);
+        if (startService) {
+            await startHandler();
+            serviceRunning = true;
+        }
+    }
+    if (serviceRunning) {
+        // List available models
+        logger.always(chalk.blue("\n📦 Popular Ollama models:"));
+        logger.always(" • llama2 (7B) - General purpose");
+        logger.always(" • codellama (7B) - Code generation");
+        logger.always(" • mistral (7B) - Fast and efficient");
+        logger.always(" • tinyllama (1B) - Lightweight");
+        logger.always(" • phi (2.7B) - Microsoft's compact model");
+        const { downloadModel } = await inquirer.prompt([
+            {
+                type: "confirm",
+                name: "downloadModel",
+                message: "Would you like to download a model?",
+                default: true,
+            },
+        ]);
+        if (downloadModel) {
+            const { selectedModel } = await inquirer.prompt([
+                {
+                    type: "list",
+                    name: "selectedModel",
+                    message: "Select a model to download:",
+                    choices: [
+                        {
+                            name: "llama2 (7B) - Recommended for general use",
+                            value: "llama2",
+                        },
+                        {
+                            name: "codellama (7B) - Best for code generation",
+                            value: "codellama",
+                        },
+                        { name: "mistral (7B) - Fast and efficient", value: "mistral" },
+                        { name: "tinyllama (1B) - Lightweight, fast", value: "tinyllama" },
+                        { name: "phi (2.7B) - Microsoft's compact model", value: "phi" },
+                        { name: "Other (enter manually)", value: "other" },
+                    ],
+                },
+            ]);
+            let modelToDownload = selectedModel;
+            if (selectedModel === "other") {
+                const { customModel } = await inquirer.prompt([
+                    {
+                        type: "input",
+                        name: "customModel",
+                        message: "Enter the model name:",
+                        validate: (input) => input.trim().length > 0 || "Model name is required",
+                    },
+                ]);
+                modelToDownload = customModel;
+            }
+            await pullModelHandler({ model: modelToDownload });
+        }
+    }
+    logger.always(chalk.green("\n✅ Setup complete!\n"));
+    logger.always(chalk.blue("Next steps:"));
+    logger.always("1. List models: " + chalk.gray("neurolink ollama list-models"));
+    logger.always("2. Generate text: " +
+        chalk.gray('neurolink generate "Hello!" --provider ollama'));
+    logger.always("3. Use specific model: " +
+        chalk.gray('neurolink generate "Hello!" --provider ollama --model codellama'));
+    logger.always(chalk.gray("\nFor more information, see: https://docs.neurolink.ai/providers/ollama"));
+}
+export default addOllamaCommands;
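The new `addOllamaCommands` module above expects a yargs instance and registers the `ollama <command>` group on it. A minimal sketch of how an entry point might wire it up; the bootstrap below is an illustrative assumption, not code from this package:

```ts
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import addOllamaCommands from "./commands/ollama.js";

// Build the CLI, register the "ollama" command group, then parse argv so
// that e.g. `neurolink ollama pull llama2` dispatches to pullModelHandler.
const cli = yargs(hideBin(process.argv));
addOllamaCommands(cli);
cli.demandCommand(1).strict().parseAsync();
```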
@@ -3,6 +3,10 @@ import type { CommandModule } from "yargs";
|
|
|
3
3
|
* Factory for creating Ollama CLI commands using the Factory Pattern
|
|
4
4
|
*/
|
|
5
5
|
export declare class OllamaCommandFactory {
|
|
6
|
+
/**
|
|
7
|
+
* Secure wrapper around spawnSync to prevent command injection.
|
|
8
|
+
*/
|
|
9
|
+
private static safeSpawn;
|
|
6
10
|
/**
|
|
7
11
|
* Create the Ollama command group
|
|
8
12
|
*/
|
|
package/dist/cli/factories/ollamaCommandFactory.js
CHANGED
@@ -1,12 +1,24 @@
-import {
+import { spawnSync, } from "child_process";
 import chalk from "chalk";
 import ora from "ora";
 import inquirer from "inquirer";
 import { logger } from "../../lib/utils/logger.js";
+import { OllamaUtils } from "../utils/ollamaUtils.js";
 /**
  * Factory for creating Ollama CLI commands using the Factory Pattern
  */
 export class OllamaCommandFactory {
+    /**
+     * Secure wrapper around spawnSync to prevent command injection.
+     */
+    static safeSpawn(command, args, options = {}) {
+        // Command validation is now handled by TypeScript with AllowedCommand type
+        const defaultOptions = {
+            ...options,
+            encoding: "utf8", // Always enforce utf8 encoding
+        };
+        return spawnSync(command, args, defaultOptions);
+    }
     /**
      * Create the Ollama command group
      */
@@ -46,9 +58,13 @@ export class OllamaCommandFactory {
     static async listModelsHandler() {
         const spinner = ora("Fetching installed models...").start();
         try {
-            const
+            const res = this.safeSpawn("ollama", ["list"]);
+            if (res.error || res.status !== 0) {
+                throw res.error || new Error(res.stderr);
+            }
             spinner.succeed("Installed models:");
-
+            const output = res.stdout.trim();
+            if (output) {
                 logger.always(output);
             }
             else {
@@ -71,7 +87,12 @@ export class OllamaCommandFactory {
         logger.always(chalk.blue(`Downloading model: ${model}`));
         logger.always(chalk.gray("This may take several minutes..."));
         try {
-
+            const res = this.safeSpawn("ollama", ["pull", model], {
+                stdio: "inherit",
+            });
+            if (res.error || res.status !== 0) {
+                throw res.error || new Error("pull failed");
+            }
             logger.always(chalk.green(`\n✅ Successfully downloaded ${model}`));
             logger.always(chalk.blue(`\nTest it with: npx @juspay/neurolink generate "Hello!" --provider ollama --model ${model}`));
         }
@@ -87,7 +108,6 @@ export class OllamaCommandFactory {
      */
     static async removeModelHandler(argv) {
         const { model } = argv;
-        // Confirm removal
         const { confirm } = await inquirer.prompt([
             {
                 type: "confirm",
@@ -102,7 +122,10 @@ export class OllamaCommandFactory {
         }
         const spinner = ora(`Removing model ${model}...`).start();
         try {
-
+            const res = this.safeSpawn("ollama", ["rm", model]);
+            if (res.error || res.status !== 0) {
+                throw res.error || new Error(res.stderr);
+            }
             spinner.succeed(`Successfully removed ${model}`);
         }
         catch (error) {
@@ -118,28 +141,50 @@ export class OllamaCommandFactory {
     static async statusHandler() {
         const spinner = ora("Checking Ollama service status...").start();
         try {
-
-
+            const res = this.safeSpawn("ollama", ["list"]);
+            if (res.error || res.status !== 0) {
+                throw res.error || new Error(res.stderr);
+            }
             spinner.succeed("Ollama service is running");
-            //
-
-
-
-
-
-
-
+            // Attempt to get model list with retry logic for JSON parsing
+            let modelsData = null;
+            for (let attempt = 1; attempt <= 3; attempt++) {
+                try {
+                    const curlRes = this.safeSpawn("curl", [
+                        "-s",
+                        "http://localhost:11434/api/tags",
+                    ]);
+                    if (!curlRes.error && curlRes.status === 0 && curlRes.stdout.trim()) {
+                        try {
+                            modelsData = JSON.parse(curlRes.stdout);
+                            break; // Success, exit retry loop
+                        }
+                        catch (jsonError) {
+                            logger.debug(`JSON parse failed on attempt ${attempt}: ${jsonError}`);
+                            if (attempt < 3) {
+                                // Brief delay before retry
+                                await new Promise((resolve) => setTimeout(resolve, 500));
+                                continue;
+                            }
+                            // Final attempt failed, log warning but don't throw
+                            logger.debug("Failed to parse Ollama API response after 3 attempts");
+                        }
+                    }
+                }
+                catch (curlError) {
+                    logger.debug(`Curl failed on attempt ${attempt}: ${curlError}`);
+                    if (attempt < 3) {
+                        await new Promise((resolve) => setTimeout(resolve, 500));
+                    }
                 }
             }
-
-
-            logger.
-            logger.debug("Optional curl command failed in statusHandler:", error);
+            // Display model count if we got valid data
+            if (modelsData?.models && modelsData.models.length > 0) {
+                logger.always(chalk.green(`\n${modelsData.models.length} models available`));
             }
         }
-        catch (
+        catch (_error) {
             spinner.fail("Ollama service is not running");
-            logger.debug && logger.debug("Ollama status check failed:", error);
             logger.always(chalk.yellow("\nStart Ollama with: ollama serve"));
             logger.always(chalk.blue("Or restart the Ollama app if using the desktop version"));
             process.exit(1);
@@ -149,59 +194,7 @@ export class OllamaCommandFactory {
      * Handler for starting Ollama service
      */
     static async startHandler() {
-
-        try {
-            // Check if already running
-            try {
-                execSync("ollama list", { encoding: "utf8" });
-                logger.always(chalk.yellow("Ollama service is already running!"));
-                return;
-            }
-            catch {
-                // Not running, continue to start
-            }
-            // Different approaches for different platforms
-            if (process.platform === "darwin") {
-                // macOS
-                logger.always(chalk.gray("Starting Ollama on macOS..."));
-                try {
-                    execSync("open -a Ollama");
-                    logger.always(chalk.green("✅ Ollama app started"));
-                }
-                catch {
-                    // Try service command
-                    execSync("ollama serve > /dev/null 2>&1 &", { stdio: "ignore" });
-                    logger.always(chalk.green("✅ Ollama service started"));
-                }
-            }
-            else if (process.platform === "linux") {
-                // Linux
-                logger.always(chalk.gray("Starting Ollama service on Linux..."));
-                try {
-                    execSync("systemctl start ollama", { encoding: "utf8" });
-                    logger.always(chalk.green("✅ Ollama service started"));
-                }
-                catch {
-                    // Try direct command
-                    execSync("ollama serve > /dev/null 2>&1 &", { stdio: "ignore" });
-                    logger.always(chalk.green("✅ Ollama service started"));
-                }
-            }
-            else {
-                // Windows
-                logger.always(chalk.gray("Starting Ollama on Windows..."));
-                execSync("start ollama serve", { stdio: "ignore" });
-                logger.always(chalk.green("✅ Ollama service started"));
-            }
-            logger.always(chalk.blue("\nWait a few seconds for the service to initialize..."));
-        }
-        catch (error) {
-            logger.error(chalk.red("Failed to start Ollama service"));
-            const errorMessage = error instanceof Error ? error.message : String(error);
-            logger.error(chalk.red("Error:", errorMessage));
-            logger.always(chalk.blue("\nTry starting Ollama manually or check installation"));
-            process.exit(1);
-        }
+        await OllamaUtils.startOllamaService();
     }
     /**
      * Handler for stopping Ollama service
@@ -210,26 +203,23 @@ export class OllamaCommandFactory {
         const spinner = ora("Stopping Ollama service...").start();
         try {
             if (process.platform === "darwin") {
-                // macOS
                 try {
-
+                    this.safeSpawn("pkill", ["ollama"]);
                 }
                 catch {
-
+                    this.safeSpawn("killall", ["Ollama"]);
                 }
             }
             else if (process.platform === "linux") {
-                // Linux
                 try {
-
+                    this.safeSpawn("systemctl", ["stop", "ollama"]);
                 }
                 catch {
-
+                    this.safeSpawn("pkill", ["ollama"]);
                 }
             }
             else {
-
-                execSync("taskkill /F /IM ollama.exe", { encoding: "utf8" });
+                this.safeSpawn("taskkill", ["/F", "/IM", "ollama.exe"]);
             }
             spinner.succeed("Ollama service stopped");
         }
@@ -244,13 +234,17 @@ export class OllamaCommandFactory {
      */
     static async setupHandler() {
         logger.always(chalk.blue("🦙 Welcome to Ollama Setup!\n"));
-        // Check if Ollama is installed
         const checkSpinner = ora("Checking Ollama installation...").start();
         let isInstalled = false;
         try {
-
-
-
+            const res = this.safeSpawn("ollama", ["--version"]);
+            if (!res.error && res.status === 0) {
+                isInstalled = true;
+                checkSpinner.succeed("Ollama is installed");
+            }
+            else {
+                throw new Error(res.stderr);
+            }
         }
         catch {
             checkSpinner.fail("Ollama is not installed");
@@ -284,12 +278,13 @@ export class OllamaCommandFactory {
                 return;
             }
         }
-        // Check if service is running
         let serviceRunning = false;
         try {
-
-
-
+            const res = this.safeSpawn("ollama", ["list"]);
+            if (!res.error && res.status === 0) {
+                serviceRunning = true;
+                logger.always(chalk.green("\n✅ Ollama service is running"));
+            }
         }
         catch {
             logger.always(chalk.yellow("\n⚠️ Ollama service is not running"));
@@ -307,7 +302,6 @@ export class OllamaCommandFactory {
             }
         }
         if (serviceRunning) {
-            // List available models
             logger.always(chalk.blue("\n📦 Popular Ollama models:"));
             logger.always(" • llama2 (7B) - General purpose");
             logger.always(" • codellama (7B) - Code generation");
@@ -362,7 +356,6 @@ export class OllamaCommandFactory {
             await this.pullModelHandler({ model: modelToDownload });
         }
     }
-    // Final instructions
     logger.always(chalk.green("\n✅ Setup complete!\n"));
     logger.always(chalk.blue("Next steps:"));
     logger.always("1. List models: " + chalk.gray("neurolink ollama list-models"));
package/dist/cli/utils/ollamaUtils.d.ts
ADDED
@@ -0,0 +1,24 @@
+import { type SpawnSyncReturns, type SpawnSyncOptions } from "child_process";
+type AllowedCommand = "ollama" | "curl" | "systemctl" | "pkill" | "killall" | "open" | "taskkill" | "start";
+/**
+ * Shared Ollama utilities for CLI commands
+ */
+export declare class OllamaUtils {
+    /**
+     * Secure wrapper around spawnSync to prevent command injection.
+     */
+    static safeSpawn(command: AllowedCommand, args: string[], options?: SpawnSyncOptions): SpawnSyncReturns<string>;
+    /**
+     * Wait for Ollama service to become ready with exponential backoff
+     */
+    static waitForOllamaReady(maxAttempts?: number, initialDelay?: number): Promise<boolean>;
+    /**
+     * Check if Ollama service is already running
+     */
+    static isOllamaRunning(): boolean;
+    /**
+     * Unified Ollama start logic that works across platforms
+     */
+    static startOllamaService(): Promise<void>;
+}
+export {};
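The `AllowedCommand` union above moves the command allowlist into the type system: passing any binary outside the list fails compilation rather than being filtered at runtime. A minimal consumer sketch against these declarations; the retry parameters are illustrative, not defaults taken from the package:

```ts
import { OllamaUtils } from "./ollamaUtils.js";

async function ensureOllama(): Promise<void> {
    // "ollama" type-checks because it is a member of AllowedCommand.
    const version = OllamaUtils.safeSpawn("ollama", ["--version"]);
    if (version.error || version.status !== 0) {
        throw new Error("Ollama is not installed");
    }
    // Rejected at compile time: '"rm"' is not assignable to AllowedCommand.
    // OllamaUtils.safeSpawn("rm", ["-rf", "/tmp/x"]);
    if (!OllamaUtils.isOllamaRunning()) {
        await OllamaUtils.startOllamaService();
        // Poll with exponential backoff: up to 5 attempts, starting at 250 ms.
        const ready = await OllamaUtils.waitForOllamaReady(5, 250);
        if (!ready) {
            throw new Error("Ollama did not become ready in time");
        }
    }
}
```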