@edgible-team/cli 1.2.4 → 1.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/agent/agent-handlers.d.ts +45 -0
- package/dist/commands/agent/agent-handlers.d.ts.map +1 -0
- package/dist/commands/agent/agent-handlers.js +1159 -0
- package/dist/commands/agent/install.d.ts +11 -0
- package/dist/commands/agent/install.d.ts.map +1 -0
- package/dist/commands/agent/install.js +399 -0
- package/dist/commands/agent/logs.d.ts +12 -0
- package/dist/commands/agent/logs.d.ts.map +1 -0
- package/dist/commands/agent/logs.js +77 -0
- package/dist/commands/agent/restart.d.ts +4 -0
- package/dist/commands/agent/restart.d.ts.map +1 -0
- package/dist/commands/agent/restart.js +33 -0
- package/dist/commands/agent/set-log-level.d.ts +8 -0
- package/dist/commands/agent/set-log-level.d.ts.map +1 -0
- package/dist/commands/agent/set-log-level.js +133 -0
- package/dist/commands/agent/setup.d.ts +9 -0
- package/dist/commands/agent/setup.d.ts.map +1 -0
- package/dist/commands/agent/setup.js +149 -0
- package/dist/commands/agent/start.d.ts +12 -0
- package/dist/commands/agent/start.d.ts.map +1 -0
- package/dist/commands/agent/start.js +308 -0
- package/dist/commands/agent/status.d.ts +7 -0
- package/dist/commands/agent/status.d.ts.map +1 -0
- package/dist/commands/agent/status.js +68 -0
- package/dist/commands/agent/stop.d.ts +4 -0
- package/dist/commands/agent/stop.d.ts.map +1 -0
- package/dist/commands/agent/stop.js +33 -0
- package/dist/commands/agent/uninstall.d.ts +7 -0
- package/dist/commands/agent/uninstall.d.ts.map +1 -0
- package/dist/commands/agent/uninstall.js +168 -0
- package/dist/commands/agent.d.ts.map +1 -1
- package/dist/commands/agent.js +24 -1190
- package/dist/commands/ai/helpers.d.ts +139 -0
- package/dist/commands/ai/helpers.d.ts.map +1 -0
- package/dist/commands/ai/helpers.js +1470 -0
- package/dist/commands/ai/serve.d.ts +6 -0
- package/dist/commands/ai/serve.d.ts.map +1 -0
- package/dist/commands/ai/serve.js +124 -0
- package/dist/commands/ai/setup.d.ts +14 -0
- package/dist/commands/ai/setup.d.ts.map +1 -0
- package/dist/commands/ai/setup.js +86 -0
- package/dist/commands/ai/status.d.ts +2 -0
- package/dist/commands/ai/status.d.ts.map +1 -0
- package/dist/commands/ai/status.js +160 -0
- package/dist/commands/ai/stop.d.ts +2 -0
- package/dist/commands/ai/stop.d.ts.map +1 -0
- package/dist/commands/ai/stop.js +21 -0
- package/dist/commands/ai/teardown.d.ts +5 -0
- package/dist/commands/ai/teardown.d.ts.map +1 -0
- package/dist/commands/ai/teardown.js +78 -0
- package/dist/commands/ai/test.d.ts +4 -0
- package/dist/commands/ai/test.d.ts.map +1 -0
- package/dist/commands/ai/test.js +65 -0
- package/dist/commands/ai.d.ts.map +1 -1
- package/dist/commands/ai.js +16 -1938
- package/dist/commands/application/api-keys/create.d.ts +6 -0
- package/dist/commands/application/api-keys/create.d.ts.map +1 -0
- package/dist/commands/application/api-keys/create.js +68 -0
- package/dist/commands/application/api-keys/delete.d.ts +6 -0
- package/dist/commands/application/api-keys/delete.d.ts.map +1 -0
- package/dist/commands/application/api-keys/delete.js +79 -0
- package/dist/commands/application/api-keys/list.d.ts +5 -0
- package/dist/commands/application/api-keys/list.d.ts.map +1 -0
- package/dist/commands/application/api-keys/list.js +65 -0
- package/dist/commands/application/api-keys.d.ts +3 -0
- package/dist/commands/application/api-keys.d.ts.map +1 -0
- package/dist/commands/application/api-keys.js +227 -0
- package/dist/commands/application/create-compose.d.ts +3 -0
- package/dist/commands/application/create-compose.d.ts.map +1 -0
- package/dist/commands/application/create-compose.js +381 -0
- package/dist/commands/application/create-docker-compose.d.ts +10 -0
- package/dist/commands/application/create-docker-compose.d.ts.map +1 -0
- package/dist/commands/application/create-docker-compose.js +334 -0
- package/dist/commands/application/create-existing.d.ts +14 -0
- package/dist/commands/application/create-existing.d.ts.map +1 -0
- package/dist/commands/application/create-existing.js +359 -0
- package/dist/commands/application/create-interactive.d.ts +3 -0
- package/dist/commands/application/create-interactive.d.ts.map +1 -0
- package/dist/commands/application/create-interactive.js +326 -0
- package/dist/commands/application/create-managed-process.d.ts +15 -0
- package/dist/commands/application/create-managed-process.d.ts.map +1 -0
- package/dist/commands/application/create-managed-process.js +371 -0
- package/dist/commands/application/create-stubs.d.ts +4 -0
- package/dist/commands/application/create-stubs.d.ts.map +1 -0
- package/dist/commands/application/create-stubs.js +19 -0
- package/dist/commands/application/create-workload.d.ts +5 -0
- package/dist/commands/application/create-workload.d.ts.map +1 -0
- package/dist/commands/application/create-workload.js +48 -0
- package/dist/commands/application/delete.d.ts +6 -0
- package/dist/commands/application/delete.d.ts.map +1 -0
- package/dist/commands/application/delete.js +76 -0
- package/dist/commands/application/get.d.ts +5 -0
- package/dist/commands/application/get.d.ts.map +1 -0
- package/dist/commands/application/get.js +35 -0
- package/dist/commands/application/list.d.ts +4 -0
- package/dist/commands/application/list.d.ts.map +1 -0
- package/dist/commands/application/list.js +41 -0
- package/dist/commands/application/short-codes/create.d.ts +7 -0
- package/dist/commands/application/short-codes/create.d.ts.map +1 -0
- package/dist/commands/application/short-codes/create.js +69 -0
- package/dist/commands/application/short-codes/delete.d.ts +6 -0
- package/dist/commands/application/short-codes/delete.d.ts.map +1 -0
- package/dist/commands/application/short-codes/delete.js +79 -0
- package/dist/commands/application/short-codes/list.d.ts +5 -0
- package/dist/commands/application/short-codes/list.d.ts.map +1 -0
- package/dist/commands/application/short-codes/list.js +63 -0
- package/dist/commands/application/short-codes/toggle.d.ts +5 -0
- package/dist/commands/application/short-codes/toggle.d.ts.map +1 -0
- package/dist/commands/application/short-codes/toggle.js +71 -0
- package/dist/commands/application/short-codes.d.ts +3 -0
- package/dist/commands/application/short-codes.d.ts.map +1 -0
- package/dist/commands/application/short-codes.js +226 -0
- package/dist/commands/application/toggle.d.ts +2 -0
- package/dist/commands/application/toggle.d.ts.map +1 -0
- package/dist/commands/application/toggle.js +78 -0
- package/dist/commands/application/update.d.ts +4 -0
- package/dist/commands/application/update.d.ts.map +1 -0
- package/dist/commands/application/update.js +11 -0
- package/dist/commands/application.d.ts.map +1 -1
- package/dist/commands/application.js +32 -1630
- package/dist/commands/auth.d.ts.map +1 -1
- package/dist/commands/auth.js +31 -49
- package/dist/commands/base/BaseCommand.d.ts +3 -3
- package/dist/commands/base/BaseCommand.d.ts.map +1 -1
- package/dist/commands/base/BaseCommand.js +3 -3
- package/dist/commands/base/command-wrapper.d.ts +0 -4
- package/dist/commands/base/command-wrapper.d.ts.map +1 -1
- package/dist/commands/base/command-wrapper.js +13 -14
- package/dist/commands/base/middleware.d.ts +3 -3
- package/dist/commands/base/middleware.d.ts.map +1 -1
- package/dist/commands/base/middleware.js +4 -4
- package/dist/commands/config.d.ts.map +1 -1
- package/dist/commands/config.js +15 -32
- package/dist/commands/connectivity.d.ts.map +1 -1
- package/dist/commands/connectivity.js +6 -11
- package/dist/commands/debug.d.ts.map +1 -1
- package/dist/commands/debug.js +187 -46
- package/dist/commands/discover.d.ts.map +1 -1
- package/dist/commands/discover.js +4 -17
- package/dist/commands/gateway.d.ts.map +1 -1
- package/dist/commands/gateway.js +37 -77
- package/dist/commands/managedGateway/create.d.ts +6 -0
- package/dist/commands/managedGateway/create.d.ts.map +1 -0
- package/dist/commands/managedGateway/create.js +50 -0
- package/dist/commands/managedGateway/delete.d.ts +5 -0
- package/dist/commands/managedGateway/delete.d.ts.map +1 -0
- package/dist/commands/managedGateway/delete.js +57 -0
- package/dist/commands/managedGateway/get.d.ts +4 -0
- package/dist/commands/managedGateway/get.d.ts.map +1 -0
- package/dist/commands/managedGateway/get.js +71 -0
- package/dist/commands/managedGateway/haproxy-stats.d.ts +6 -0
- package/dist/commands/managedGateway/haproxy-stats.d.ts.map +1 -0
- package/dist/commands/managedGateway/haproxy-stats.js +131 -0
- package/dist/commands/managedGateway/list.d.ts +4 -0
- package/dist/commands/managedGateway/list.d.ts.map +1 -0
- package/dist/commands/managedGateway/list.js +50 -0
- package/dist/commands/managedGateway/logs.d.ts +10 -0
- package/dist/commands/managedGateway/logs.d.ts.map +1 -0
- package/dist/commands/managedGateway/logs.js +100 -0
- package/dist/commands/managedGateway/reboot.d.ts +5 -0
- package/dist/commands/managedGateway/reboot.d.ts.map +1 -0
- package/dist/commands/managedGateway/reboot.js +95 -0
- package/dist/commands/managedGateway/resync.d.ts +10 -0
- package/dist/commands/managedGateway/resync.d.ts.map +1 -0
- package/dist/commands/managedGateway/resync.js +69 -0
- package/dist/commands/managedGateway/ssh.d.ts +4 -0
- package/dist/commands/managedGateway/ssh.d.ts.map +1 -0
- package/dist/commands/managedGateway/ssh.js +130 -0
- package/dist/commands/managedGateway/wipe-logs.d.ts +4 -0
- package/dist/commands/managedGateway/wipe-logs.d.ts.map +1 -0
- package/dist/commands/managedGateway/wipe-logs.js +67 -0
- package/dist/commands/managedGateway/wireguard.d.ts +4 -0
- package/dist/commands/managedGateway/wireguard.d.ts.map +1 -0
- package/dist/commands/managedGateway/wireguard.js +68 -0
- package/dist/commands/managedGateway.d.ts.map +1 -1
- package/dist/commands/managedGateway.js +61 -117
- package/dist/commands/utils/config-validator.d.ts +5 -5
- package/dist/commands/utils/config-validator.d.ts.map +1 -1
- package/dist/commands/utils/config-validator.js +8 -8
- package/dist/commands/utils/output-formatter.js +1 -1
- package/dist/config/app-config.d.ts +1 -1
- package/dist/config/app-config.js +2 -2
- package/dist/index.js +0 -3
- package/dist/services/LocalAgentManager.d.ts.map +1 -1
- package/dist/services/LocalAgentManager.js +4 -2
- package/dist/services/agentDeployment/AgentDeploymentService.d.ts +35 -0
- package/dist/services/agentDeployment/AgentDeploymentService.d.ts.map +1 -0
- package/dist/services/agentDeployment/AgentDeploymentService.js +35 -0
- package/dist/services/application/ApplicationService.d.ts +5 -4
- package/dist/services/application/ApplicationService.d.ts.map +1 -1
- package/dist/services/application/ApplicationService.js +22 -35
- package/dist/services/auth/AuthService.d.ts +5 -5
- package/dist/services/auth/AuthService.d.ts.map +1 -1
- package/dist/services/auth/AuthService.js +11 -58
- package/dist/services/daemon/DaemonManagerFactory.d.ts +2 -0
- package/dist/services/daemon/DaemonManagerFactory.d.ts.map +1 -1
- package/dist/services/daemon/DaemonManagerFactory.js +14 -6
- package/dist/services/diagnostics/DiagnosticsService.d.ts +89 -0
- package/dist/services/diagnostics/DiagnosticsService.d.ts.map +1 -0
- package/dist/services/diagnostics/DiagnosticsService.js +37 -0
- package/dist/services/edgible.d.ts +6 -4
- package/dist/services/edgible.d.ts.map +1 -1
- package/dist/services/edgible.js +36 -86
- package/dist/services/gateway/GatewayService.d.ts +5 -6
- package/dist/services/gateway/GatewayService.d.ts.map +1 -1
- package/dist/services/gateway/GatewayService.js +22 -36
- package/dist/services/instances.d.ts +34 -0
- package/dist/services/instances.d.ts.map +1 -0
- package/dist/services/instances.js +64 -0
- package/dist/services/managedGateway/ManagedGatewayService.d.ts +75 -0
- package/dist/services/managedGateway/ManagedGatewayService.d.ts.map +1 -0
- package/dist/services/managedGateway/ManagedGatewayService.js +44 -0
- package/dist/services/token/TokenManager.d.ts +56 -0
- package/dist/services/token/TokenManager.d.ts.map +1 -0
- package/dist/services/token/TokenManager.js +85 -0
- package/dist/types/validation/schemas.d.ts +22 -22
- package/dist/utils/PlatformDetector.d.ts +2 -0
- package/dist/utils/PlatformDetector.d.ts.map +1 -1
- package/dist/utils/PlatformDetector.js +5 -34
- package/dist/validation/schemas.d.ts +6 -6
- package/package.json +1 -1
|
@@ -0,0 +1,1470 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.selectModel = selectModel;
|
|
40
|
+
exports.setupLocalOllama = setupLocalOllama;
|
|
41
|
+
exports.setupPlatformIntegration = setupPlatformIntegration;
|
|
42
|
+
exports.displaySetupSummary = displaySetupSummary;
|
|
43
|
+
exports.checkOllamaInstalled = checkOllamaInstalled;
|
|
44
|
+
exports.installOllama = installOllama;
|
|
45
|
+
exports.checkModelAvailable = checkModelAvailable;
|
|
46
|
+
exports.pullModel = pullModel;
|
|
47
|
+
exports.startOllama = startOllama;
|
|
48
|
+
exports.checkOllamaRunning = checkOllamaRunning;
|
|
49
|
+
exports.checkOllamaListeningAddress = checkOllamaListeningAddress;
|
|
50
|
+
exports.fixOllamaBinding = fixOllamaBinding;
|
|
51
|
+
exports.stopOllama = stopOllama;
|
|
52
|
+
exports.normalizeModelName = normalizeModelName;
|
|
53
|
+
exports.checkDockerInstalled = checkDockerInstalled;
|
|
54
|
+
exports.checkHasModels = checkHasModels;
|
|
55
|
+
exports.detectOllamaUrlForDocker = detectOllamaUrlForDocker;
|
|
56
|
+
exports.getComposeDirectory = getComposeDirectory;
|
|
57
|
+
exports.startOpenWebUI = startOpenWebUI;
|
|
58
|
+
exports.stopOpenWebUI = stopOpenWebUI;
|
|
59
|
+
exports.checkOpenWebUIRunning = checkOpenWebUIRunning;
|
|
60
|
+
exports.getOpenWebUIInfo = getOpenWebUIInfo;
|
|
61
|
+
exports.testModelConnectivity = testModelConnectivity;
|
|
62
|
+
exports.verifyModelOnEndpoint = verifyModelOnEndpoint;
|
|
63
|
+
exports.checkOllamaUrlReachable = checkOllamaUrlReachable;
|
|
64
|
+
exports.checkUrlReachable = checkUrlReachable;
|
|
65
|
+
const chalk_1 = __importDefault(require("chalk"));
|
|
66
|
+
const inquirer_1 = __importDefault(require("inquirer"));
|
|
67
|
+
const child_process_1 = require("child_process");
|
|
68
|
+
const os = __importStar(require("os"));
|
|
69
|
+
const path = __importStar(require("path"));
|
|
70
|
+
const fs = __importStar(require("fs"));
|
|
71
|
+
const node_fetch_1 = __importDefault(require("node-fetch"));
|
|
72
|
+
const instances_1 = require("../../services/instances");
|
|
73
|
+
const SystemCapabilityDetector_1 = require("../../detection/SystemCapabilityDetector");
|
|
74
|
+
const config_validator_1 = require("../utils/config-validator");
|
|
75
|
+
const compose_constants_1 = require("../../generated/compose-constants");
|
|
76
|
+
/**
 * Prompt configuration shared by every "Enter custom model name" flow.
 * Kept as a module constant so the three entry paths cannot drift apart
 * (previously this prompt was duplicated verbatim three times).
 */
const CUSTOM_MODEL_CHOICE = { name: 'Enter custom model name', value: '__custom__' };
/**
 * Convert a recommendation's display name to the value Ollama expects:
 * lowercase, whitespace collapsed to dashes, parentheses stripped.
 */
function toOllamaChoiceValue(modelName) {
    return modelName.toLowerCase().replace(/\s+/g, '-').replace(/[()]/g, '');
}
/** Prompt the user to type a free-form Ollama model name (trimmed, non-empty). */
async function promptCustomModelName() {
    const customAnswer = await inquirer_1.default.prompt([
        {
            type: 'input',
            name: 'model',
            message: 'Enter Ollama model name (e.g., deepseek-r1:8b, llama3.2:3b):',
            validate: (input) => {
                if (!input || input.trim().length === 0) {
                    return 'Model name cannot be empty';
                }
                return true;
            },
        },
    ]);
    return customAnswer.model.trim();
}
/** Show a list prompt over the given choices and return the selected value. */
async function promptModelFromList(choices) {
    const answer = await inquirer_1.default.prompt([
        {
            type: 'list',
            name: 'model',
            message: 'Select a model to use:',
            choices,
        },
    ]);
    return answer.model;
}
/**
 * List prompt that transparently falls through to the free-form input
 * when the user picks the "Enter custom model name" entry.
 */
async function promptWithCustomFallback(choices) {
    const picked = await promptModelFromList(choices);
    return picked === '__custom__' ? promptCustomModelName() : picked;
}
/**
 * Resolve which Ollama model to use.
 *
 * If `providedModel` is given it is used directly; otherwise the user is
 * prompted, starting from the recommendations rated 'excellent' or 'good'
 * (with escape hatches to see all models or type a custom name).
 *
 * @param {string|undefined} providedModel - model name supplied on the CLI, if any.
 * @param {{ recommendedModels: Array<{ modelName: string, size: string, suitability: string, reasoning: string }> }} capabilities
 *   system capability report produced by SystemCapabilityDetector.
 * @returns {Promise<string>} the model name in Ollama format (e.g. `deepseek-r1:14b`).
 * @throws {Error} if no model ends up selected.
 */
async function selectModel(providedModel, capabilities) {
    let selectedModel = providedModel;
    if (!selectedModel) {
        // Build the "every recommendation" choice list lazily; it is needed on
        // two paths (no suitable models, and the explicit "show all" request).
        const buildAllModelChoices = () => {
            const all = capabilities.recommendedModels.map((m) => ({
                name: `${m.modelName} (${m.size}) - ${m.suitability}`,
                value: toOllamaChoiceValue(m.modelName),
            }));
            all.push(CUSTOM_MODEL_CHOICE);
            return all;
        };
        // Only surface recommendations the detector rated as a real fit.
        const suitableModels = capabilities.recommendedModels.filter((m) => m.suitability === 'excellent' || m.suitability === 'good');
        if (suitableModels.length === 0) {
            console.log(chalk_1.default.yellow('⚠ No models are well-suited for your system.'));
            console.log(chalk_1.default.yellow('You can still run smaller models, but performance may be limited.\n'));
            selectedModel = await promptWithCustomFallback(buildAllModelChoices());
        }
        else {
            console.log(chalk_1.default.green('Recommended models for your system:\n'));
            suitableModels.forEach((model) => {
                const icon = model.suitability === 'excellent' ? '✓' : '•';
                const color = model.suitability === 'excellent' ? chalk_1.default.green : chalk_1.default.cyan;
                console.log(color(` ${icon} ${model.modelName} (${model.size})`));
                console.log(chalk_1.default.gray(` ${model.reasoning}\n`));
            });
            const modelChoices = suitableModels.map((m) => ({
                name: `${m.modelName} (${m.size}) - ${m.suitability === 'excellent' ? 'Recommended' : 'Good fit'}`,
                value: toOllamaChoiceValue(m.modelName),
            }));
            modelChoices.push({
                name: 'Show all models (including marginal/insufficient)',
                value: '__all__',
            });
            modelChoices.push(CUSTOM_MODEL_CHOICE);
            const picked = await promptModelFromList(modelChoices);
            if (picked === '__all__') {
                selectedModel = await promptWithCustomFallback(buildAllModelChoices());
            }
            else if (picked === '__custom__') {
                selectedModel = await promptCustomModelName();
            }
            else {
                selectedModel = picked;
            }
        }
    }
    if (!selectedModel) {
        throw new Error('No model selected');
    }
    // A name containing ':' or '/' is already in Ollama format (tag or
    // org-qualified); otherwise map the recommendation slug to Ollama form
    // (e.g. deepseek-r1-14b -> deepseek-r1:14b).
    if (selectedModel.includes(':') || selectedModel.includes('/')) {
        return selectedModel;
    }
    return normalizeModelName(selectedModel);
}
|
|
227
|
+
/**
 * Phase 1 of AI setup: ensure Ollama is installed on this machine and
 * detect what the host is capable of running.
 *
 * @param {{ autoInstall?: boolean }} options - when `autoInstall` is true,
 *   a missing Ollama is installed without prompting.
 * @returns {Promise<{ capabilities: object }>} the capability report from
 *   SystemCapabilityDetector (includes `recommendedModels` and
 *   `gpuDriverSupport`).
 * @throws {Error} when Ollama is missing and installation is declined.
 */
async function setupLocalOllama(options) {
    // Step 1: verify the Ollama binary is present, offering to install it.
    console.log(chalk_1.default.blue('Step 1: Checking Ollama installation...\n'));
    const alreadyInstalled = await checkOllamaInstalled();
    if (alreadyInstalled) {
        console.log(chalk_1.default.green('✓ Ollama is already installed\n'));
    }
    else {
        let shouldInstall = options.autoInstall || false;
        if (!shouldInstall) {
            const { install } = await inquirer_1.default.prompt([
                {
                    type: 'confirm',
                    name: 'install',
                    message: 'Ollama is not installed. Would you like to install it now?',
                    default: true,
                },
            ]);
            shouldInstall = install;
        }
        if (!shouldInstall) {
            throw new Error('Ollama is required but not installed. Please install it manually from https://ollama.com');
        }
        console.log(chalk_1.default.yellow('Installing Ollama...\n'));
        await installOllama();
        console.log(chalk_1.default.green('✓ Ollama installed successfully\n'));
    }
    // Step 2: probe the system (CPU/RAM/GPU) so later phases can pick models.
    console.log(chalk_1.default.blue('Step 2: Discovering system capabilities...\n'));
    const capabilities = await SystemCapabilityDetector_1.SystemCapabilityDetector.detectCapabilities();
    // Report whether Ollama will be able to use the GPU; the reason string
    // (when present) explains the detection result either way.
    const { ollamaGpuReady, ollamaGpuReason } = capabilities.gpuDriverSupport;
    if (ollamaGpuReady) {
        console.log(chalk_1.default.green('✓ GPU acceleration is ready for Ollama'));
    }
    else {
        console.log(chalk_1.default.yellow('⚠ GPU acceleration not detected - Ollama may run in CPU mode'));
    }
    if (ollamaGpuReason) {
        console.log(chalk_1.default.gray(` ${ollamaGpuReason}\n`));
    }
    // NOTE(review): steps for starting the Ollama service, fixing its network
    // binding, and testing model connectivity were disabled (commented out)
    // here; presumably they are handled by later setup phases — confirm before
    // reintroducing them.
    console.log(chalk_1.default.green('✅ Phase 1: Local Ollama Setup Complete!\n'));
    return { capabilities };
}
|
|
332
|
+
/**
|
|
333
|
+
* Setup platform integration (create applications)
|
|
334
|
+
*/
|
|
335
|
+
async function setupPlatformIntegration(options) {
|
|
336
|
+
const { model, capabilities, logger: funcLogger } = options;
|
|
337
|
+
// Always require auth and organization for platform integration
|
|
338
|
+
try {
|
|
339
|
+
(0, config_validator_1.validateConfig)(instances_1.configManager, {
|
|
340
|
+
requireAuth: true,
|
|
341
|
+
requireOrganization: true,
|
|
342
|
+
requireDeviceId: true,
|
|
343
|
+
});
|
|
344
|
+
}
|
|
345
|
+
catch (error) {
|
|
346
|
+
console.log(chalk_1.default.yellow('\n⚠ Platform integration requires authentication'));
|
|
347
|
+
console.log(chalk_1.default.blue('\nPlease login first:'));
|
|
348
|
+
console.log(chalk_1.default.gray(' edgible auth login\n'));
|
|
349
|
+
console.log(chalk_1.default.gray('Then run setup again to continue.\n'));
|
|
350
|
+
throw error;
|
|
351
|
+
}
|
|
352
|
+
// Get device ID from config (same device as agent)
|
|
353
|
+
const deviceId = (0, config_validator_1.requireDeviceId)(instances_1.configManager);
|
|
354
|
+
const deviceInfo = await instances_1.edgibleService.getDevice(deviceId);
|
|
355
|
+
const deviceName = deviceInfo.device?.name || deviceId;
|
|
356
|
+
console.log(chalk_1.default.blue('\n📡 Creating Platform Applications\n'));
|
|
357
|
+
// Step 3: Select model based on recommendations (before creating application)
|
|
358
|
+
console.log(chalk_1.default.blue('Step 3: Selecting model...\n'));
|
|
359
|
+
const ollamaModelName = await selectModel(model, capabilities);
|
|
360
|
+
console.log(chalk_1.default.blue(`\nSelected model: ${ollamaModelName}\n`));
|
|
361
|
+
// Step 7: Create Ollama API Application
|
|
362
|
+
console.log(chalk_1.default.blue('Step 7: Creating Ollama API application...\n'));
|
|
363
|
+
console.log(chalk_1.default.gray(` Device: ${deviceName} (${deviceId.substring(0, 8)}...)\n`));
|
|
364
|
+
console.log(chalk_1.default.gray(` Gateway: None (local/internal access only)\n`));
|
|
365
|
+
console.log(chalk_1.default.gray(` Protocol: HTTPS\n`));
|
|
366
|
+
const ollamaResult = await createOllamaApplication({
|
|
367
|
+
modelName: ollamaModelName,
|
|
368
|
+
deviceId: deviceId,
|
|
369
|
+
configManager: instances_1.configManager,
|
|
370
|
+
applicationService: instances_1.applicationService,
|
|
371
|
+
gatewayService: instances_1.gatewayService,
|
|
372
|
+
edgibleService: instances_1.edgibleService,
|
|
373
|
+
logger: instances_1.logger,
|
|
374
|
+
});
|
|
375
|
+
const createdOllamaApp = ollamaResult.app;
|
|
376
|
+
// Ensure URL has https:// protocol
|
|
377
|
+
const ollamaUrl = ollamaResult.url.startsWith('http://') || ollamaResult.url.startsWith('https://')
|
|
378
|
+
? ollamaResult.url
|
|
379
|
+
: `https://${ollamaResult.url}`;
|
|
380
|
+
console.log(chalk_1.default.green('✓ Ollama API application created'));
|
|
381
|
+
console.log(chalk_1.default.cyan(` URL: ${ollamaUrl}\n`));
|
|
382
|
+
// Step 8: Verify endpoint is accessible (retry mechanism)
|
|
383
|
+
console.log(chalk_1.default.blue('Step 8: Verifying endpoint is accessible...\n'));
|
|
384
|
+
console.log(chalk_1.default.gray(` Checking ${ollamaUrl}/api/tags...\n`));
|
|
385
|
+
let endpointAccessible = false;
|
|
386
|
+
const maxRetries = 20;
|
|
387
|
+
const retryDelay = 5000; // 5 seconds
|
|
388
|
+
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
389
|
+
try {
|
|
390
|
+
const controller = new AbortController();
|
|
391
|
+
const timeout = setTimeout(() => controller.abort(), 5000);
|
|
392
|
+
const response = await (0, node_fetch_1.default)(`${ollamaUrl}/api/tags`, {
|
|
393
|
+
method: 'GET',
|
|
394
|
+
signal: controller.signal,
|
|
395
|
+
});
|
|
396
|
+
clearTimeout(timeout);
|
|
397
|
+
if (response.ok) {
|
|
398
|
+
endpointAccessible = true;
|
|
399
|
+
console.log(chalk_1.default.green(`✓ Endpoint is accessible (attempt ${attempt}/${maxRetries})\n`));
|
|
400
|
+
break;
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
catch (error) {
|
|
404
|
+
// Endpoint not ready yet, continue retrying
|
|
405
|
+
if (attempt < maxRetries) {
|
|
406
|
+
console.log(chalk_1.default.gray(` Attempt ${attempt}/${maxRetries} failed, retrying in ${retryDelay / 1000} seconds...\n`));
|
|
407
|
+
await new Promise((resolve) => setTimeout(resolve, retryDelay));
|
|
408
|
+
}
|
|
409
|
+
}
|
|
410
|
+
}
|
|
411
|
+
if (!endpointAccessible) {
|
|
412
|
+
console.log(chalk_1.default.yellow(`⚠ Endpoint not accessible after ${maxRetries} attempts\n`));
|
|
413
|
+
console.log(chalk_1.default.gray(` The application may still be starting. This can take several minutes.\n`));
|
|
414
|
+
console.log(chalk_1.default.gray(` You can check status with: curl ${ollamaUrl}/api/tags\n`));
|
|
415
|
+
}
|
|
416
|
+
// Step 4: Check if model is already pulled
|
|
417
|
+
console.log(chalk_1.default.blue('Step 4: Checking if model is available...\n'));
|
|
418
|
+
const isModelAvailable = await checkModelAvailable(ollamaModelName);
|
|
419
|
+
if (!isModelAvailable) {
|
|
420
|
+
console.log(chalk_1.default.yellow(`Model ${ollamaModelName} is not available locally.`));
|
|
421
|
+
const answer = await inquirer_1.default.prompt([
|
|
422
|
+
{
|
|
423
|
+
type: 'confirm',
|
|
424
|
+
name: 'pull',
|
|
425
|
+
message: `Would you like to download ${ollamaModelName} now? (This may take a while)`,
|
|
426
|
+
default: true,
|
|
427
|
+
},
|
|
428
|
+
]);
|
|
429
|
+
if (answer.pull) {
|
|
430
|
+
console.log(chalk_1.default.yellow(`\nDownloading ${ollamaModelName}...\n`));
|
|
431
|
+
await pullModel(ollamaModelName);
|
|
432
|
+
console.log(chalk_1.default.green(`✓ Model ${ollamaModelName} downloaded successfully\n`));
|
|
433
|
+
}
|
|
434
|
+
else {
|
|
435
|
+
throw new Error(`Model ${ollamaModelName} is not available. Please pull it manually with: ollama pull ${ollamaModelName}`);
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
else {
|
|
439
|
+
console.log(chalk_1.default.green(`✓ Model ${ollamaModelName} is available\n`));
|
|
440
|
+
}
|
|
441
|
+
// Step 9: Create Open WebUI Application
|
|
442
|
+
console.log(chalk_1.default.blue('Step 9: Creating Open WebUI application...\n'));
|
|
443
|
+
console.log(chalk_1.default.gray(` Device: ${deviceName} (${deviceId.substring(0, 8)}...)\n`));
|
|
444
|
+
console.log(chalk_1.default.gray(` Gateway: Managed Gateway\n`));
|
|
445
|
+
console.log(chalk_1.default.gray(` Protocol: HTTPS\n`));
|
|
446
|
+
console.log(chalk_1.default.gray(` Connected to Ollama: ${ollamaUrl}\n`));
|
|
447
|
+
const webUIResult = await createOpenWebUIApplication({
|
|
448
|
+
ollamaUrl: ollamaUrl,
|
|
449
|
+
deviceId: deviceId,
|
|
450
|
+
configManager: instances_1.configManager,
|
|
451
|
+
applicationService: instances_1.applicationService,
|
|
452
|
+
gatewayService: instances_1.gatewayService,
|
|
453
|
+
edgibleService: instances_1.edgibleService,
|
|
454
|
+
logger: instances_1.logger,
|
|
455
|
+
});
|
|
456
|
+
const createdWebUIApp = webUIResult.app;
|
|
457
|
+
const webUIUrl = webUIResult.url;
|
|
458
|
+
console.log(chalk_1.default.green('✓ Open WebUI application created'));
|
|
459
|
+
console.log(chalk_1.default.cyan(` URL: ${webUIUrl}\n`));
|
|
460
|
+
return {
|
|
461
|
+
ollamaUrl,
|
|
462
|
+
webUIUrl,
|
|
463
|
+
createdOllamaApp,
|
|
464
|
+
createdWebUIApp,
|
|
465
|
+
deviceName,
|
|
466
|
+
deviceId,
|
|
467
|
+
ollamaModelName,
|
|
468
|
+
};
|
|
469
|
+
}
|
|
470
|
+
/**
 * Print the end-of-setup summary: chosen model, endpoint URLs, device,
 * any applications created, and suggested follow-up commands.
 *
 * @param {object} options
 * @param {string} options.ollamaModelName - Model that was selected/pulled.
 * @param {string} options.ollamaUrl - Ollama API URL (local/internal only).
 * @param {string} [options.webUIUrl] - Open WebUI URL, when one was created.
 * @param {string} options.deviceName - Target device name.
 * @param {string} options.deviceId - Target device id (first 8 chars shown).
 * @param {object} [options.createdOllamaApp] - Created Ollama app record.
 * @param {object} [options.createdWebUIApp] - Created WebUI app record.
 */
function displaySetupSummary(options) {
    const { ollamaModelName, ollamaUrl, webUIUrl, deviceName, deviceId, createdOllamaApp, createdWebUIApp } = options;
    // Small local shortcuts so each summary line stays on one line.
    const gray = (text) => console.log(chalk_1.default.gray(text));
    const white = (text) => console.log(chalk_1.default.white(text));
    console.log(chalk_1.default.blue('\n🎉 AI Setup Complete!\n'));
    white('📋 Summary:\n');
    gray(` Model: ${ollamaModelName}`);
    gray(` Ollama API: ${ollamaUrl} (local/internal only)`);
    if (webUIUrl) {
        gray(` Open WebUI: ${webUIUrl} (public via managed gateway)`);
    }
    gray(` Device: ${deviceName} (${deviceId.substring(0, 8)}...)`);
    if (createdOllamaApp || createdWebUIApp) {
        white('\n📱 Created Applications:\n');
        if (createdOllamaApp) {
            gray(` • ollama-api (${createdOllamaApp.id}) - No gateway (local only)`);
        }
        if (createdWebUIApp) {
            gray(` • open-webui (${createdWebUIApp.id}) - Managed gateway (public)`);
        }
    }
    white('\n🔧 Next Steps:\n');
    gray(` • Test endpoint: curl ${ollamaUrl}/api/tags`);
    gray(` • Test model: edgible ai test --model ${ollamaModelName}`);
    gray(' • Status: edgible ai status');
    gray(' • List apps: edgible application list');
    if (createdOllamaApp) {
        gray(' • Teardown: edgible ai teardown --remove-apps');
    }
}
|
|
501
|
+
/**
 * Probe for the Ollama CLI by running `ollama --version`.
 *
 * @returns {Promise<boolean>} true when the binary is on PATH and runs
 *   within 2 seconds; false otherwise.
 */
async function checkOllamaInstalled() {
    // Quiet probe: output discarded, failure signalled by the throw.
    const probeOptions = { encoding: 'utf8', timeout: 2000, stdio: 'ignore' };
    try {
        (0, child_process_1.execSync)('ollama --version', probeOptions);
    }
    catch {
        return false;
    }
    return true;
}
|
|
513
|
+
/**
 * Install Ollama using the platform's preferred mechanism:
 *
 * - linux:  pipes the official vendor install script into `sh`
 * - darwin: `brew install ollama` (requires Homebrew)
 * - win32:  `winget install Ollama.Ollama` (requires winget)
 *
 * Installer output is streamed to the terminal (stdio: 'inherit').
 *
 * @returns {Promise<void>} resolves when the installer exits successfully.
 * @throws {Error} on unsupported platforms, when the required package
 *   manager is missing, or when the installer itself fails (the failure
 *   is logged before rethrowing).
 */
async function installOllama() {
    const platform = os.platform();
    try {
        if (platform === 'linux') {
            // Use official Ollama install script.
            // NOTE(review): `curl | sh` executes remote code; acceptable here
            // only because it is the vendor's documented install path.
            (0, child_process_1.execSync)('curl -fsSL https://ollama.com/install.sh | sh', {
                encoding: 'utf8',
                stdio: 'inherit',
            });
        }
        else if (platform === 'darwin') {
            // macOS - check for Homebrew
            try {
                // 2s probe: throws when brew is not on PATH.
                (0, child_process_1.execSync)('brew --version', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                (0, child_process_1.execSync)('brew install ollama', {
                    encoding: 'utf8',
                    stdio: 'inherit',
                });
            }
            catch {
                // Fallback to manual install instructions.
                // NOTE(review): this catch also fires when brew exists but
                // `brew install ollama` itself fails, so the "Homebrew not
                // found" message can be misleading in that case.
                console.log(chalk_1.default.yellow('Homebrew not found. Please install Ollama manually:'));
                console.log(chalk_1.default.gray(' Visit: https://ollama.com/download\n'));
                throw new Error('Ollama installation requires Homebrew or manual installation');
            }
        }
        else if (platform === 'win32') {
            // Windows - use winget or provide instructions
            try {
                // 2s probe: throws when winget is not available.
                (0, child_process_1.execSync)('winget --version', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                (0, child_process_1.execSync)('winget install Ollama.Ollama', {
                    encoding: 'utf8',
                    stdio: 'inherit',
                });
            }
            catch {
                // Same caveat as darwin: a failed winget install lands here too.
                console.log(chalk_1.default.yellow('winget not found. Please install Ollama manually:'));
                console.log(chalk_1.default.gray(' Visit: https://ollama.com/download\n'));
                throw new Error('Ollama installation requires winget or manual installation');
            }
        }
        else {
            throw new Error(`Unsupported platform: ${platform}`);
        }
    }
    catch (error) {
        // Surface the failure for the user, then rethrow so the caller
        // can abort the wider setup flow.
        console.error(chalk_1.default.red('Failed to install Ollama:'), error);
        throw error;
    }
}
|
|
566
|
+
/**
 * Check whether a model is already present in the local Ollama library.
 *
 * Parses the NAME column of `ollama list` output and accepts either an
 * exact (case-insensitive) match, or — when `modelName` carries no tag —
 * any tagged variant of the same base name (e.g. `mistral` matches
 * `mistral:latest`).
 *
 * Fixes a false positive in the previous plain-substring test: checking
 * `llama3.1:8b` used to succeed when only `llama3.1:8b-instruct` was
 * pulled, because `includes()` matches inside longer names.
 *
 * @param {string} modelName - Ollama model name, optionally with a `:tag`.
 * @returns {Promise<boolean>} true when the model is available locally;
 *   false when it is missing or `ollama list` cannot be executed.
 */
async function checkModelAvailable(modelName) {
    try {
        const output = (0, child_process_1.execSync)('ollama list', { encoding: 'utf8', timeout: 5000 });
        const wanted = modelName.toLowerCase();
        // The first whitespace-delimited token of each row is the model name;
        // row 0 is the column header.
        return output
            .split('\n')
            .slice(1)
            .map((line) => (line.trim().split(/\s+/)[0] || '').toLowerCase())
            .some((name) => name === wanted || (wanted.indexOf(':') === -1 && name.startsWith(`${wanted}:`)));
    }
    catch {
        // `ollama list` failed: binary missing or daemon unreachable.
        return false;
    }
}
|
|
579
|
+
/**
 * Download a model into the local Ollama library via `ollama pull`.
 * Progress is streamed to the terminal (stdio: 'inherit'); no timeout is
 * set because pulls can take many minutes.
 *
 * NOTE(review): the pull targets OLLAMA_HOST=127.0.0.1:11435, while every
 * other helper in this file talks to port 11434 — confirm 11435 is the
 * intended (agent-managed?) instance and not a typo.
 *
 * @param {string} modelName - Ollama model name (e.g. "llama3.2:3b").
 *   Interpolated into a shell command unescaped — callers must pass
 *   trusted names only.
 * @throws {Error} when the pull fails; the error is logged before rethrow.
 */
async function pullModel(modelName) {
    try {
        // KEY=VALUE command prefix is POSIX-shell syntax; this will not work
        // under cmd.exe on Windows — TODO confirm the Windows code path.
        (0, child_process_1.execSync)(`OLLAMA_HOST=127.0.0.1:11435 ollama pull ${modelName}`, {
            encoding: 'utf8',
            stdio: 'inherit',
        });
    }
    catch (error) {
        console.error(chalk_1.default.red(`Failed to pull model ${modelName}:`), error);
        throw error;
    }
}
|
|
594
|
+
/**
 * Start Ollama service
 * Configures Ollama to listen on 0.0.0.0:11434 so Docker containers can access it
 *
 * Strategy per platform:
 * - linux:  prefer the systemd unit (user first, then system), writing a
 *   drop-in override that sets OLLAMA_HOST=0.0.0.0:11434, then
 *   daemon-reload + restart; otherwise fall back to a backgrounded
 *   `ollama serve` with OLLAMA_HOST exported.
 * - darwin: prefer launchd; otherwise the same backgrounded fallback.
 * - win32:  prefer the Windows service; otherwise `start /B ollama serve`.
 *
 * All failures are swallowed (an already-running Ollama is acceptable);
 * this function never throws.
 *
 * @returns {Promise<void>}
 */
async function startOllama() {
    const platform = os.platform();
    try {
        if (platform === 'linux') {
            // Check for systemd service (user or system)
            let serviceType = null;
            // NOTE(review): serviceName is never read below — dead local,
            // candidate for removal.
            let serviceName = 'ollama';
            // Check user service first
            try {
                (0, child_process_1.execSync)('systemctl --user is-enabled ollama > /dev/null 2>&1', { encoding: 'utf8', timeout: 2000 });
                serviceType = 'user';
            }
            catch {
                // Check system service
                try {
                    (0, child_process_1.execSync)('systemctl is-enabled ollama > /dev/null 2>&1', { encoding: 'utf8', timeout: 2000 });
                    serviceType = 'system';
                }
                catch {
                    // No systemd service found
                }
            }
            if (serviceType) {
                // Configure systemd service to listen on all interfaces
                const systemctlCmd = serviceType === 'user' ? 'systemctl --user' : 'sudo systemctl';
                const serviceFile = serviceType === 'user'
                    ? `${os.homedir()}/.config/systemd/user/ollama.service.d/override.conf`
                    : '/etc/systemd/system/ollama.service.d/override.conf';
                const serviceDir = path.dirname(serviceFile);
                try {
                    // Create override directory if it doesn't exist.
                    // NOTE(review): for the system service this mkdir/write runs
                    // without sudo, so it may fail on permissions and land in the
                    // catch below — confirm intended.
                    if (!fs.existsSync(serviceDir)) {
                        (0, child_process_1.execSync)(`mkdir -p "${serviceDir}"`, { encoding: 'utf8' });
                    }
                    // Check if override already has OLLAMA_HOST
                    let needsUpdate = true;
                    if (fs.existsSync(serviceFile)) {
                        const content = fs.readFileSync(serviceFile, 'utf8');
                        if (content.includes('OLLAMA_HOST=0.0.0.0:11434')) {
                            needsUpdate = false;
                        }
                    }
                    if (needsUpdate) {
                        // Write override file (drop-in; template content must stay
                        // flush-left — it is file content, not code indentation).
                        const overrideContent = `[Service]
Environment="OLLAMA_HOST=0.0.0.0:11434"
`;
                        fs.writeFileSync(serviceFile, overrideContent);
                        console.log(chalk_1.default.gray(`Configured systemd service to listen on 0.0.0.0:11434\n`));
                    }
                    // Reload and restart, then give the daemon 2s to come up.
                    (0, child_process_1.execSync)(`${systemctlCmd} daemon-reload`, { encoding: 'utf8', timeout: 3000 });
                    (0, child_process_1.execSync)(`${systemctlCmd} restart ollama`, { encoding: 'utf8', timeout: 5000 });
                    await new Promise((resolve) => setTimeout(resolve, 2000));
                    console.log(chalk_1.default.gray('Started Ollama via systemd listening on 0.0.0.0:11434 (accessible to Docker)\n'));
                    return;
                }
                catch (error) {
                    console.log(chalk_1.default.yellow(`⚠ Could not configure systemd service: ${error instanceof Error ? error.message : 'Unknown error'}`));
                    console.log(chalk_1.default.gray('Falling back to manual start...\n'));
                    // Fall through to manual start
                }
            }
            // Fallback: start ollama serve in background with OLLAMA_HOST set
            try {
                // Kill any existing ollama processes first (if not managed by systemd)
                try {
                    (0, child_process_1.execSync)('pkill -f "ollama serve"', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                    await new Promise((resolve) => setTimeout(resolve, 1000));
                }
                catch {
                    // Ignore if no process to kill
                }
                // Start Ollama with host binding to all interfaces; nohup + '&'
                // detaches it so the 1s execSync timeout only bounds the spawn.
                (0, child_process_1.execSync)('OLLAMA_HOST=0.0.0.0:11434 nohup ollama serve > /dev/null 2>&1 &', {
                    encoding: 'utf8',
                    timeout: 1000,
                    shell: '/bin/bash'
                });
                // Wait a bit for it to start
                await new Promise((resolve) => setTimeout(resolve, 2000));
                console.log(chalk_1.default.gray('Started Ollama listening on 0.0.0.0:11434 (accessible to Docker)\n'));
            }
            catch {
                // Ignore - may already be running
            }
        }
        else if (platform === 'darwin') {
            // macOS - try launchctl or start directly
            try {
                (0, child_process_1.execSync)('launchctl start com.ollama.ollama', { encoding: 'utf8', timeout: 3000 });
                console.log(chalk_1.default.gray('Note: Started via launchd. To allow Docker access, set OLLAMA_HOST in launchd config\n'));
            }
            catch {
                // Fallback: start ollama serve with host binding
                try {
                    // Kill any existing ollama processes first
                    try {
                        (0, child_process_1.execSync)('pkill -f "ollama serve"', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                        await new Promise((resolve) => setTimeout(resolve, 1000));
                    }
                    catch {
                        // Ignore if no process to kill
                    }
                    (0, child_process_1.execSync)('OLLAMA_HOST=0.0.0.0:11434 nohup ollama serve > /dev/null 2>&1 &', {
                        encoding: 'utf8',
                        timeout: 1000,
                        shell: '/bin/bash'
                    });
                    await new Promise((resolve) => setTimeout(resolve, 2000));
                    console.log(chalk_1.default.gray('Started Ollama listening on 0.0.0.0:11434 (accessible to Docker)\n'));
                }
                catch {
                    // Ignore
                }
            }
        }
        else if (platform === 'win32') {
            // Windows - Ollama typically runs as a service
            try {
                (0, child_process_1.execSync)('net start Ollama', { encoding: 'utf8', timeout: 3000 });
                console.log(chalk_1.default.gray('Note: Started as Windows service. To allow Docker access, set OLLAMA_HOST environment variable\n'));
            }
            catch {
                // Service might already be running or not installed as service
                // Try to start it directly with host binding
                try {
                    (0, child_process_1.execSync)('set OLLAMA_HOST=0.0.0.0:11434 && start /B ollama serve', {
                        encoding: 'utf8',
                        timeout: 1000
                    });
                    await new Promise((resolve) => setTimeout(resolve, 2000));
                    console.log(chalk_1.default.gray('Started Ollama listening on 0.0.0.0:11434 (accessible to Docker)\n'));
                }
                catch {
                    // Ignore
                }
            }
        }
    }
    catch (error) {
        // Ollama might already be running, which is fine
        console.log(chalk_1.default.gray('Note: Ollama service may already be running\n'));
    }
}
|
|
743
|
+
/**
 * Determine whether Ollama is currently running.
 *
 * Primary probe: GET http://localhost:11434/api/tags with a 3s abort.
 * Secondary probe (when the HTTP call throws): look for a live ollama
 * process via pgrep (linux/darwin) or tasklist (win32).
 *
 * @returns {Promise<boolean>} true when the API answers OK or a process
 *   is found; false otherwise. Never throws.
 */
async function checkOllamaRunning() {
    // --- Primary probe: the HTTP API ---
    const abort = new AbortController();
    const timer = setTimeout(() => abort.abort(), 3000);
    try {
        const reply = await (0, node_fetch_1.default)('http://localhost:11434/api/tags', {
            method: 'GET',
            signal: abort.signal,
        });
        clearTimeout(timer);
        // A reachable API settles the question either way (no process probe).
        return reply.ok;
    }
    catch {
        clearTimeout(timer);
    }
    // --- Secondary probe: is an ollama process alive? ---
    const quietRun = { encoding: 'utf8', timeout: 2000, stdio: 'ignore' };
    try {
        const platform = os.platform();
        if (platform === 'linux' || platform === 'darwin') {
            (0, child_process_1.execSync)('pgrep -f ollama', quietRun);
            return true;
        }
        else if (platform === 'win32') {
            (0, child_process_1.execSync)('tasklist /FI "IMAGENAME eq ollama.exe"', {
                encoding: 'utf8',
                timeout: 2000,
                stdio: 'ignore',
            });
            return true;
        }
    }
    catch {
        return false;
    }
    // Unsupported platform: treat as not running.
    return false;
}
|
|
787
|
+
/**
 * Check what address Ollama is listening on
 * @returns '0.0.0.0' if listening on all interfaces, '127.0.0.1' if localhost only, null if unknown
 *
 * Implementation: greps the socket tables (ss/netstat on POSIX,
 * netstat+findstr on Windows) for port 11434 and pattern-matches the
 * bound address. Never throws.
 *
 * NOTE(review): `ss -tlnp` / `netstat -tlnp` are Linux-style invocations;
 * on macOS both probes likely fail and this returns null — confirm.
 */
async function checkOllamaListeningAddress() {
    try {
        const platform = os.platform();
        let output;
        if (platform === 'linux' || platform === 'darwin') {
            // Try ss first (modern), fallback to netstat
            try {
                output = (0, child_process_1.execSync)('ss -tlnp 2>/dev/null | grep 11434', {
                    encoding: 'utf8',
                    timeout: 2000
                });
            }
            catch {
                try {
                    output = (0, child_process_1.execSync)('netstat -tlnp 2>/dev/null | grep 11434', {
                        encoding: 'utf8',
                        timeout: 2000
                    });
                }
                catch {
                    // Neither tool found the port (grep's non-zero exit also
                    // lands here when nothing is listening).
                    return null;
                }
            }
        }
        else if (platform === 'win32') {
            try {
                output = (0, child_process_1.execSync)('netstat -an | findstr :11434', {
                    encoding: 'utf8',
                    timeout: 2000,
                });
            }
            catch {
                return null;
            }
        }
        else {
            // Unsupported platform: no way to inspect sockets here.
            return null;
        }
        // All-interfaces forms: IPv4 wildcard, generic wildcard, IPv6 wildcard.
        if (output.includes('0.0.0.0:11434') || output.includes('*:11434') || output.includes('[::]:11434')) {
            return '0.0.0.0';
        }
        // Loopback-only forms.
        // NOTE(review): ss renders IPv6 loopback as "[::1]:11434", which the
        // '::1:11434' pattern does not match — verify intended coverage.
        else if (output.includes('127.0.0.1:11434') || output.includes('::1:11434')) {
            return '127.0.0.1';
        }
        return null;
    }
    catch {
        return null;
    }
}
|
|
841
|
+
/**
 * Fix Ollama binding if it's listening on localhost only
 * Attempts to reconfigure Ollama to listen on 0.0.0.0:11434
 *
 * linux: prefer rewriting the systemd drop-in override and restarting the
 * unit; otherwise (or on failure) kill `ollama serve` and relaunch it in
 * the background with OLLAMA_HOST exported. darwin/win32: kill-and-relaunch
 * only.
 *
 * @returns {Promise<boolean>} true when a reconfigure/restart was issued,
 *   false on unsupported platforms or unexpected errors. Never throws.
 */
async function fixOllamaBinding() {
    const platform = os.platform();
    try {
        if (platform === 'linux') {
            // Check for systemd service first
            let serviceType = null;
            try {
                (0, child_process_1.execSync)('systemctl --user is-enabled ollama > /dev/null 2>&1', { encoding: 'utf8', timeout: 2000 });
                serviceType = 'user';
            }
            catch {
                try {
                    (0, child_process_1.execSync)('systemctl is-enabled ollama > /dev/null 2>&1', { encoding: 'utf8', timeout: 2000 });
                    serviceType = 'system';
                }
                catch {
                    // No systemd service
                }
            }
            if (serviceType) {
                // Configure systemd service
                const systemctlCmd = serviceType === 'user' ? 'systemctl --user' : 'sudo systemctl';
                const serviceFile = serviceType === 'user'
                    ? `${os.homedir()}/.config/systemd/user/ollama.service.d/override.conf`
                    : '/etc/systemd/system/ollama.service.d/override.conf';
                const serviceDir = path.dirname(serviceFile);
                try {
                    // Create override directory if it doesn't exist
                    if (!fs.existsSync(serviceDir)) {
                        (0, child_process_1.execSync)(`mkdir -p "${serviceDir}"`, { encoding: 'utf8' });
                    }
                    // Check if override already has OLLAMA_HOST
                    let needsUpdate = true;
                    if (fs.existsSync(serviceFile)) {
                        const content = fs.readFileSync(serviceFile, 'utf8');
                        if (content.includes('OLLAMA_HOST=0.0.0.0:11434')) {
                            needsUpdate = false;
                        }
                    }
                    // NOTE(review): when the override already exists
                    // (needsUpdate === false) this branch does NOT restart the
                    // unit and instead falls through to the pkill/relaunch path
                    // below — unlike startOllama(), which always restarts.
                    // Confirm that asymmetry is intended.
                    if (needsUpdate) {
                        // Write override file (drop-in content must stay flush-left).
                        const overrideContent = `[Service]
Environment="OLLAMA_HOST=0.0.0.0:11434"
`;
                        fs.writeFileSync(serviceFile, overrideContent);
                        // Reload and restart
                        (0, child_process_1.execSync)(`${systemctlCmd} daemon-reload`, { encoding: 'utf8', timeout: 3000 });
                        (0, child_process_1.execSync)(`${systemctlCmd} restart ollama`, { encoding: 'utf8', timeout: 5000 });
                        await new Promise((resolve) => setTimeout(resolve, 2000));
                        return true;
                    }
                }
                catch (error) {
                    // Fall through to manual restart
                }
            }
            // Fallback: kill and restart with OLLAMA_HOST
            try {
                (0, child_process_1.execSync)('pkill -f "ollama serve"', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                await new Promise((resolve) => setTimeout(resolve, 1000));
            }
            catch {
                // Ignore if no process to kill
            }
            // nohup + '&' detaches the daemon; the 1s timeout only bounds the spawn.
            (0, child_process_1.execSync)('OLLAMA_HOST=0.0.0.0:11434 nohup ollama serve > /dev/null 2>&1 &', {
                encoding: 'utf8',
                timeout: 1000,
                shell: '/bin/bash'
            });
            await new Promise((resolve) => setTimeout(resolve, 2000));
            return true;
        }
        else if (platform === 'darwin') {
            // macOS - try to restart with OLLAMA_HOST
            try {
                (0, child_process_1.execSync)('pkill -f "ollama serve"', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                await new Promise((resolve) => setTimeout(resolve, 1000));
            }
            catch {
                // Ignore
            }
            (0, child_process_1.execSync)('OLLAMA_HOST=0.0.0.0:11434 nohup ollama serve > /dev/null 2>&1 &', {
                encoding: 'utf8',
                timeout: 1000,
                shell: '/bin/bash'
            });
            await new Promise((resolve) => setTimeout(resolve, 2000));
            return true;
        }
        else if (platform === 'win32') {
            // Windows - try to restart with OLLAMA_HOST
            try {
                (0, child_process_1.execSync)('taskkill /F /IM ollama.exe', { encoding: 'utf8', timeout: 2000, stdio: 'ignore' });
                await new Promise((resolve) => setTimeout(resolve, 1000));
            }
            catch {
                // Ignore
            }
            (0, child_process_1.execSync)('set OLLAMA_HOST=0.0.0.0:11434 && start /B ollama serve', {
                encoding: 'utf8',
                timeout: 1000
            });
            await new Promise((resolve) => setTimeout(resolve, 2000));
            return true;
        }
        return false;
    }
    catch (error) {
        return false;
    }
}
|
|
956
|
+
/**
 * Stop the Ollama service using the platform's preferred mechanism,
 * falling back to killing the process. Failures are logged as a warning
 * only; this function never throws.
 *
 * @returns {Promise<void>}
 */
async function stopOllama() {
    // Run one shell command, reporting success instead of throwing.
    const attempt = (command, timeout) => {
        try {
            (0, child_process_1.execSync)(command, { encoding: 'utf8', timeout });
            return true;
        }
        catch {
            return false;
        }
    };
    const platform = os.platform();
    try {
        if (platform === 'linux') {
            // Prefer the user-level systemd unit; otherwise kill the process.
            if (attempt('systemctl --user stop ollama', 3000)) {
                return;
            }
            attempt('pkill -f ollama', 2000);
        }
        else if (platform === 'darwin') {
            // Prefer launchd; otherwise kill the process.
            if (!attempt('launchctl stop com.ollama.ollama', 3000)) {
                attempt('pkill -f ollama', 2000);
            }
        }
        else if (platform === 'win32') {
            // Prefer the Windows service; otherwise force-kill the exe.
            if (!attempt('net stop Ollama', 3000)) {
                attempt('taskkill /F /IM ollama.exe', 2000);
            }
        }
    }
    catch (error) {
        console.error(chalk_1.default.yellow('Warning: Error stopping Ollama service:'), error);
    }
}
|
|
1014
|
+
/**
 * Normalize a human-readable model name to its Ollama model name.
 *
 * Canonicalizes to lowercase with dashes (dots kept so version numbers
 * like 3.2 survive), looks the result up in a table of known aliases,
 * and otherwise converts a trailing "-<size>b" into ":<size>b"
 * (e.g. gemma-2b -> gemma:2b). Unrecognized names pass through unchanged.
 *
 * @param {string} modelName - Display name, e.g. "Llama 3.2 (3B)".
 * @returns {string} Ollama model reference, e.g. "llama3.2:3b".
 */
function normalizeModelName(modelName) {
    // Canonical slug: lowercase, spaces -> dashes, parentheses stripped.
    const slug = modelName.toLowerCase().replace(/\s+/g, '-').replace(/[()]/g, '');
    // Known aliases for the recommended models (dotted and dotless forms).
    const OLLAMA_ALIASES = {
        'llama-32-1b': 'llama3.2:1b',
        'llama-32-3b': 'llama3.2:3b',
        'llama-31-8b': 'llama3.1:8b',
        'llama-31-70b': 'llama3.1:70b',
        'llama3.2-1b': 'llama3.2:1b',
        'llama3.2-3b': 'llama3.2:3b',
        'llama3.1-8b': 'llama3.1:8b',
        'llama3.1-70b': 'llama3.1:70b',
        'mistral-7b': 'mistral:7b',
        'phi-3-mini-38b': 'phi3:mini',
        'phi-3-mini-3.8b': 'phi3:mini',
        'qwen25-05b': 'qwen2.5:0.5b',
        'qwen25-7b': 'qwen2.5:7b',
        'qwen2.5-0.5b': 'qwen2.5:0.5b',
        'qwen2.5-7b': 'qwen2.5:7b',
        'deepseek-r1-15b': 'deepseek-r1:1.5b',
        'deepseek-r1-7b': 'deepseek-r1:7b',
        'deepseek-r1-8b': 'deepseek-r1:8b',
        'deepseek-r1-14b': 'deepseek-r1:14b',
        'deepseek-r1-32b': 'deepseek-r1:32b',
        'deepseek-r1-70b': 'deepseek-r1:70b',
        'deepseek-r1-671b': 'deepseek-r1:671b',
        'deepseek-r1-1.5b': 'deepseek-r1:1.5b',
    };
    const mapped = OLLAMA_ALIASES[slug];
    if (mapped) {
        return mapped;
    }
    // Generic fallback: last dash before a size suffix becomes a colon.
    const sized = /^(.+)-(\d+(?:\.\d+)?b)$/.exec(slug);
    if (sized) {
        return `${sized[1]}:${sized[2]}`;
    }
    // No pattern applies: hand the slug back unchanged.
    return slug;
}
|
|
1062
|
+
/**
 * Probe for the Docker CLI by running `docker --version`.
 *
 * @returns {Promise<boolean>} true when the binary is on PATH and runs
 *   within 2 seconds; false otherwise.
 */
async function checkDockerInstalled() {
    // Quiet probe: output discarded, failure signalled by the throw.
    const probeOptions = { encoding: 'utf8', timeout: 2000, stdio: 'ignore' };
    try {
        (0, child_process_1.execSync)('docker --version', probeOptions);
    }
    catch {
        return false;
    }
    return true;
}
|
|
1074
|
+
/**
 * Report whether any Ollama models are installed locally.
 *
 * @returns {Promise<boolean>} true when `ollama list` prints at least one
 *   row beyond the column header; false when empty or when the command
 *   cannot run.
 */
async function checkHasModels() {
    try {
        const listing = (0, child_process_1.execSync)('ollama list', { encoding: 'utf8', timeout: 5000 });
        // Row 0 is the header, so any additional row means a model exists.
        const rowCount = listing.trim().split('\n').length;
        return rowCount > 1;
    }
    catch {
        return false;
    }
}
|
|
1088
|
+
/**
 * Detect the URL Docker containers should use to reach the host's Ollama.
 *
 * macOS/Windows: Docker Desktop provides host.docker.internal. Linux:
 * prefer the docker0 bridge address (typically 172.17.0.1), then the
 * default-route gateway, and finally fall back to host.docker.internal
 * (supported by newer Docker on Linux).
 *
 * @returns {Promise<string>} base URL ending in :11434. Never throws.
 */
async function detectOllamaUrlForDocker() {
    const DOCKER_DESKTOP_URL = 'http://host.docker.internal:11434';
    const platform = os.platform();
    if (platform === 'darwin' || platform === 'win32') {
        return DOCKER_DESKTOP_URL;
    }
    // Helper: run a command and return its trimmed stdout (throws on failure).
    const run = (command) => (0, child_process_1.execSync)(command, { encoding: 'utf8', timeout: 2000 }).trim();
    try {
        // Preferred: the docker0 bridge interface address.
        const bridgeIp = run("ip -4 addr show docker0 | grep -oP '(?<=inet\\s)\\d+(\\.\\d+){3}'");
        if (bridgeIp) {
            return `http://${bridgeIp}:11434`;
        }
    }
    catch {
        // Bridge probe threw: try the default-route gateway instead.
        try {
            const gatewayIp = run("ip route | grep default | awk '{print $3}'");
            if (gatewayIp) {
                return `http://${gatewayIp}:11434`;
            }
        }
        catch {
            // Fall through to the generic answer below.
        }
    }
    return DOCKER_DESKTOP_URL;
}
|
|
1122
|
+
/**
 * Locate the Open WebUI docker-compose recipe directory.
 *
 * Candidates are checked most-likely-first:
 *  1. npm package root (recipes shipped alongside dist/)
 *  2. recipes optionally copied into dist during the build
 *  3. repository root during development (cwd-relative)
 *
 * @returns {string} absolute path to the compose directory.
 * @throws {Error} when none of the candidate locations exists.
 */
function getComposeDirectory() {
    const candidates = [
        path.join(__dirname, '..', '..', 'recipes', 'compose', 'open-webui'),
        path.join(__dirname, '..', 'recipes', 'compose', 'open-webui'),
        path.join(process.cwd(), 'recipes', 'compose', 'open-webui'),
    ];
    for (const candidate of candidates) {
        if (fs.existsSync(candidate)) {
            return candidate;
        }
    }
    throw new Error('Could not locate Open WebUI compose directory');
}
|
|
1144
|
+
/**
 * Start Open WebUI via `docker compose up -d`.
 *
 * @param {string} composeDir - Directory containing docker-compose.yml;
 *   also used as the working directory for compose.
 * @param {Object} env - Extra environment variables for compose variable
 *   substitution, merged over process.env.
 * @returns {Promise<void>} resolves when compose exits successfully.
 * @throws {Error} when the compose file is missing or compose fails
 *   (the failure is logged before rethrowing).
 */
async function startOpenWebUI(composeDir, env) {
    const composeFile = path.join(composeDir, 'docker-compose.yml');
    if (!fs.existsSync(composeFile)) {
        throw new Error(`Docker compose file not found: ${composeFile}`);
    }
    try {
        // Pass the variables through execSync's `env` option rather than
        // string-prefixing `KEY=VALUE` onto the command: values containing
        // spaces or shell metacharacters are no longer interpolated into the
        // shell line (quoting bug / injection risk), and the POSIX-only
        // prefix syntax is avoided so this also works on Windows shells.
        (0, child_process_1.execSync)(`docker compose -f "${composeFile}" up -d`, {
            encoding: 'utf8',
            stdio: 'inherit',
            cwd: composeDir,
            env: { ...process.env, ...env },
        });
    }
    catch (error) {
        console.error(chalk_1.default.red('Failed to start Open WebUI:'), error);
        throw error;
    }
}
|
|
1169
|
+
/**
 * Stop Open WebUI.
 *
 * Runs `docker compose down` against the compose file in `composeDir`,
 * optionally removing the associated volumes.
 *
 * @param {string} composeDir - Directory containing `docker-compose.yml`.
 * @param {boolean} removeVolumes - When true, pass `-v` to also delete volumes.
 * @throws Re-throws any failure from `docker compose down` after logging it.
 */
async function stopOpenWebUI(composeDir, removeVolumes) {
    const composeFile = path.join(composeDir, 'docker-compose.yml');
    const volumeFlag = removeVolumes ? '-v' : '';
    const command = `docker compose -f "${composeFile}" down ${volumeFlag}`;
    try {
        (0, child_process_1.execSync)(command, {
            encoding: 'utf8',
            stdio: 'inherit',
            cwd: composeDir,
        });
    }
    catch (error) {
        console.error(chalk_1.default.red('Failed to stop Open WebUI:'), error);
        throw error;
    }
}
|
|
1187
|
+
/**
 * Check if Open WebUI is running.
 *
 * Lists running container names via `docker ps` and looks for one that
 * includes "open-webui".
 *
 * @returns {Promise<boolean>} true if an open-webui container is running;
 *   false if it is not or if `docker ps` fails (e.g. Docker not installed).
 */
async function checkOpenWebUIRunning() {
    try {
        const names = (0, child_process_1.execSync)('docker ps --format "{{.Names}}"', {
            encoding: 'utf8',
            timeout: 2000,
        });
        return names.includes('open-webui');
    }
    catch {
        // Docker unavailable or the command timed out — treat as "not running".
        return false;
    }
}
|
|
1202
|
+
/**
 * Get Open WebUI information if running.
 *
 * Queries `docker ps` for the open-webui container's port mapping and
 * extracts the published host port.
 *
 * @returns {Promise<{port: number}|null>} The published port (default 3200
 *   when the mapping can't be parsed), or null if the container isn't
 *   running or docker isn't available.
 */
async function getOpenWebUIInfo() {
    try {
        const portsOutput = (0, child_process_1.execSync)('docker ps --filter "name=open-webui" --format "{{.Ports}}"', {
            encoding: 'utf8',
            timeout: 2000,
        });
        if (!portsOutput) {
            return null;
        }
        // Output looks like "0.0.0.0:3200->8080/tcp" — grab the host port.
        const match = portsOutput.match(/0\.0\.0\.0:(\d+)->/);
        // Fall back to the default port when the mapping can't be parsed.
        return match ? { port: parseInt(match[1], 10) } : { port: 3200 };
    }
    catch {
        return null;
    }
}
|
|
1226
|
+
/**
 * Test if a model can be accessed and generates responses
 * @param modelName - The Ollama model name to test
 * @param verbose - Show detailed output
 * @returns true if model responds successfully
 */
async function testModelConnectivity(modelName, verbose = false) {
    try {
        if (verbose) {
            console.log(chalk_1.default.gray(`  Sending test prompt to ${modelName}...`));
        }
        // Abort the request if the model takes too long to answer. A generous
        // 30 s budget is used because Ollama may need to load the model into
        // memory on the first request.
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), 30000); // 30 second timeout for model loading
        try {
            // Non-streaming single-prompt generation against the local Ollama API.
            const response = await (0, node_fetch_1.default)('http://localhost:11434/api/generate', {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    model: modelName,
                    prompt: 'Hello',
                    stream: false,
                }),
                signal: controller.signal,
            });
            clearTimeout(timeout);
            if (response.ok) {
                const data = await response.json();
                // Ollama reports model-level failures in-band via an `error` field
                // even on an HTTP 200 response.
                if (data.error) {
                    if (verbose) {
                        console.log(chalk_1.default.red(`  ✗ Model error: ${data.error}`));
                    }
                    return false;
                }
                if (data.response) {
                    if (verbose) {
                        console.log(chalk_1.default.green('  ✓ Model responded successfully'));
                        // Show at most the first 50 characters of the reply.
                        console.log(chalk_1.default.gray(`    Response: "${data.response.substring(0, 50)}${data.response.length > 50 ? '...' : ''}"`));
                    }
                    return true;
                }
            }
            else {
                if (verbose) {
                    console.log(chalk_1.default.red(`  ✗ HTTP ${response.status}: ${response.statusText}`));
                }
                return false;
            }
        }
        catch (error) {
            // Request failed or was aborted — make sure the abort timer is cleared.
            clearTimeout(timeout);
            if (verbose) {
                if (error instanceof Error && error.name === 'AbortError') {
                    console.log(chalk_1.default.yellow('  ⚠ Request timed out (model may be loading)'));
                }
                else {
                    console.log(chalk_1.default.red(`  ✗ Connection error: ${error instanceof Error ? error.message : 'Unknown error'}`));
                }
            }
            return false;
        }
        // HTTP 200 with neither an `error` nor a `response` field — treat as failure.
        return false;
    }
    catch (error) {
        // Unexpected failure outside the request itself (e.g. JSON parse).
        if (verbose) {
            console.log(chalk_1.default.red(`  ✗ Test failed: ${error instanceof Error ? error.message : 'Unknown error'}`));
        }
        return false;
    }
}
|
|
1297
|
+
/**
 * Verify model is available on endpoint.
 *
 * Queries `GET {endpointUrl}/api/tags` and checks whether `modelName`
 * matches one of the installed models (exact name, name with any tag
 * suffix, or the bare base name).
 *
 * @param {string} endpointUrl - Base URL of the Ollama endpoint.
 * @param {string} modelName - Model to look for (may include a ":tag").
 * @returns {Promise<boolean>} true if the model is listed; false on any
 *   failure (unreachable endpoint, non-OK status, timeout, bad payload).
 */
async function verifyModelOnEndpoint(endpointUrl, modelName) {
    try {
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), 10000);
        try {
            const response = await (0, node_fetch_1.default)(`${endpointUrl}/api/tags`, {
                method: 'GET',
                signal: controller.signal,
            });
            if (response.ok) {
                const data = await response.json();
                if (data.models) {
                    // Check if model exists (exact match or starts with model name)
                    const modelExists = data.models.some(m => {
                        const modelFullName = m.name;
                        return modelFullName === modelName ||
                            modelFullName.startsWith(modelName + ':') ||
                            modelFullName === modelName.split(':')[0];
                    });
                    return modelExists;
                }
            }
            return false;
        }
        finally {
            // BUG FIX: the original only cleared the timer on the success path;
            // on a fetch/JSON error the 10 s timeout kept running, leaking a
            // live timer that could keep the event loop alive.
            clearTimeout(timeout);
        }
    }
    catch (error) {
        return false;
    }
}
|
|
1328
|
+
/**
 * Helper to parse gateway IDs from a comma-separated string.
 *
 * @param {string|undefined} ids - Comma-separated gateway IDs (may be empty
 *   or undefined).
 * @returns {string[]} Trimmed, non-empty ID strings; [] for falsy input.
 */
function parseGatewayIds(ids) {
    if (!ids) {
        return [];
    }
    const result = [];
    for (const piece of ids.split(',')) {
        const trimmed = piece.trim();
        if (trimmed) {
            result.push(trimmed);
        }
    }
    return result;
}
|
|
1336
|
+
/**
 * Create Ollama application on Edgible platform.
 *
 * Registers an `ollama-api` managed-process application on the same device
 * as the agent. The process runs `ollama serve` bound to 0.0.0.0:11435 with
 * no gateway (local/internal access only) and no authentication.
 *
 * @param {object} config
 * @param {string} config.deviceId - Device to run Ollama on (same as agent).
 * @param {string} config.modelName - Model name, used in the description.
 * @param {object} config.applicationService - Service exposing
 *   `createApplicationProgrammatically`.
 * @returns {Promise<{app: object, url: string}>} The created application and
 *   its URL (placeholder URL if the platform returned none).
 */
async function createOllamaApplication(config) {
    // Device ID is already provided from config (same device as agent).
    const ollamaDeviceId = config.deviceId;
    // Build configuration for managed-process.
    // Ollama should be installed and available in PATH on the device.
    // (Removed the unused `useManagedGateway` local — the literal is passed
    // directly below.)
    const configuration = {
        command: 'ollama serve',
        env: {
            OLLAMA_HOST: '0.0.0.0:11435',
        },
    };
    // Create application as managed-process without authentication.
    const result = await config.applicationService.createApplicationProgrammatically({
        name: 'ollama-api',
        description: `Ollama AI API (${config.modelName}) - Managed Process`,
        port: 11435,
        protocol: 'https',
        deviceIds: [ollamaDeviceId],
        gatewayIds: [], // No gateway - local/internal access only
        useManagedGateway: false,
        subtype: 'managed-process',
        configuration,
        authModes: [], // No authentication required
    });
    return {
        app: result,
        url: result.url || 'https://ollama-api.your-domain.com',
    };
}
|
|
1370
|
+
/**
 * Create Open WebUI application on Edgible platform.
 *
 * Registers an `open-webui` docker-compose application on the same device as
 * Ollama, exposed through the managed gateway with organization auth enabled.
 * The compose file content is supplied as a generated base64 constant so no
 * runtime file reading is needed.
 *
 * @param {object} config
 * @param {string} config.deviceId - Device to deploy to (same as agent/Ollama).
 * @param {string} config.ollamaUrl - URL Open WebUI should use to reach
 *   Ollama (set as OLLAMA_BASE_URL; must not be localhost for remote
 *   deployments).
 * @param {object} config.applicationService - Service exposing
 *   `createApplicationProgrammatically`.
 * @returns {Promise<{app: object, url: string}>} The created application and
 *   its URL (placeholder URL if the platform returned none).
 */
async function createOpenWebUIApplication(config) {
    // Always use the same device as Ollama (same device as agent).
    const webuiDeviceId = config.deviceId;
    // Note: Backend should configure OLLAMA_BASE_URL environment variable to ${config.ollamaUrl}.
    // This URL should be the platform Ollama application URL (not localhost) for remote deployments.
    // Use generated base64 constant (no runtime file reading needed).
    // (Removed the unused `useManagedGateway` local — the literal is passed
    // directly below.)
    const dockerComposePathValue = (0, compose_constants_1.getOpenWebUIComposeValue)();
    const envVars = {
        'OLLAMA_BASE_URL': config.ollamaUrl
    };
    const result = await config.applicationService.createApplicationProgrammatically({
        name: 'open-webui',
        description: `Open WebUI - AI Chat Interface (OLLAMA_BASE_URL: ${config.ollamaUrl})`,
        port: 3200,
        protocol: 'https',
        deviceIds: [webuiDeviceId],
        gatewayIds: undefined, // Managed gateway for public access
        useManagedGateway: true,
        subtype: 'docker-compose',
        configuration: {
            'dockerComposePath': dockerComposePathValue, // base64:... constant
            'env': envVars,
            'isWorking': true
        },
        requireOrgAuth: true // Enable authentication - X-Auth-Email header already passed by auth system
    });
    return {
        app: result,
        url: result.url || 'https://open-webui.your-domain.com',
    };
}
|
|
1407
|
+
/**
 * Start Open WebUI locally with docker-compose.
 *
 * Verifies Docker is installed, locates the compose recipe directory, and
 * brings Open WebUI up on port 3200 pointed at the given Ollama URL.
 *
 * @param {string} ollamaUrl - URL Open WebUI should use to reach Ollama.
 * @throws {Error} If Docker is not installed, the compose directory cannot
 *   be located, or `docker compose up` fails.
 */
async function startOpenWebUILocal(ollamaUrl) {
    // Guard: Docker is a hard requirement for the local deployment path.
    const isDockerInstalled = await checkDockerInstalled();
    if (!isDockerInstalled) {
        throw new Error('Docker is required to run Open WebUI locally. Please install Docker first.');
    }
    const composeDir = getComposeDirectory();
    const composeEnv = {
        OLLAMA_BASE_URL: ollamaUrl,
        OPEN_WEBUI_PORT: '3200',
    };
    await startOpenWebUI(composeDir, composeEnv);
}
|
|
1422
|
+
/**
 * Check if an Ollama URL is reachable and responding.
 *
 * Issues a GET to `{url}/api/tags` with a 5-second abort timeout.
 *
 * @param {string} url - Base URL of the Ollama endpoint.
 * @returns {Promise<boolean>} true only for an HTTP 2xx response; false on
 *   any error, timeout, or non-OK status.
 */
async function checkOllamaUrlReachable(url) {
    try {
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), 5000); // 5 second timeout
        try {
            const response = await (0, node_fetch_1.default)(`${url}/api/tags`, {
                method: 'GET',
                signal: controller.signal,
            });
            return response.ok;
        }
        catch {
            return false;
        }
        finally {
            // Runs on both success and failure, matching the original's
            // clearTimeout on each path.
            clearTimeout(timer);
        }
    }
    catch {
        return false;
    }
}
|
|
1446
|
+
/**
 * Check if a URL is reachable (general purpose).
 *
 * Issues a GET with a 5-second abort timeout. Unlike the Ollama-specific
 * check, any response below HTTP 500 (including redirects and client
 * errors) counts as "reachable".
 *
 * @param {string} url - URL to probe.
 * @returns {Promise<boolean>} true if the server answered with status < 500;
 *   false on network error, timeout, or 5xx.
 */
async function checkUrlReachable(url) {
    try {
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), 5000); // 5 second timeout
        try {
            const response = await (0, node_fetch_1.default)(url, {
                method: 'GET',
                signal: controller.signal,
            });
            // Accept redirects and client errors as "reachable".
            return response.ok || response.status < 500;
        }
        catch {
            return false;
        }
        finally {
            // Runs on both success and failure, matching the original's
            // clearTimeout on each path.
            clearTimeout(timer);
        }
    }
    catch {
        return false;
    }
}
|
|
1470
|
+
//# sourceMappingURL=helpers.js.map
|