@mindstudio-ai/local-model-tunnel 0.1.9 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -115
- package/dist/chunk-PTK4SJQK.js +1768 -0
- package/dist/chunk-PTK4SJQK.js.map +1 -0
- package/dist/cli.d.ts +0 -2
- package/dist/cli.js +8 -517
- package/dist/cli.js.map +1 -1
- package/dist/index.d.ts +24 -5
- package/dist/index.js +6 -13
- package/dist/index.js.map +1 -1
- package/dist/tui-56JFPKBP.js +1561 -0
- package/dist/tui-56JFPKBP.js.map +1 -0
- package/package.json +11 -4
- package/dist/api.d.ts +0 -88
- package/dist/api.d.ts.map +0 -1
- package/dist/api.js +0 -168
- package/dist/api.js.map +0 -1
- package/dist/cli.d.ts.map +0 -1
- package/dist/config.d.ts +0 -27
- package/dist/config.d.ts.map +0 -1
- package/dist/config.js +0 -109
- package/dist/config.js.map +0 -1
- package/dist/helpers.d.ts +0 -4
- package/dist/helpers.d.ts.map +0 -1
- package/dist/helpers.js +0 -33
- package/dist/helpers.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/ollama.d.ts +0 -11
- package/dist/ollama.d.ts.map +0 -1
- package/dist/ollama.js +0 -36
- package/dist/ollama.js.map +0 -1
- package/dist/providers/comfyui.d.ts +0 -29
- package/dist/providers/comfyui.d.ts.map +0 -1
- package/dist/providers/comfyui.js +0 -359
- package/dist/providers/comfyui.js.map +0 -1
- package/dist/providers/index.d.ts +0 -63
- package/dist/providers/index.d.ts.map +0 -1
- package/dist/providers/index.js +0 -126
- package/dist/providers/index.js.map +0 -1
- package/dist/providers/lmstudio.d.ts +0 -11
- package/dist/providers/lmstudio.d.ts.map +0 -1
- package/dist/providers/lmstudio.js +0 -106
- package/dist/providers/lmstudio.js.map +0 -1
- package/dist/providers/ollama.d.ts +0 -11
- package/dist/providers/ollama.d.ts.map +0 -1
- package/dist/providers/ollama.js +0 -59
- package/dist/providers/ollama.js.map +0 -1
- package/dist/providers/stable-diffusion.d.ts +0 -41
- package/dist/providers/stable-diffusion.d.ts.map +0 -1
- package/dist/providers/stable-diffusion.js +0 -283
- package/dist/providers/stable-diffusion.js.map +0 -1
- package/dist/providers/types.d.ts +0 -196
- package/dist/providers/types.d.ts.map +0 -1
- package/dist/providers/types.js +0 -19
- package/dist/providers/types.js.map +0 -1
- package/dist/quickstart/QuickstartScreen.d.ts +0 -5
- package/dist/quickstart/QuickstartScreen.d.ts.map +0 -1
- package/dist/quickstart/QuickstartScreen.js +0 -616
- package/dist/quickstart/QuickstartScreen.js.map +0 -1
- package/dist/quickstart/detect.d.ts +0 -22
- package/dist/quickstart/detect.d.ts.map +0 -1
- package/dist/quickstart/detect.js +0 -243
- package/dist/quickstart/detect.js.map +0 -1
- package/dist/quickstart/index.d.ts +0 -4
- package/dist/quickstart/index.d.ts.map +0 -1
- package/dist/quickstart/index.js +0 -245
- package/dist/quickstart/index.js.map +0 -1
- package/dist/quickstart/installers.d.ts +0 -109
- package/dist/quickstart/installers.d.ts.map +0 -1
- package/dist/quickstart/installers.js +0 -1296
- package/dist/quickstart/installers.js.map +0 -1
- package/dist/runner.d.ts +0 -19
- package/dist/runner.d.ts.map +0 -1
- package/dist/runner.js +0 -314
- package/dist/runner.js.map +0 -1
- package/dist/tui/App.d.ts +0 -7
- package/dist/tui/App.d.ts.map +0 -1
- package/dist/tui/App.js +0 -53
- package/dist/tui/App.js.map +0 -1
- package/dist/tui/TunnelRunner.d.ts +0 -19
- package/dist/tui/TunnelRunner.d.ts.map +0 -1
- package/dist/tui/TunnelRunner.js +0 -228
- package/dist/tui/TunnelRunner.js.map +0 -1
- package/dist/tui/components/Header.d.ts +0 -9
- package/dist/tui/components/Header.d.ts.map +0 -1
- package/dist/tui/components/Header.js +0 -21
- package/dist/tui/components/Header.js.map +0 -1
- package/dist/tui/components/ModelsPanel.d.ts +0 -7
- package/dist/tui/components/ModelsPanel.d.ts.map +0 -1
- package/dist/tui/components/ModelsPanel.js +0 -28
- package/dist/tui/components/ModelsPanel.js.map +0 -1
- package/dist/tui/components/ProvidersPanel.d.ts +0 -7
- package/dist/tui/components/ProvidersPanel.d.ts.map +0 -1
- package/dist/tui/components/ProvidersPanel.js +0 -6
- package/dist/tui/components/ProvidersPanel.js.map +0 -1
- package/dist/tui/components/RequestLog.d.ts +0 -8
- package/dist/tui/components/RequestLog.d.ts.map +0 -1
- package/dist/tui/components/RequestLog.js +0 -60
- package/dist/tui/components/RequestLog.js.map +0 -1
- package/dist/tui/components/StatusBar.d.ts +0 -10
- package/dist/tui/components/StatusBar.d.ts.map +0 -1
- package/dist/tui/components/StatusBar.js +0 -7
- package/dist/tui/components/StatusBar.js.map +0 -1
- package/dist/tui/components/index.d.ts +0 -6
- package/dist/tui/components/index.d.ts.map +0 -1
- package/dist/tui/components/index.js +0 -6
- package/dist/tui/components/index.js.map +0 -1
- package/dist/tui/events.d.ts +0 -35
- package/dist/tui/events.d.ts.map +0 -1
- package/dist/tui/events.js +0 -26
- package/dist/tui/events.js.map +0 -1
- package/dist/tui/hooks/index.d.ts +0 -5
- package/dist/tui/hooks/index.d.ts.map +0 -1
- package/dist/tui/hooks/index.js +0 -5
- package/dist/tui/hooks/index.js.map +0 -1
- package/dist/tui/hooks/useConnection.d.ts +0 -10
- package/dist/tui/hooks/useConnection.d.ts.map +0 -1
- package/dist/tui/hooks/useConnection.js +0 -42
- package/dist/tui/hooks/useConnection.js.map +0 -1
- package/dist/tui/hooks/useModels.d.ts +0 -9
- package/dist/tui/hooks/useModels.d.ts.map +0 -1
- package/dist/tui/hooks/useModels.js +0 -28
- package/dist/tui/hooks/useModels.js.map +0 -1
- package/dist/tui/hooks/useProviders.d.ts +0 -9
- package/dist/tui/hooks/useProviders.d.ts.map +0 -1
- package/dist/tui/hooks/useProviders.js +0 -30
- package/dist/tui/hooks/useProviders.js.map +0 -1
- package/dist/tui/hooks/useRequests.d.ts +0 -9
- package/dist/tui/hooks/useRequests.d.ts.map +0 -1
- package/dist/tui/hooks/useRequests.js +0 -60
- package/dist/tui/hooks/useRequests.js.map +0 -1
- package/dist/tui/index.d.ts +0 -2
- package/dist/tui/index.d.ts.map +0 -1
- package/dist/tui/index.js +0 -19
- package/dist/tui/index.js.map +0 -1
- package/dist/tui/screens/ConfigScreen.d.ts +0 -2
- package/dist/tui/screens/ConfigScreen.d.ts.map +0 -1
- package/dist/tui/screens/ConfigScreen.js +0 -18
- package/dist/tui/screens/ConfigScreen.js.map +0 -1
- package/dist/tui/screens/HomeScreen.d.ts +0 -2
- package/dist/tui/screens/HomeScreen.d.ts.map +0 -1
- package/dist/tui/screens/HomeScreen.js +0 -156
- package/dist/tui/screens/HomeScreen.js.map +0 -1
- package/dist/tui/screens/ModelsScreen.d.ts +0 -2
- package/dist/tui/screens/ModelsScreen.d.ts.map +0 -1
- package/dist/tui/screens/ModelsScreen.js +0 -59
- package/dist/tui/screens/ModelsScreen.js.map +0 -1
- package/dist/tui/screens/StatusScreen.d.ts +0 -2
- package/dist/tui/screens/StatusScreen.d.ts.map +0 -1
- package/dist/tui/screens/StatusScreen.js +0 -53
- package/dist/tui/screens/StatusScreen.js.map +0 -1
- package/dist/tui/screens/index.d.ts +0 -9
- package/dist/tui/screens/index.d.ts.map +0 -1
- package/dist/tui/screens/index.js +0 -38
- package/dist/tui/screens/index.js.map +0 -1
- package/dist/tui/types.d.ts +0 -30
- package/dist/tui/types.d.ts.map +0 -1
- package/dist/tui/types.js +0 -2
- package/dist/tui/types.js.map +0 -1
- package/dist/workflows/index.d.ts +0 -47
- package/dist/workflows/index.d.ts.map +0 -1
- package/dist/workflows/index.js +0 -95
- package/dist/workflows/index.js.map +0 -1
- package/dist/workflows/ltx-video.d.ts +0 -45
- package/dist/workflows/ltx-video.d.ts.map +0 -1
- package/dist/workflows/ltx-video.js +0 -114
- package/dist/workflows/ltx-video.js.map +0 -1
- package/dist/workflows/wan2.1.d.ts +0 -44
- package/dist/workflows/wan2.1.d.ts.map +0 -1
- package/dist/workflows/wan2.1.js +0 -119
- package/dist/workflows/wan2.1.js.map +0 -1
|
@@ -1,1296 +0,0 @@
|
|
|
1
|
-
import { exec, spawn } from "child_process";
|
|
2
|
-
import { promisify } from "util";
|
|
3
|
-
import * as fs from "fs";
|
|
4
|
-
import * as path from "path";
|
|
5
|
-
import * as os from "os";
|
|
6
|
-
import open from "open";
|
|
7
|
-
import { setStableDiffusionInstallPath, getStableDiffusionInstallPath, setComfyUIInstallPath, getComfyUIInstallPath, } from "../config.js";
|
|
8
|
-
import chalk from "chalk";
|
|
9
|
-
// Re-export the configured install-path getters for consumers of this module.
export { getStableDiffusionInstallPath };
export { getComfyUIInstallPath };
// Promisified child_process.exec, used for short one-shot shell commands below.
const execAsync = promisify(exec);
/** Minimum Python version required by Forge Neo */
const REQUIRED_PYTHON_MAJOR = 3;
// Minimum minor version; together these require Python >= 3.13.
const REQUIRED_PYTHON_MINOR = 13;
|
|
15
|
-
/**
 * Try to get Python version info from a specific command.
 *
 * Runs `<cmd> --version`, parses the `Python X.Y.Z` output, and resolves the
 * real interpreter path via `sys.executable` when possible.
 *
 * @param {string} cmd - Python command to probe (e.g. "python3.13").
 * @returns {Promise<{major: number, minor: number, patch: number, version: string, executable: string} | null>}
 *   Parsed version info, or null when the command is missing or its output
 *   does not look like a Python version string.
 */
async function tryPythonCommand(cmd) {
    try {
        const { stdout: versionOut } = await execAsync(`${cmd} --version`);
        const match = versionOut.trim().match(/Python\s+(\d+)\.(\d+)\.(\d+)/);
        if (!match)
            return null;
        // Always pass an explicit radix to avoid any legacy octal parsing surprises.
        const major = Number.parseInt(match[1], 10);
        const minor = Number.parseInt(match[2], 10);
        const patch = Number.parseInt(match[3], 10);
        const version = `${major}.${minor}.${patch}`;
        // Resolve the actual executable path (venvs/shims can alias the command).
        let executable = cmd;
        try {
            const { stdout: exeOut } = await execAsync(`${cmd} -c "import sys; print(sys.executable)"`);
            executable = exeOut.trim();
        }
        catch {
            // Fall back to the bare command name if the probe fails.
        }
        return { major, minor, patch, version, executable };
    }
    catch {
        // Command not found or not a Python interpreter.
        return null;
    }
}
|
|
43
|
-
/**
 * Get the best available Python version.
 *
 * Probes a list of candidate commands and returns the first interpreter that
 * satisfies the minimum requirement. If none qualify, returns the newest
 * version found (useful for error messages); returns null when no Python
 * interpreter is available at all.
 *
 * @returns {Promise<{major: number, minor: number, patch: number, version: string, executable: string} | null>}
 */
export async function getPythonVersion() {
    // Versioned commands are probed before generic ones so a venv or an older
    // interpreter shadowing PATH cannot hide a newer install.
    const candidates = ["python3.13", "python3.14", "python3.15", "python3", "python"];
    const found = [];
    for (const candidate of candidates) {
        const info = await tryPythonCommand(candidate);
        if (!info)
            continue;
        // First qualifying interpreter wins immediately.
        if (isPythonVersionOk(info))
            return info;
        found.push(info);
    }
    if (found.length === 0)
        return null;
    // Nothing met the requirement: report the newest version we saw so error
    // messages can tell the user what they actually have.
    found.sort((a, b) => (b.major - a.major) || (b.minor - a.minor) || (b.patch - a.patch));
    return found[0];
}
|
|
83
|
-
/**
 * Check if the installed Python version meets Forge Neo requirements (>= 3.13).
 *
 * @param {{major: number, minor: number}} info - Parsed version info.
 * @returns {boolean} True when the interpreter is new enough.
 */
export function isPythonVersionOk(info) {
    // A different major version decides on its own; otherwise compare minors.
    if (info.major !== REQUIRED_PYTHON_MAJOR) {
        return info.major > REQUIRED_PYTHON_MAJOR;
    }
    return info.minor >= REQUIRED_PYTHON_MINOR;
}
|
|
91
|
-
/**
 * Install Ollama (macOS/Linux only).
 *
 * On Windows there is no scripted installer, so the download page is opened
 * in the browser and false is returned. Elsewhere the official install
 * script is piped to sh with inherited stdio so the user can answer the
 * sudo password prompt.
 *
 * @param {(p: object) => void} onProgress - Progress callback.
 * @returns {Promise<boolean>} True when the installer exited successfully.
 */
export async function installOllama(onProgress) {
    if (process.platform === "win32") {
        onProgress({
            stage: "error",
            message: "Auto-install not supported on Windows",
            error: "Please download Ollama from https://ollama.com/download",
        });
        await open("https://ollama.com/download");
        return false;
    }
    try {
        onProgress({
            stage: "download",
            message: "Installing Ollama (you may be prompted for your password)...",
        });
        // stdio is inherited so the sudo prompt reaches the user's terminal.
        await new Promise((resolve, reject) => {
            const installer = spawn("bash", ["-c", "curl -fsSL https://ollama.com/install.sh | sh"], {
                stdio: "inherit",
            });
            installer.on("error", reject);
            installer.on("close", (code) => {
                if (code === 0) {
                    resolve();
                }
                else {
                    reject(new Error(`Installation exited with code ${code}`));
                }
            });
        });
        onProgress({
            stage: "complete",
            message: "Ollama installed successfully!",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({
            stage: "error",
            message: "Installation failed",
            error: message,
        });
        return false;
    }
}
|
|
143
|
-
/**
 * Pull a model with Ollama.
 *
 * Streams `ollama pull` output (stdout and stderr) back through the progress
 * callback. Resolves true/false rather than throwing.
 *
 * @param {string} model - Model name to pull (e.g. "llama3").
 * @param {(p: object) => void} onProgress - Progress callback.
 * @returns {Promise<boolean>} True when the pull completed successfully.
 */
export async function pullOllamaModel(model, onProgress) {
    try {
        onProgress({ stage: "pull", message: `Pulling ${model}...` });
        // spawn gives real-time output; both streams are forwarded identically.
        return await new Promise((resolve) => {
            const puller = spawn("ollama", ["pull", model], {
                stdio: ["ignore", "pipe", "pipe"],
            });
            const forward = (data) => {
                const line = data.toString().trim();
                if (line) {
                    onProgress({ stage: "pull", message: line });
                }
            };
            puller.stdout?.on("data", forward);
            puller.stderr?.on("data", forward);
            puller.on("close", (code) => {
                if (code === 0) {
                    onProgress({
                        stage: "complete",
                        message: `${model} ready!`,
                        complete: true,
                    });
                    resolve(true);
                }
                else {
                    onProgress({
                        stage: "error",
                        message: "Pull failed",
                        error: `Exit code: ${code}`,
                    });
                    resolve(false);
                }
            });
            puller.on("error", (error) => {
                onProgress({
                    stage: "error",
                    message: "Pull failed",
                    error: error.message,
                });
                resolve(false);
            });
        });
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({ stage: "error", message: "Pull failed", error: message });
        return false;
    }
}
|
|
200
|
-
/**
 * Open the LM Studio download page in the default browser.
 *
 * LM Studio has no scripted installer, so the user is directed to install it
 * and enable its local server manually.
 *
 * @param {(p: object) => void} onProgress - Progress callback.
 * @returns {Promise<boolean>} Always true once the page has been opened.
 */
export async function installLMStudio(onProgress) {
    onProgress({
        stage: "browser",
        message: "Opening LM Studio download page...",
    });
    await open("https://lmstudio.ai/download");
    onProgress({
        stage: "manual",
        message: "Please install LM Studio and enable the local server",
        complete: true,
    });
    return true;
}
|
|
216
|
-
/**
 * Install Stable Diffusion Forge Neo by cloning the repository.
 *
 * Clones the "neo" branch of sd-webui-forge-classic into the target
 * directory (default: ~/sd-webui-forge-neo) and records the install path in
 * the config. When the destination already exists (git exits 128), the path
 * is still saved and the install is reported as complete.
 *
 * @param {(p: object) => void} onProgress - Progress callback.
 * @param {string} [installDir] - Optional custom install directory.
 * @returns {Promise<boolean>} True on success (including "already installed").
 */
export async function installStableDiffusion(onProgress, installDir) {
    const targetDir = installDir || path.join(os.homedir(), "sd-webui-forge-neo");
    try {
        onProgress({
            stage: "clone",
            message: "Cloning Stable Diffusion Forge Neo repository (this may take a while)...",
        });
        const cloneArgs = [
            "clone",
            "--progress",
            "--branch",
            "neo",
            "https://github.com/Haoming02/sd-webui-forge-classic.git",
            targetDir,
        ];
        // stdio is inherited so git's own progress output stays visible.
        await new Promise((resolve, reject) => {
            const git = spawn("git", cloneArgs, { stdio: "inherit" });
            git.on("error", reject);
            git.on("close", (code) => {
                if (code === 0) {
                    resolve();
                }
                else {
                    reject(new Error(`Git clone exited with code ${code}`));
                }
            });
        });
        // Persist the install path so later commands can find the checkout.
        setStableDiffusionInstallPath(targetDir);
        onProgress({
            stage: "complete",
            message: `Installed to ${targetDir}`,
            complete: true,
        });
        onProgress({
            stage: "info",
            message: `To start: cd "${targetDir}" && python launch.py --api`,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        // git exits with 128 when the destination exists; treat as installed.
        if (message.includes("already exists") || message.includes("code 128")) {
            // Still save the path even if already installed.
            setStableDiffusionInstallPath(targetDir);
            onProgress({
                stage: "complete",
                message: "Already installed!",
                complete: true,
            });
            return true;
        }
        onProgress({
            stage: "error",
            message: "Installation failed",
            error: message,
        });
        return false;
    }
}
|
|
283
|
-
/**
 * Check if the default SDXL base model already exists in the configured
 * Stable Diffusion install.
 *
 * @returns {Promise<boolean>} True when sd_xl_base_1.0.safetensors is on disk;
 *   false when no install path has been configured.
 */
export async function hasDefaultSdModel() {
    const installPath = getStableDiffusionInstallPath();
    if (!installPath) {
        return false;
    }
    const modelFile = path.join(installPath, "models", "Stable-diffusion", "sd_xl_base_1.0.safetensors");
    return fs.existsSync(modelFile);
}
|
|
293
|
-
/**
 * Download a default SDXL model for Stable Diffusion.
 *
 * Prefers wget (nicer progress output) and falls back to curl; both run with
 * inherited stdio so the progress bar is visible. On failure the partial
 * file is removed so the "already exists" fast path cannot later mistake an
 * incomplete download for a complete model.
 *
 * @param {(p: object) => void} onProgress - Progress callback.
 * @returns {Promise<boolean>} True when the model is present when done.
 */
export async function downloadSdModel(onProgress) {
    const installPath = getStableDiffusionInstallPath();
    if (!installPath) {
        onProgress({
            stage: "error",
            message: "Stable Diffusion install path not found",
            error: "Please install Stable Diffusion first",
        });
        return false;
    }
    const modelsDir = path.join(installPath, "models", "Stable-diffusion");
    const modelUrl = "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors";
    const modelFile = path.join(modelsDir, "sd_xl_base_1.0.safetensors");
    try {
        fs.mkdirSync(modelsDir, { recursive: true });
        // Nothing to do if the model is already on disk.
        if (fs.existsSync(modelFile)) {
            onProgress({
                stage: "complete",
                message: "SDXL base model already exists!",
                complete: true,
            });
            return true;
        }
        onProgress({
            stage: "download",
            message: "Downloading SDXL base model (~6.5 GB)...",
        });
        // wget gives better progress on Linux/macOS; curl exists on macOS,
        // Linux, and Windows 10+. Detect wget via the platform lookup command.
        const lookupCmd = process.platform === "win32" ? "where" : "which";
        const wgetAvailable = await new Promise((resolve) => {
            exec(`${lookupCmd} wget`, (error) => resolve(!error));
        });
        return await new Promise((resolve) => {
            const downloader = wgetAvailable
                ? spawn("wget", ["-c", "--show-progress", "-O", modelFile, modelUrl], { stdio: "inherit" })
                : spawn("curl", ["-L", "-C", "-", "--progress-bar", "-o", modelFile, modelUrl], { stdio: "inherit" });
            downloader.on("close", (code) => {
                if (code === 0) {
                    onProgress({
                        stage: "complete",
                        message: "SDXL base model downloaded!",
                        complete: true,
                    });
                    resolve(true);
                    return;
                }
                onProgress({
                    stage: "error",
                    message: "Download failed",
                    error: `Exit code ${code}. The model may require accepting the license at huggingface.co first. You can also download manually from Civitai.`,
                });
                // Clean up the partial file; otherwise the exists-check above
                // would report an incomplete model as complete on the next run.
                try {
                    fs.unlinkSync(modelFile);
                }
                catch {
                    /* ignore */
                }
                resolve(false);
            });
            downloader.on("error", (err) => {
                onProgress({
                    stage: "error",
                    message: "Download failed",
                    error: err.message,
                });
                resolve(false);
            });
        });
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({
            stage: "error",
            message: "Failed to download model",
            error: message,
        });
        return false;
    }
}
|
|
386
|
-
/**
 * Start Stable Diffusion server.
 *
 * Note: SD runs in the foreground and takes over the terminal; we start it
 * with inherited stdio and poll http://127.0.0.1:7860 in the background until
 * it responds. Before launching we (1) verify Python >= 3.13, (2) probe
 * nvidia-smi to pick a compatible PyTorch CUDA build, and (3) recreate the
 * venv if its installed PyTorch targets the wrong CUDA version.
 *
 * @param {(p: object) => void} onProgress - Progress callback; receives
 *   {stage, message, error?, complete?} objects.
 * @returns {Promise<boolean>} True when the server process exited with 0
 *   (i.e. was stopped cleanly); false on any validation or launch failure.
 */
export async function startStableDiffusion(onProgress) {
    const installPath = getStableDiffusionInstallPath();
    if (!installPath) {
        onProgress({
            stage: "error",
            message: "Stable Diffusion install path not found",
            error: "Please install Stable Diffusion first",
        });
        return false;
    }
    try {
        // Check Python version before starting
        const pyInfo = await getPythonVersion();
        if (!pyInfo) {
            onProgress({
                stage: "error",
                message: "Python not found",
                error: [
                    chalk.white("Python is not installed. Forge Neo requires Python 3.13+."),
                    chalk.cyan("Install from https://www.python.org/downloads/"),
                ].join("\n"),
            });
            return false;
        }
        if (!isPythonVersionOk(pyInfo)) {
            onProgress({
                stage: "error",
                message: `Python ${pyInfo.version} is too old`,
                error: [
                    chalk.white(`Forge Neo requires Python ${REQUIRED_PYTHON_MAJOR}.${REQUIRED_PYTHON_MINOR}+. You have ${pyInfo.version}.`),
                    "",
                    chalk.yellow("How to fix:"),
                    chalk.white(" Install Python 3.13: ") +
                        chalk.cyan("https://www.python.org/downloads/"),
                    chalk.white(" If using pyenv: ") +
                        chalk.cyan("pyenv install 3.13.12 && pyenv global 3.13.12"),
                    chalk.white(" Then delete the old venv: ") +
                        chalk.cyan(`rm -rf ${installPath}/venv`),
                ].join("\n"),
            });
            return false;
        }
        onProgress({
            stage: "start",
            message: `Starting Stable Diffusion server (Python ${pyInfo.version})...`,
        });
        // Small delay to let the message display
        await new Promise((resolve) => setTimeout(resolve, 1000));
        // Determine the launch method based on platform
        // Neo fork removed .sh scripts; on Linux/macOS we create/activate venv and run launch.py
        const isWindows = process.platform === "win32";
        // Detect the driver's max CUDA version so we can pick the right PyTorch build.
        // Forge Neo defaults to cu130 which requires very new drivers.
        let cudaEnv = {};
        try {
            // NOTE(review): smiOut (driver version) is captured but never used;
            // only the CUDA version parsed from the full nvidia-smi header matters.
            const { stdout: smiOut } = await execAsync("nvidia-smi --query-gpu=driver_version --format=csv,noheader");
            // nvidia-smi also shows CUDA version in its header; parse from full output
            const { stdout: smiFullOut } = await execAsync("nvidia-smi");
            const cudaMatch = smiFullOut.match(/CUDA Version:\s*(\d+)\.(\d+)/);
            if (cudaMatch) {
                const driverCudaMajor = parseInt(cudaMatch[1]);
                const driverCudaMinor = parseInt(cudaMatch[2]);
                // Map driver CUDA capability to the best compatible PyTorch CUDA build
                // Available PyTorch CUDA builds: cu118, cu121, cu124, cu126, cu128, cu130
                let cuTag;
                if (driverCudaMajor >= 13) {
                    cuTag = "cu130";
                }
                else if (driverCudaMajor === 12 && driverCudaMinor >= 8) {
                    cuTag = "cu128";
                }
                else if (driverCudaMajor === 12 && driverCudaMinor >= 6) {
                    cuTag = "cu126";
                }
                else if (driverCudaMajor === 12 && driverCudaMinor >= 4) {
                    cuTag = "cu124";
                }
                else if (driverCudaMajor === 12) {
                    cuTag = "cu121";
                }
                else {
                    // CUDA 11.x or older drivers fall back to the oldest build.
                    cuTag = "cu118";
                }
                // Only override if driver doesn't support the default cu130
                if (cuTag !== "cu130") {
                    onProgress({
                        stage: "start",
                        message: `Driver supports CUDA ${driverCudaMajor}.${driverCudaMinor}, using PyTorch with ${cuTag}`,
                    });
                    const torchIndexUrl = `https://download.pytorch.org/whl/${cuTag}`;
                    // TORCH_INDEX_URL / TORCH_COMMAND are read by the webui launch
                    // scripts to decide which PyTorch wheel to install.
                    cudaEnv = {
                        TORCH_INDEX_URL: torchIndexUrl,
                        TORCH_COMMAND: `pip install torch torchvision --extra-index-url ${torchIndexUrl}`,
                    };
                    await new Promise((r) => setTimeout(r, 1000));
                }
            }
        }
        catch {
            // nvidia-smi not available or failed - let SD use its defaults
        }
        // Merge CUDA env overrides with current environment
        const env = { ...process.env, ...cudaEnv };
        // If we have CUDA env overrides and an existing venv (Linux/macOS),
        // check if the venv has wrong PyTorch CUDA bindings and needs recreation
        if (!isWindows && cudaEnv.TORCH_INDEX_URL) {
            const venvPythonCheck = path.join(installPath, "venv", "bin", "python");
            if (fs.existsSync(venvPythonCheck)) {
                try {
                    const { stdout: torchCheck } = await execAsync(`"${venvPythonCheck}" -c "import torch; print(torch.version.cuda or 'none')"`);
                    const installedCuda = torchCheck.trim();
                    const targetCu = cudaEnv.TORCH_INDEX_URL.split("/").pop() || "";
                    // e.g. installedCuda="13.0" and targetCu="cu121" -> mismatch
                    // NOTE(review): "13.0" maps to "cu13" here (trailing-zero strip),
                    // which can never equal a target tag in this branch — so a cu130
                    // venv is always recreated when a lower tag is targeted;
                    // presumably intended, but worth confirming.
                    const installedCuTag = "cu" + installedCuda.replace(".", "").replace(/0$/, "");
                    if (installedCuTag !== targetCu) {
                        onProgress({
                            stage: "start",
                            message: `Existing venv has PyTorch for CUDA ${installedCuda}, recreating with ${targetCu}...`,
                        });
                        await new Promise((r) => setTimeout(r, 1000));
                        // Remove the old venv so the launch script recreates it
                        fs.rmSync(path.join(installPath, "venv"), {
                            recursive: true,
                            force: true,
                        });
                    }
                }
                catch {
                    // torch not installed in venv yet - no need to recreate
                }
            }
        }
        // Run in foreground with inherited stdio - SD needs a proper terminal
        // This will block until the server is killed
        return new Promise((resolve) => {
            let proc;
            if (isWindows) {
                // Windows: use webui-user.bat which handles venv and launches
                proc = spawn("cmd", ["/c", "webui-user.bat"], {
                    cwd: installPath,
                    stdio: "inherit",
                    env,
                });
            }
            else {
                // Linux/macOS: create venv if needed using the correct python, activate, and run launch.py
                const venvDir = path.join(installPath, "venv");
                const venvPython = path.join(venvDir, "bin", "python");
                const launchScript = [
                    `if [ ! -f "${venvPython}" ]; then`,
                    ` echo "Creating virtual environment with Python ${pyInfo.version}..."`,
                    ` "${pyInfo.executable}" -m venv "${venvDir}"`,
                    `fi`,
                    `source "${venvDir}/bin/activate"`,
                    `python launch.py --api`,
                ].join("\n");
                proc = spawn("bash", ["-c", launchScript], {
                    cwd: installPath,
                    stdio: "inherit",
                    env,
                });
            }
            // Start polling for server readiness in the background
            // (fire-and-forget: the returned promise resolves on process exit,
            // not on readiness; the poller only prints a "ready" banner).
            const pollForReady = async () => {
                const maxWaitTime = 15 * 60 * 1000; // 15 minutes for first run
                const pollInterval = 5000;
                const startTime = Date.now();
                while (Date.now() - startTime < maxWaitTime) {
                    await new Promise((r) => setTimeout(r, pollInterval));
                    try {
                        const response = await fetch("http://127.0.0.1:7860/sdapi/v1/sd-models", { signal: AbortSignal.timeout(3000) });
                        if (response.ok) {
                            console.log("\n\n✓ Stable Diffusion server is ready!\n");
                            console.log(chalk.yellow("Please leave this terminal running and open another terminal to run mindstudio-local.\n"));
                            console.log("Press Ctrl+C to stop the server and return to the menu.\n");
                            return;
                        }
                    }
                    catch {
                        // Not ready yet
                    }
                }
            };
            pollForReady();
            proc.on("close", (code) => {
                if (code === 0) {
                    onProgress({
                        stage: "complete",
                        message: "Stable Diffusion server stopped.",
                        complete: true,
                    });
                    resolve(true);
                }
                else {
                    onProgress({
                        stage: "error",
                        message: "Stable Diffusion failed to start",
                        error: [
                            `Process exited with code ${code}. Check the output above for details.`,
                            "",
                            chalk.yellow("Common fixes:"),
                            chalk.cyan(` - Delete the venv and retry: `) +
                                chalk.cyan(`rm -rf ${installPath}/venv`),
                            chalk.cyan(" - Ensure Python 3.13+ is installed"),
                            chalk.cyan(" - Ensure NVIDIA drivers and CUDA are up to date"),
                        ].join("\n"),
                    });
                    resolve(false);
                }
            });
            proc.on("error", (err) => {
                onProgress({
                    stage: "error",
                    message: "Failed to start",
                    error: err.message,
                });
                resolve(false);
            });
        });
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({
            stage: "error",
            message: "Failed to start Stable Diffusion",
            error: message,
        });
        return false;
    }
}
|
|
621
|
-
/**
 * Execute a shell command that may require elevated privileges
 * (cross-platform): `cmd /c` on Windows, `sudo bash -c` elsewhere.
 *
 * Resolves true for exit codes 0 and 1 — kill-style utilities return 1
 * when no matching process exists, which is fine here. Resolves false on
 * spawn failure or any other exit code.
 *
 * @param {string} command - shell command line to run
 * @returns {Promise<boolean>} whether the command succeeded (or found nothing)
 */
async function runKillCommand(command) {
    return new Promise((resolve) => {
        const child = process.platform === "win32"
            ? spawn("cmd", ["/c", command], { stdio: "inherit" })
            : spawn("sudo", ["bash", "-c", command], { stdio: "inherit" });
        child.on("close", (exitCode) => resolve(exitCode === 0 || exitCode === 1));
        child.on("error", () => resolve(false));
    });
}
|
|
642
|
-
/**
 * Stop the Stable Diffusion WebUI server by killing its processes and
 * then probing the API on port 7860 to confirm it is gone.
 *
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true when the server is confirmed stopped
 */
export async function stopStableDiffusion(onProgress) {
    try {
        onProgress({
            stage: "start",
            message: "Stopping Stable Diffusion server...",
        });
        const onWindows = process.platform === "win32";
        if (!onWindows) {
            // sudo may prompt for credentials on POSIX systems
            onProgress({
                stage: "info",
                message: "You may be prompted for your password...",
            });
        }
        // Kill python processes running webui
        const killCommands = onWindows
            ? [
                'taskkill /F /FI "IMAGENAME eq python.exe" /FI "WINDOWTITLE eq *launch*" 2>nul || exit 0',
                'taskkill /F /FI "IMAGENAME eq python.exe" /FI "WINDOWTITLE eq *webui*" 2>nul || exit 0',
            ]
            : [
                "pkill -f 'python.*launch.py' || true",
                "pkill -f 'python.*webui.py' || true",
                "pkill -f 'stable-diffusion-webui' || true",
            ];
        for (const cmd of killCommands) {
            await runKillCommand(cmd);
        }
        // Wait for processes to terminate
        await new Promise((resolve) => setTimeout(resolve, 2000));
        // Probe the API: a successful response means something still listens
        try {
            const response = await fetch("http://127.0.0.1:7860/sdapi/v1/sd-models", {
                signal: AbortSignal.timeout(2000),
            });
            if (response.ok) {
                onProgress({
                    stage: "complete",
                    message: "Server may still be running. Try killing the process manually.",
                    complete: true,
                });
                return false;
            }
        }
        catch {
            // Connection refused = server is stopped
        }
        onProgress({
            stage: "complete",
            message: "Stable Diffusion server stopped!",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({ stage: "error", message: "Failed to stop", error: message });
        return false;
    }
}
|
|
700
|
-
/**
 * Launch the Ollama server as a detached background process and poll its
 * HTTP API once to report whether it is already serving.
 *
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true unless spawning itself failed
 */
export async function startOllama(onProgress) {
    try {
        onProgress({ stage: "start", message: "Starting Ollama server..." });
        // Detach so the server outlives this CLI process
        const child = spawn("ollama", ["serve"], {
            detached: true,
            stdio: "ignore",
        });
        child.unref();
        // Give the daemon a moment to bind its port
        await new Promise((resolve) => setTimeout(resolve, 2000));
        // Check if it's running
        try {
            const response = await fetch("http://localhost:11434/api/tags", {
                signal: AbortSignal.timeout(2000),
            });
            if (response.ok) {
                onProgress({
                    stage: "complete",
                    message: "Ollama server started!",
                    complete: true,
                });
                return true;
            }
        }
        catch {
            // Fall through
        }
        // API not reachable yet; assume the daemon is still starting up
        onProgress({
            stage: "complete",
            message: "Ollama starting in background...",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({ stage: "error", message: "Failed to start", error: message });
        return false;
    }
}
|
|
744
|
-
/**
 * Stop the Ollama server, applying several kill strategies in order,
 * then verify via the HTTP API that it is no longer responding.
 *
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true when the server is confirmed stopped
 */
export async function stopOllama(onProgress) {
    try {
        onProgress({ stage: "start", message: "Stopping Ollama server..." });
        const onWindows = process.platform === "win32";
        if (!onWindows) {
            onProgress({
                stage: "info",
                message: "You may be prompted for your password...",
            });
        }
        const killCommands = onWindows
            ? [
                "taskkill /F /IM ollama.exe 2>nul || exit 0",
                'taskkill /F /FI "IMAGENAME eq ollama_runners*" 2>nul || exit 0',
            ]
            : [
                // 1. Try systemctl first (if running as a service)
                "systemctl stop ollama 2>/dev/null || true",
                // 2. pkill with -f flag to match full command line
                "pkill -f 'ollama serve' || true",
                // 3. Try killall
                "killall ollama 2>/dev/null || true",
                // 4. Try pkill without -f
                "pkill ollama || true",
            ];
        for (const cmd of killCommands) {
            await runKillCommand(cmd);
        }
        // Wait a moment for process to terminate
        await new Promise((resolve) => setTimeout(resolve, 1500));
        // Verify it's stopped by checking if the API is still responding
        try {
            const response = await fetch("http://localhost:11434/api/tags", {
                signal: AbortSignal.timeout(2000),
            });
            if (response.ok) {
                // Still running
                onProgress({
                    stage: "complete",
                    message: "Ollama may still be running. Check with: ps aux | grep ollama",
                    complete: true,
                });
                return false;
            }
        }
        catch {
            // Connection refused = server is stopped
        }
        onProgress({
            stage: "complete",
            message: "Ollama server stopped!",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({ stage: "error", message: "Failed to stop", error: message });
        return false;
    }
}
|
|
804
|
-
// ============================================
|
|
805
|
-
// ComfyUI Installation & Management
|
|
806
|
-
// ============================================
|
|
807
|
-
/**
 * Install ComfyUI: clone the repository, create a Python virtual
 * environment, and pip-install its requirements — adding a CUDA-matched
 * PyTorch wheel index when `nvidia-smi` reports a driver CUDA version.
 *
 * @param {string} installPath - directory to clone ComfyUI into
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true on success, false on any failure
 */
export async function installComfyUI(installPath, onProgress) {
    // Promisified spawn with inherited stdio; rejects with
    // "<label> failed with code N" on a non-zero exit.
    const run = (cmd, args, options, label) => new Promise((resolve, reject) => {
        const proc = spawn(cmd, args, { stdio: "inherit", ...options });
        proc.on("close", (code) => {
            if (code === 0)
                resolve();
            else
                reject(new Error(`${label} failed with code ${code}`));
        });
        proc.on("error", reject);
    });
    try {
        onProgress({ stage: "start", message: "Cloning ComfyUI repository..." });
        await run("git", ["clone", "https://github.com/comfyanonymous/ComfyUI.git", installPath], {}, "git clone");
        onProgress({ stage: "venv", message: "Creating virtual environment..." });
        const pyInfo = await getPythonVersion();
        const pythonCmd = pyInfo?.executable || "python3";
        await run(pythonCmd, ["-m", "venv", path.join(installPath, "venv")], {}, "venv creation");
        onProgress({
            stage: "deps",
            message: "Installing dependencies (this may take a while)...",
        });
        // Detect driver-supported CUDA version so pip pulls a matching
        // PyTorch build from the right wheel index.
        let pipExtraArgs = [];
        try {
            const { stdout: smiFullOut } = await execAsync("nvidia-smi");
            const cudaMatch = smiFullOut.match(/CUDA Version:\s*(\d+)\.(\d+)/);
            if (cudaMatch) {
                const driverCudaMajor = Number.parseInt(cudaMatch[1], 10);
                const driverCudaMinor = Number.parseInt(cudaMatch[2], 10);
                let cuTag;
                if (driverCudaMajor >= 13)
                    cuTag = "cu130";
                else if (driverCudaMajor === 12 && driverCudaMinor >= 8)
                    cuTag = "cu128";
                else if (driverCudaMajor === 12 && driverCudaMinor >= 6)
                    cuTag = "cu126";
                else if (driverCudaMajor === 12 && driverCudaMinor >= 4)
                    cuTag = "cu124";
                else if (driverCudaMajor === 12)
                    cuTag = "cu121";
                else
                    cuTag = "cu118";
                pipExtraArgs = ["--extra-index-url", `https://download.pytorch.org/whl/${cuTag}`];
                onProgress({
                    stage: "deps",
                    message: `Driver supports CUDA ${driverCudaMajor}.${driverCudaMinor}, using PyTorch with ${cuTag}`,
                });
            }
        }
        catch {
            // No NVIDIA GPU
        }
        const isWindows = process.platform === "win32";
        const pipPath = isWindows
            ? path.join(installPath, "venv", "Scripts", "pip")
            : path.join(installPath, "venv", "bin", "pip");
        await run(pipPath, [
            "install",
            "-r",
            path.join(installPath, "requirements.txt"),
            ...pipExtraArgs,
        ], { cwd: installPath }, "pip install");
        // Persist the install location so other commands can find ComfyUI
        setComfyUIInstallPath(installPath);
        onProgress({
            stage: "complete",
            message: "ComfyUI installed successfully!",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({
            stage: "error",
            message: "ComfyUI installation failed",
            error: message,
        });
        return false;
    }
}
|
|
911
|
-
/**
 * Clone a custom-node repository into ComfyUI's custom_nodes directory
 * and pip-install its requirements.txt when one exists. Cloning is
 * skipped when the target directory already exists.
 *
 * @param {string} installPath - ComfyUI install directory
 * @param {string} repoUrl - git URL of the custom node
 * @param {string} dirName - directory name under custom_nodes
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true (rejects on clone/pip failure)
 */
async function installCustomNode(installPath, repoUrl, dirName, onProgress) {
    const customNodesDir = path.join(installPath, "custom_nodes");
    if (!fs.existsSync(customNodesDir)) {
        fs.mkdirSync(customNodesDir, { recursive: true });
    }
    const nodeDir = path.join(customNodesDir, dirName);
    if (fs.existsSync(nodeDir)) {
        onProgress({
            stage: "complete",
            message: `${dirName} already installed, skipping.`,
        });
        return true;
    }
    onProgress({
        stage: "start",
        message: `Installing ${dirName}...`,
    });
    await new Promise((resolve, reject) => {
        const cloneProc = spawn("git", ["clone", repoUrl, nodeDir], {
            stdio: "inherit",
        });
        cloneProc.on("close", (code) => code === 0
            ? resolve()
            : reject(new Error(`git clone ${dirName} failed with code ${code}`)));
        cloneProc.on("error", reject);
    });
    // Install the node's own Python dependencies into ComfyUI's venv
    const reqFile = path.join(nodeDir, "requirements.txt");
    if (fs.existsSync(reqFile)) {
        const pipPath = process.platform === "win32"
            ? path.join(installPath, "venv", "Scripts", "pip")
            : path.join(installPath, "venv", "bin", "pip");
        await new Promise((resolve, reject) => {
            const pipProc = spawn(pipPath, ["install", "-r", reqFile], {
                stdio: "inherit",
            });
            pipProc.on("close", (code) => code === 0
                ? resolve()
                : reject(new Error(`pip install ${dirName} deps failed with code ${code}`)));
            pipProc.on("error", reject);
        });
    }
    return true;
}
|
|
965
|
-
/**
 * Install the custom nodes ComfyUI needs for video generation:
 * - ComfyUI-LTXVideo (LTX-Video support)
 * - ComfyUI-VideoHelperSuite (MP4 output)
 *
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true on success
 */
export async function installComfyUICustomNodes(onProgress) {
    const installPath = getComfyUIInstallPath();
    if (!installPath) {
        onProgress({
            stage: "error",
            message: "ComfyUI not installed",
            error: "Install ComfyUI first",
        });
        return false;
    }
    const nodes = [
        ["https://github.com/Lightricks/ComfyUI-LTXVideo.git", "ComfyUI-LTXVideo"],
        ["https://github.com/Kosinkadink/ComfyUI-VideoHelperSuite.git", "ComfyUI-VideoHelperSuite"],
    ];
    try {
        for (const [repoUrl, dirName] of nodes) {
            await installCustomNode(installPath, repoUrl, dirName, onProgress);
        }
        onProgress({
            stage: "complete",
            message: "Custom nodes installed!",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({
            stage: "error",
            message: "Failed to install custom nodes",
            error: message,
        });
        return false;
    }
}
|
|
1000
|
-
/**
 * Start the ComfyUI server in the foreground (terminal takeover): the
 * returned promise settles only when the server process exits.
 *
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true when the server exited cleanly
 */
export async function startComfyUI(onProgress) {
    const installPath = getComfyUIInstallPath();
    if (!installPath) {
        onProgress({
            stage: "error",
            message: "ComfyUI not installed",
            error: "Install ComfyUI first",
        });
        return false;
    }
    try {
        onProgress({ stage: "start", message: "Starting ComfyUI server..." });
        await new Promise((r) => setTimeout(r, 500));
        return new Promise((resolve) => {
            let proc;
            if (process.platform === "win32") {
                // Windows: invoke the venv's python.exe directly
                const venvPython = path.join(installPath, "venv", "Scripts", "python.exe");
                proc = spawn(venvPython, ["main.py", "--listen", "--port", "8188"], {
                    cwd: installPath,
                    stdio: "inherit",
                });
            }
            else {
                // POSIX: activate the venv inside a bash shell
                const venvDir = path.join(installPath, "venv");
                const launchScript = [
                    `source "${venvDir}/bin/activate"`,
                    `python main.py --listen --port 8188`,
                ].join("\n");
                proc = spawn("bash", ["-c", launchScript], {
                    cwd: installPath,
                    stdio: "inherit",
                });
            }
            proc.on("close", (code) => {
                if (code === 0) {
                    onProgress({
                        stage: "complete",
                        message: "ComfyUI server stopped.",
                        complete: true,
                    });
                    resolve(true);
                    return;
                }
                onProgress({
                    stage: "error",
                    message: "ComfyUI failed to start",
                    error: [
                        `Process exited with code ${code}. Check the output above.`,
                        "",
                        chalk.yellow("Common fixes:"),
                        chalk.white(" - Delete the venv and retry: ") +
                            chalk.cyan(`rm -rf ${installPath}/venv`),
                        chalk.white(" - Ensure Python 3.10+ is installed"),
                        chalk.white(" - Ensure NVIDIA drivers and CUDA are up to date"),
                    ].join("\n"),
                });
                resolve(false);
            });
            proc.on("error", (err) => {
                onProgress({
                    stage: "error",
                    message: "Failed to start",
                    error: err.message,
                });
                resolve(false);
            });
        });
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({ stage: "error", message: "Failed to start", error: message });
        return false;
    }
}
|
|
1079
|
-
/**
 * Stop the ComfyUI server and verify via its /system_stats endpoint
 * that nothing is still responding on port 8188.
 *
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true when the server is confirmed stopped
 */
export async function stopComfyUI(onProgress) {
    try {
        onProgress({ stage: "start", message: "Stopping ComfyUI server..." });
        const isWindows = process.platform === "win32";
        if (!isWindows) {
            // Consistency fix: siblings (stopStableDiffusion/stopOllama)
            // report this prompt with stage "info", not "start".
            onProgress({
                stage: "info",
                message: "You may be prompted for your password...",
            });
        }
        if (isWindows) {
            await runKillCommand('taskkill /F /FI "IMAGENAME eq python.exe" /FI "WINDOWTITLE eq *ComfyUI*" 2>nul || exit 0');
        }
        else {
            await runKillCommand("pkill -f 'python.*main.py.*--port 8188' || true");
            await runKillCommand("pkill -f 'python.*main.py.*comfyui' || true");
        }
        // Give the processes a moment to terminate
        await new Promise((resolve) => setTimeout(resolve, 1500));
        // Verify
        try {
            const response = await fetch("http://127.0.0.1:8188/system_stats", {
                signal: AbortSignal.timeout(2000),
            });
            if (response.ok) {
                onProgress({
                    stage: "complete",
                    message: "ComfyUI may still be running.",
                    complete: true,
                });
                return false;
            }
        }
        catch {
            // Stopped
        }
        onProgress({
            stage: "complete",
            message: "ComfyUI server stopped!",
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({ stage: "error", message: "Failed to stop", error: message });
        return false;
    }
}
|
|
1130
|
-
// Catalog of downloadable ComfyUI video models.
// Each entry lists the files required for that model; `dest` is a path
// relative to the ComfyUI install directory, and `sizeLabel` is a
// display-only approximation of the download size.
const COMFYUI_MODELS = [
    {
        id: "ltx-video",
        label: "LTX-Video 2B",
        files: [
            // Main checkpoint
            {
                url: "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.5.safetensors",
                dest: "models/checkpoints",
                filename: "ltx-video-2b-v0.9.5.safetensors",
                sizeLabel: "~6 GB",
            },
            // T5 text encoder (shared with FLUX workflows)
            {
                url: "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors",
                dest: "models/text_encoders",
                filename: "t5xxl_fp16.safetensors",
                sizeLabel: "~10 GB",
            },
        ],
    },
    {
        id: "wan2.1-t2v",
        label: "Wan 2.1 T2V 1.3B",
        files: [
            // Diffusion model weights
            {
                url: "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/wan2.1_t2v_1.3B_fp16.safetensors",
                dest: "models/diffusion_models",
                filename: "wan2.1_t2v_1.3B_fp16.safetensors",
                sizeLabel: "~2.6 GB",
            },
            // UMT5 text encoder
            {
                url: "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
                dest: "models/text_encoders",
                filename: "umt5_xxl_fp8_e4m3fn_scaled.safetensors",
                sizeLabel: "~5 GB",
            },
            // VAE
            {
                url: "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors",
                dest: "models/vae",
                filename: "wan_2.1_vae.safetensors",
                sizeLabel: "~0.3 GB",
            },
        ],
    },
];
|
|
1174
|
-
/**
 * Whether every file of the given ComfyUI video model already exists on
 * disk under the configured install directory.
 *
 * @param {string} modelId - one of the COMFYUI_MODELS ids
 * @returns {boolean} true only if all files are present
 */
export function hasComfyUIModel(modelId) {
    const installPath = getComfyUIInstallPath();
    if (!installPath)
        return false;
    const modelDef = COMFYUI_MODELS.find((m) => m.id === modelId);
    if (!modelDef)
        return false;
    for (const file of modelDef.files) {
        if (!fs.existsSync(path.join(installPath, file.dest, file.filename))) {
            return false;
        }
    }
    return true;
}
|
|
1186
|
-
/**
 * Summarize every known ComfyUI video model: id, label, whether all of
 * its files are downloaded, and a human-readable combined size.
 *
 * @returns {Array<{id: string, label: string, installed: boolean, totalSize: string}>}
 */
export function getComfyUIModelStatus() {
    const statuses = [];
    for (const model of COMFYUI_MODELS) {
        const sizeLabels = model.files.map((f) => f.sizeLabel);
        statuses.push({
            id: model.id,
            label: model.label,
            installed: hasComfyUIModel(model.id),
            totalSize: sizeLabels.join(" + "),
        });
    }
    return statuses;
}
|
|
1197
|
-
/**
 * Download every file of a ComfyUI video model from HuggingFace into the
 * ComfyUI install directory, skipping files that already exist. Uses
 * wget when available (non-Windows), otherwise curl. A partially
 * downloaded file is deleted when its download fails.
 *
 * @param {string} modelId - one of the COMFYUI_MODELS ids
 * @param {(p: object) => void} onProgress - progress callback
 * @returns {Promise<boolean>} true when all files are present afterwards
 */
export async function downloadComfyUIModel(modelId, onProgress) {
    const installPath = getComfyUIInstallPath();
    if (!installPath) {
        onProgress({
            stage: "error",
            message: "ComfyUI not installed",
            error: "Install ComfyUI first",
        });
        return false;
    }
    const modelDef = COMFYUI_MODELS.find((m) => m.id === modelId);
    if (!modelDef) {
        onProgress({
            stage: "error",
            message: "Unknown model",
            error: `Model ID "${modelId}" is not recognized`,
        });
        return false;
    }
    const isWindows = process.platform === "win32";
    // Probe for wget at most once instead of once per file (the original
    // re-ran `which wget` on every loop iteration — loop-invariant work).
    let wgetAvailable = null;
    const hasWget = async () => {
        if (wgetAvailable === null) {
            try {
                await execAsync("which wget");
                wgetAvailable = true;
            }
            catch {
                // No wget
                wgetAvailable = false;
            }
        }
        return wgetAvailable;
    };
    try {
        for (let i = 0; i < modelDef.files.length; i++) {
            const file = modelDef.files[i];
            const destDir = path.join(installPath, file.dest);
            const destFile = path.join(destDir, file.filename);
            if (fs.existsSync(destFile)) {
                onProgress({
                    stage: "download",
                    message: `[${i + 1}/${modelDef.files.length}] ${file.filename} already exists, skipping.`,
                });
                continue;
            }
            fs.mkdirSync(destDir, { recursive: true });
            onProgress({
                stage: "download",
                message: `[${i + 1}/${modelDef.files.length}] Downloading ${file.filename} (${file.sizeLabel})...`,
            });
            let downloadCmd;
            let downloadArgs;
            if (!isWindows && (await hasWget())) {
                downloadCmd = "wget";
                downloadArgs = ["-O", destFile, "--show-progress", file.url];
            }
            else {
                // Windows, or no wget on PATH: fall back to curl
                downloadCmd = "curl";
                downloadArgs = ["-L", "-o", destFile, "--progress-bar", file.url];
            }
            await new Promise((resolve, reject) => {
                const proc = spawn(downloadCmd, downloadArgs, { stdio: "inherit" });
                proc.on("close", (code) => {
                    if (code === 0) {
                        resolve();
                        return;
                    }
                    // Remove the partial file so a retry starts clean
                    if (fs.existsSync(destFile)) {
                        try {
                            fs.unlinkSync(destFile);
                        }
                        catch { /* ignore */ }
                    }
                    reject(new Error(`Download failed with code ${code}`));
                });
                proc.on("error", reject);
            });
        }
        onProgress({
            stage: "complete",
            message: `${modelDef.label} model downloaded successfully!`,
            complete: true,
        });
        return true;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : "Unknown error";
        onProgress({
            stage: "error",
            message: "Download failed",
            error: message,
        });
        return false;
    }
}
|
|
1296
|
-
//# sourceMappingURL=installers.js.map
|