alvin-bot 4.8.0 → 4.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +34 -0
- package/bin/cli.js +100 -24
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -2,6 +2,40 @@
|
|
|
2
2
|
|
|
3
3
|
All notable changes to Alvin Bot are documented here.
|
|
4
4
|
|
|
5
|
+
## [4.8.2] — 2026-04-11
|
|
6
|
+
|
|
7
|
+
### 🐛 Offline setup: wait long enough for Ollama's first-run init
|
|
8
|
+
|
|
9
|
+
Second follow-up to 4.8.0's offline-gemma4 wizard. The 4.8.1 brew path successfully installs Ollama, but the subsequent `ensureOllamaServe()` was reporting "Could not start Ollama daemon" because it only waited **2 seconds** after spawning the server.
|
|
10
|
+
|
|
11
|
+
What actually happens on first run:
|
|
12
|
+
|
|
13
|
+
1. `nohup ollama serve &` spawns the server process
|
|
14
|
+
2. Server generates a fresh SSH keypair at `~/.ollama/id_ed25519` (~1 s)
|
|
15
|
+
3. Server discovers GPUs — on Apple Silicon this initializes Metal (~5 s)
|
|
16
|
+
4. Server starts the runner subprocess (~1 s)
|
|
17
|
+
5. Server begins listening on `127.0.0.1:11434`
|
|
18
|
+
|
|
19
|
+
Total cold-start time: **5–15 seconds**. The old 2-second wait was racing ahead of GPU discovery and failing the next `ollama list` call.
|
|
20
|
+
|
|
21
|
+
Fix: `ensureOllamaServe()` now polls `ollama list` every second for up to **30 seconds**. On success it reports which attempt worked (for visibility). On failure it dumps the last 15 lines of `/tmp/ollama-setup.log` so users can see what Ollama itself said.
|
|
22
|
+
|
|
23
|
+
Caught during the second run of the setup wizard on the fresh test MacBook — brew install succeeded, daemon was actually running (PID confirmed via pgrep), but the wizard bailed out anyway because it gave up too soon.
|
|
24
|
+
|
|
25
|
+
## [4.8.1] — 2026-04-11
|
|
26
|
+
|
|
27
|
+
### 🐛 Offline setup: Homebrew preferred on macOS
|
|
28
|
+
|
|
29
|
+
Caught during the first real run of the new offline setup wizard on a fresh test MacBook: the official Ollama `install.sh` script on macOS wants to drop `Ollama.app` into `/Applications` and start it as a GUI app. That requires a real user session with sudo and completely breaks over SSH or any non-interactive context. The install downloads the 25 MB .app, then fails at `Unable to find application named 'Ollama'` and drops the wizard back to the fallback provider picker.
|
|
30
|
+
|
|
31
|
+
Fix in `bin/cli.js` `installOllama()`:
|
|
32
|
+
|
|
33
|
+
- **macOS preferred path**: if Homebrew is available (`brew --version` succeeds), use `brew install ollama`. Brew installs `/opt/homebrew/bin/ollama` as a CLI binary with no sudo prompt, no /Applications drop, no GUI dependency — works over SSH and in any CI/non-interactive context.
|
|
34
|
+
- **Fallback**: if brew is not installed or `brew install` itself fails, fall through to the official `install.sh` with an explicit heads-up that the installer may prompt for admin password and may only work in a local terminal.
|
|
35
|
+
- **Better error messaging**: on macOS install failure, suggest `brew install ollama` or the `.dmg` from ollama.com/download as alternatives. On Linux, unchanged.
|
|
36
|
+
|
|
37
|
+
Linux always uses `install.sh` — systemd user units work non-interactively there.
|
|
38
|
+
|
|
5
39
|
## [4.8.0] — 2026-04-11
|
|
6
40
|
|
|
7
41
|
### ✨ Offline mode — Gemma 4 E4B via Ollama in the setup wizard
|
package/bin/cli.js
CHANGED
|
@@ -143,53 +143,129 @@ function hasOllama() {
|
|
|
143
143
|
}
|
|
144
144
|
|
|
145
145
|
/**
|
|
146
|
-
*
|
|
146
|
+
* Check whether Homebrew is available on PATH (macOS only path normally).
|
|
147
|
+
*/
|
|
148
|
+
function hasBrew() {
|
|
149
|
+
try {
|
|
150
|
+
execSync("brew --version", { stdio: "pipe" });
|
|
151
|
+
return true;
|
|
152
|
+
} catch {
|
|
153
|
+
return false;
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Install Ollama. On macOS prefers `brew install ollama` because the
|
|
159
|
+
* official install.sh wants to drop Ollama.app into /Applications and
|
|
160
|
+
* start it as a GUI app ā that needs a real user session with sudo and
|
|
161
|
+
* breaks over SSH or in any non-interactive context.
|
|
162
|
+
*
|
|
163
|
+
* Linux always uses the official install.sh (systemd user services work
|
|
164
|
+
* non-interactively).
|
|
165
|
+
*
|
|
147
166
|
* Returns true on success, false on failure.
|
|
148
167
|
*/
|
|
149
168
|
function installOllama() {
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
169
|
+
if (process.platform !== "darwin" && process.platform !== "linux") {
|
|
170
|
+
console.log(" ā Offline mode only supported on macOS and Linux.");
|
|
171
|
+
console.log(" Windows users: download from https://ollama.com/download");
|
|
172
|
+
return false;
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
// macOS preferred path: Homebrew (non-interactive, no sudo, no GUI dependency)
|
|
176
|
+
if (process.platform === "darwin" && hasBrew()) {
|
|
177
|
+
console.log("\nš„ Installing Ollama via Homebrew (non-interactive)...");
|
|
178
|
+
try {
|
|
179
|
+
execSync("brew install ollama", {
|
|
154
180
|
stdio: "inherit",
|
|
155
|
-
timeout: 300_000,
|
|
181
|
+
timeout: 300_000,
|
|
156
182
|
});
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
183
|
+
if (hasOllama()) {
|
|
184
|
+
console.log(" ā
Ollama installed via Homebrew");
|
|
185
|
+
return true;
|
|
186
|
+
}
|
|
187
|
+
console.log(" ā ļø Homebrew finished but `ollama` not on PATH yet.");
|
|
188
|
+
} catch (err) {
|
|
189
|
+
console.log(`\n ā ļø brew install ollama failed: ${err.message || err}`);
|
|
190
|
+
console.log(" Falling back to the official install.sh ā this may need sudo and a GUI session.\n");
|
|
162
191
|
}
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
// Fallback: official installer
|
|
195
|
+
console.log("\nš„ Installing Ollama (official installer)...");
|
|
196
|
+
if (process.platform === "darwin") {
|
|
197
|
+
console.log(" ā ļø Heads-up: on macOS the installer drops Ollama.app into");
|
|
198
|
+
console.log(" /Applications and wants to start it ā this may prompt for");
|
|
199
|
+
console.log(" your admin password and only works in a local terminal,");
|
|
200
|
+
console.log(" not over SSH.\n");
|
|
201
|
+
}
|
|
202
|
+
try {
|
|
203
|
+
execSync("curl -fsSL https://ollama.com/install.sh | sh", {
|
|
204
|
+
stdio: "inherit",
|
|
205
|
+
timeout: 300_000,
|
|
206
|
+
});
|
|
207
|
+
return hasOllama();
|
|
163
208
|
} catch (err) {
|
|
164
209
|
console.log(`\n ā Ollama install failed: ${err.message || err}`);
|
|
165
|
-
|
|
210
|
+
if (process.platform === "darwin") {
|
|
211
|
+
console.log(" On macOS, try one of:");
|
|
212
|
+
console.log(" ⢠brew install ollama (recommended)");
|
|
213
|
+
console.log(" ⢠download the .dmg from https://ollama.com/download");
|
|
214
|
+
} else {
|
|
215
|
+
console.log(" Try manually: curl -fsSL https://ollama.com/install.sh | sh");
|
|
216
|
+
}
|
|
166
217
|
return false;
|
|
167
218
|
}
|
|
168
219
|
}
|
|
169
220
|
|
|
170
221
|
/**
|
|
171
|
-
* Ensure the Ollama daemon is running. Spawns it in the background if not
|
|
222
|
+
* Ensure the Ollama daemon is running. Spawns it in the background if not,
|
|
223
|
+
* then polls for readiness ā first-run initialization can take 5-15 seconds
|
|
224
|
+
* on macOS (SSH key generation + GPU discovery + runner startup).
|
|
172
225
|
*/
|
|
173
226
|
function ensureOllamaServe() {
|
|
227
|
+
// Fast path: already running
|
|
174
228
|
try {
|
|
175
|
-
// 'ollama list' needs the daemon running
|
|
176
229
|
execSync("ollama list", { stdio: "pipe", timeout: 5000 });
|
|
177
230
|
return true;
|
|
178
|
-
} catch {
|
|
179
|
-
|
|
231
|
+
} catch { /* not running ā try to start */ }
|
|
232
|
+
|
|
233
|
+
// Spawn in background (detached via `&` inside a shell)
|
|
234
|
+
try {
|
|
235
|
+
execSync("nohup ollama serve > /tmp/ollama-setup.log 2>&1 &", {
|
|
236
|
+
stdio: "pipe",
|
|
237
|
+
shell: "/bin/sh",
|
|
238
|
+
});
|
|
239
|
+
} catch (err) {
|
|
240
|
+
console.log(`\n ā ļø Could not spawn 'ollama serve': ${err.message || err}`);
|
|
241
|
+
return false;
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
// Poll for readiness ā up to 30 seconds total. First-run init is slow
|
|
245
|
+
// because ollama generates an SSH key pair, discovers GPUs, and starts
|
|
246
|
+
// the runner subprocess.
|
|
247
|
+
const deadlineMs = Date.now() + 30_000;
|
|
248
|
+
let lastError = "";
|
|
249
|
+
let attempt = 0;
|
|
250
|
+
while (Date.now() < deadlineMs) {
|
|
251
|
+
attempt++;
|
|
180
252
|
try {
|
|
181
|
-
execSync("nohup ollama serve > /tmp/ollama-setup.log 2>&1 &", {
|
|
182
|
-
stdio: "pipe",
|
|
183
|
-
shell: "/bin/sh",
|
|
184
|
-
});
|
|
185
|
-
// Give it a moment
|
|
186
|
-
execSync("sleep 2", { stdio: "pipe" });
|
|
187
253
|
execSync("ollama list", { stdio: "pipe", timeout: 5000 });
|
|
254
|
+
if (attempt > 1) console.log(` ā
Ollama daemon ready after ${attempt} attempts`);
|
|
188
255
|
return true;
|
|
189
|
-
} catch {
|
|
190
|
-
|
|
256
|
+
} catch (err) {
|
|
257
|
+
lastError = err instanceof Error ? err.message : String(err);
|
|
191
258
|
}
|
|
259
|
+
// Sleep 1 second between polls via execSync (cross-platform, no promise in sync ctx)
|
|
260
|
+
try { execSync("sleep 1", { stdio: "pipe" }); } catch { /* shouldn't fail */ }
|
|
192
261
|
}
|
|
262
|
+
console.log(` ā ļø Daemon did not become ready within 30s. Last error: ${lastError}`);
|
|
263
|
+
console.log(` Tail of /tmp/ollama-setup.log:`);
|
|
264
|
+
try {
|
|
265
|
+
const tail = execSync("tail -15 /tmp/ollama-setup.log", { encoding: "utf-8" });
|
|
266
|
+
tail.split("\n").forEach((line) => console.log(` ${line}`));
|
|
267
|
+
} catch { /* log missing */ }
|
|
268
|
+
return false;
|
|
193
269
|
}
|
|
194
270
|
|
|
195
271
|
/**
|