@tiens.nguyen/gonext-local-worker 1.0.8 → 1.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/gonext-local-worker.mjs +209 -11
- package/package.json +1 -1
package/gonext-local-worker.mjs
CHANGED
|
@@ -1,18 +1,87 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
/**
|
|
3
|
-
*
|
|
4
|
-
*
|
|
5
|
-
*
|
|
6
|
-
*
|
|
7
|
-
* Usage (from api/):
|
|
8
|
-
* export GONEXT_API_BASE=https://xxxx.execute-api....amazonaws.com
|
|
9
|
-
* export GONEXT_WORKER_KEY=<plaintext secret from Settings → Worker API key>
|
|
10
|
-
* node scripts/local-llm-worker.mjs
|
|
11
|
-
*
|
|
12
|
-
* Requires Node 18+ (global fetch). Uses the OpenAI SDK from this package.
|
|
3
|
+
* GoNext local worker:
|
|
4
|
+
* - `gonext-local-worker set <workerKey> [--api-base URL] [--poll-ms 1500]`
|
|
5
|
+
* writes ~/.gonext/worker.env
|
|
6
|
+
* - `gonext-local-worker` starts polling loop
|
|
13
7
|
*/
|
|
8
|
+
import { mkdir, readFile, writeFile } from "node:fs/promises";
|
|
9
|
+
import { homedir } from "node:os";
|
|
10
|
+
import { join } from "node:path";
|
|
11
|
+
import dotenv from "dotenv";
|
|
14
12
|
import OpenAI from "openai";
|
|
15
13
|
|
|
14
|
+
// Persistent worker config written by `gonext-local-worker set`.
const ENV_FILE = join(homedir(), ".gonext", "worker.env");
// Load the saved config first, then a local .env. dotenv.config does not
// overwrite keys already present in process.env, so ENV_FILE values win
// over .env for any key defined in both — the load order matters here.
dotenv.config({ path: ENV_FILE });
dotenv.config();

// CLI arguments after the node binary and script path.
const args = process.argv.slice(2);
|
|
19
|
+
|
|
20
|
+
/**
 * Print CLI usage and examples to stdout.
 */
function printHelp() {
  const usage = `
gonext-local-worker

Usage:
  gonext-local-worker
  gonext-local-worker set <workerKey> [--api-base <url>] [--poll-ms <ms>]

Examples:
  gonext-local-worker set abc123 --api-base https://hwohu56e8d.execute-api.ap-southeast-1.amazonaws.com
  gonext-local-worker
`;
  console.log(usage);
}
|
|
33
|
+
|
|
34
|
+
/**
 * Look up the value following a `--name` style flag in the CLI args.
 * @param {string} name - Exact flag token to search for (e.g. "--api-base").
 * @returns {string|undefined} The token after the flag, or undefined when the
 *   flag is absent or has no (truthy) value after it.
 */
function parseFlag(name) {
  const at = args.indexOf(name);
  if (at < 0) return undefined;
  const value = args[at + 1];
  return value ? value : undefined;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Implement `gonext-local-worker set <workerKey> [--api-base <url>] [--poll-ms <ms>]`:
 * persist the worker key and options to ~/.gonext/worker.env, falling back to
 * previously saved values and then the process environment for any option not
 * supplied on this invocation. Exits with code 1 when the key is missing.
 */
async function setConfig() {
  const workerKey = args[1]?.trim();
  if (!workerKey) {
    console.error("Missing worker key. Usage: gonext-local-worker set <workerKey>");
    process.exit(1);
  }

  // Previously saved file (if any) supplies fallbacks for unspecified options.
  const savedRaw = await readFile(ENV_FILE, "utf8").catch(() => "");
  const saved = dotenv.parse(savedRaw);

  const rawApiBase =
    parseFlag("--api-base") ?? saved.GONEXT_API_BASE ?? process.env.GONEXT_API_BASE ?? "";
  // Strip trailing slashes so later path concatenation stays clean.
  const apiBase = rawApiBase.replace(/\/+$/, "");
  const pollMs =
    parseFlag("--poll-ms") ?? saved.GONEXT_POLL_MS ?? process.env.GONEXT_POLL_MS ?? "1500";

  await mkdir(join(homedir(), ".gonext"), { recursive: true });
  const fileLines = [
    `GONEXT_API_BASE=${apiBase}`,
    `GONEXT_WORKER_KEY=${workerKey}`,
    `GONEXT_POLL_MS=${pollMs}`,
    "", // trailing newline
  ];
  await writeFile(ENV_FILE, fileLines.join("\n"), "utf8");

  console.log(`Saved ${ENV_FILE}`);
  if (!apiBase) {
    console.log("Tip: set API base too: gonext-local-worker set <workerKey> --api-base <https-url>");
  }
  return;
}
|
|
75
|
+
|
|
76
|
+
// `--help` / `-h`: print usage and stop before any config is required.
if (args.includes("--help") || args.includes("-h")) {
  printHelp();
  process.exit(0);
}
// `set` subcommand: persist config then exit (top-level await; this is an ESM
// module per package.json "type": "module").
if (args[0] === "set") {
  await setConfig();
  process.exit(0);
}
|
|
84
|
+
|
|
16
85
|
// Effective runtime configuration; values were merged into process.env above
// from ~/.gonext/worker.env, a local .env, or the inherited environment.
const apiBase = (process.env.GONEXT_API_BASE ?? "").replace(/\/+$/, "");
const workerKey = process.env.GONEXT_WORKER_KEY ?? "";
// Poll interval in ms; NaN, 0, or empty values fall back to 1500.
const pollMs = Number(process.env.GONEXT_POLL_MS ?? "1500") || 1500;
|
|
@@ -25,7 +94,7 @@ if (!apiBase || !workerKey) {
|
|
|
25
94
|
}
|
|
26
95
|
|
|
27
96
|
function toOpenAIMessages(messages) {
|
|
28
|
-
return messages.map((m) => {
|
|
97
|
+
return (Array.isArray(messages) ? messages : []).map((m) => {
|
|
29
98
|
if (m.role === "user" && m.attachments?.length) {
|
|
30
99
|
return {
|
|
31
100
|
role: m.role,
|
|
@@ -54,6 +123,9 @@ async function workerFetch(path, init = {}) {
|
|
|
54
123
|
|
|
55
124
|
async function runChatJob(job) {
|
|
56
125
|
const { jobId, payload } = job;
|
|
126
|
+
if (!payload || !Array.isArray(payload.messages)) {
|
|
127
|
+
throw new Error("Invalid chat payload: messages array is missing.");
|
|
128
|
+
}
|
|
57
129
|
const start = Date.now();
|
|
58
130
|
await workerFetch(`/api/worker/jobs/${jobId}`, {
|
|
59
131
|
method: "PATCH",
|
|
@@ -97,6 +169,124 @@ async function runChatJob(job) {
|
|
|
97
169
|
}
|
|
98
170
|
}
|
|
99
171
|
|
|
172
|
+
/**
 * Trim a base-URL string and drop any trailing slashes.
 * @param {unknown} raw - Candidate URL; non-strings yield "".
 * @returns {string} Normalized URL or "".
 */
function normalizeBaseUrl(raw) {
  if (typeof raw !== "string") return "";
  return raw.trim().replace(/\/+$/, "");
}
|
|
175
|
+
|
|
176
|
+
/**
 * Normalize an OpenAI-compatible server URL to its `/v1` API root.
 * Trims and strips trailing slashes, then appends `/v1` unless the URL
 * already ends with it (case-insensitive). Empty/non-string input yields "".
 * @param {unknown} raw - Candidate base URL.
 * @returns {string} `/v1` API root or "".
 */
function normalizeOpenAiV1Root(raw) {
  const base = typeof raw === "string" ? raw.trim().replace(/\/+$/, "") : "";
  if (base === "") return "";
  if (/\/v1$/i.test(base)) return base;
  return `${base}/v1`;
}
|
|
181
|
+
|
|
182
|
+
/**
 * Probe an Ollama server's `/api/tags` endpoint and list its models.
 * Never throws: network errors and non-2xx responses yield { online: false }.
 * @param {string} base - Server base URL without trailing slash.
 * @returns {Promise<{online: boolean, endpoint: string, models: Array<{id: string, name: string, value: string}>}>}
 */
async function checkOllamaTags(base) {
  const endpoint = `${base}/api/tags`;
  const offline = () => ({ online: false, endpoint, models: [] });
  try {
    const res = await fetch(endpoint, { method: "GET" });
    if (!res.ok) return offline();
    const body = await res.json();
    // Ollama reports either `name` or `model` depending on version.
    const models = (body.models ?? []).map((entry) => {
      const name = entry.name ?? entry.model ?? "model";
      return { id: name, name, value: `ollama:${name}` };
    });
    return { online: true, endpoint, models };
  } catch {
    return offline();
  }
}
|
|
197
|
+
|
|
198
|
+
/**
 * Probe an OpenAI-compatible server's `/models` endpoint and list model ids.
 * Never throws: network errors and non-2xx responses yield { online: false }.
 * @param {string} base - API root (expected to end in /v1, no trailing slash).
 * @param {string} [apiKey] - Optional bearer token; blank values send no header.
 * @returns {Promise<{online: boolean, endpoint: string, models: Array<{id: string, name: string, value: string}>}>}
 */
async function checkOpenAiModels(base, apiKey) {
  const endpoint = `${base}/models`;
  const headers = {};
  const token = apiKey?.trim();
  if (token) {
    headers.Authorization = `Bearer ${token}`;
  }
  try {
    const res = await fetch(endpoint, { method: "GET", headers });
    if (!res.ok) return { online: false, endpoint, models: [] };
    const body = await res.json();
    const models = (body.data ?? [])
      .map((entry) => entry.id)
      .filter(Boolean)
      .map((id) => ({ id, name: id, value: `mlx:${id}` }));
    return { online: true, endpoint, models };
  } catch {
    return { online: false, endpoint, models: [] };
  }
}
|
|
217
|
+
|
|
218
|
+
/**
 * Handle a `local_health` job: probe the configured Ollama base URLs and the
 * MLX (OpenAI-compatible) endpoint, then report status and model lists back
 * to the cloud API by PATCHing the job.
 *
 * Improvement over the previous version: all probes are independent network
 * calls, so they now run in parallel via Promise.all instead of sequentially.
 * The probe helpers never throw, so Promise.all cannot reject from a probe,
 * and result ordering/dedup order is unchanged (results are folded in base
 * order afterwards).
 *
 * @param {{jobId: string, payload: object}} job - Job from /api/worker/jobs/next.
 */
async function runLocalHealthJob(job) {
  const { jobId, payload } = job;
  const start = Date.now();
  await workerFetch(`/api/worker/jobs/${jobId}`, {
    method: "PATCH",
    body: JSON.stringify({ jobStatus: "running" }),
  });
  try {
    const ollamaBases = Array.isArray(payload?.ollamaBaseUrls)
      ? payload.ollamaBaseUrls.map(normalizeBaseUrl).filter(Boolean)
      : [];
    const mlxRoot = normalizeOpenAiV1Root(payload?.mlxOpenAiBaseUrl);

    // Probe every endpoint concurrently; each helper resolves (never rejects).
    const [ollamaResults, mlx] = await Promise.all([
      Promise.all(ollamaBases.map((base) => checkOllamaTags(base))),
      mlxRoot ? checkOpenAiModels(mlxRoot, payload?.mlxApiKey ?? "") : Promise.resolve(null),
    ]);

    // Fold per-base results in base order: online if any base answered,
    // dedupe models by value keeping first-seen order, report the first
    // endpoint that was probed.
    const dedup = new Map();
    let ollamaOnline = false;
    let ollamaEndpoint = "";
    for (const r of ollamaResults) {
      ollamaOnline = ollamaOnline || r.online;
      if (!ollamaEndpoint) ollamaEndpoint = r.endpoint;
      for (const m of r.models) {
        if (!dedup.has(m.value)) dedup.set(m.value, m);
      }
    }

    const result = {
      ollama:
        ollamaBases.length > 0
          ? {
              // NOTE(review): "configured" reflects whether any models were
              // found, not whether a base URL was supplied — confirm intent.
              configured: dedup.size > 0,
              online: ollamaOnline,
              models: [...dedup.values()],
              endpoint: ollamaEndpoint,
            }
          : undefined,
      mlx: mlx
        ? {
            configured: mlx.models.length > 0,
            online: mlx.online,
            models: mlx.models,
            endpoint: mlx.endpoint,
          }
        : undefined,
    };

    const totalTimeSeconds = (Date.now() - start) / 1000;
    await workerFetch(`/api/worker/jobs/${jobId}`, {
      method: "PATCH",
      body: JSON.stringify({
        jobStatus: "completed",
        resultText: JSON.stringify(result),
        // Health checks produce no tokens; 1 matches the previous behavior.
        tokenCount: 1,
        totalTimeSeconds,
      }),
    });
    console.log(
      `[gonext-worker] completed local_health ${jobId} (${totalTimeSeconds.toFixed(1)}s)`
    );
  } catch (e) {
    // Report the failure to the API so the job does not hang in "running".
    const message = e instanceof Error ? e.message : String(e);
    await workerFetch(`/api/worker/jobs/${jobId}`, {
      method: "PATCH",
      body: JSON.stringify({
        jobStatus: "failed",
        errorMessage: message,
        totalTimeSeconds: (Date.now() - start) / 1000,
      }),
    });
    console.error(`[gonext-worker] failed local_health ${jobId}:`, message);
  }
}
|
|
289
|
+
|
|
100
290
|
async function pollOnce() {
|
|
101
291
|
const res = await workerFetch("/api/worker/jobs/next", { method: "POST" });
|
|
102
292
|
if (res.status === 204) return;
|
|
@@ -106,6 +296,14 @@ async function pollOnce() {
|
|
|
106
296
|
}
|
|
107
297
|
const job = await res.json();
|
|
108
298
|
if (job?.jobId) {
|
|
299
|
+
const isLocalHealthByType = job.jobType === "local_health";
|
|
300
|
+
const isLocalHealthByModelKey = job.modelKey === "local_health";
|
|
301
|
+
const isLocalHealthByPayload =
|
|
302
|
+
Array.isArray(job.payload?.ollamaBaseUrls) || !!job.payload?.mlxOpenAiBaseUrl;
|
|
303
|
+
if (isLocalHealthByType || isLocalHealthByModelKey || isLocalHealthByPayload) {
|
|
304
|
+
await runLocalHealthJob(job);
|
|
305
|
+
return;
|
|
306
|
+
}
|
|
109
307
|
await runChatJob(job);
|
|
110
308
|
}
|
|
111
309
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tiens.nguyen/gonext-local-worker",
|
|
3
|
-
"version": "1.0.8",
|
|
3
|
+
"version": "1.0.10",
|
|
4
4
|
"description": "Polls GoNext cloud API for async local LLM jobs and runs them against Ollama/OpenAI-compatible servers on this Mac",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "MIT",
|