@tiens.nguyen/gonext-local-worker 1.0.10 → 1.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/gonext-local-worker.mjs +47 -5
- package/package.json +1 -1
package/gonext-local-worker.mjs
CHANGED
|
@@ -179,15 +179,28 @@ function normalizeOpenAiV1Root(raw) {
|
|
|
179
179
|
return /\/v1$/i.test(base) ? base : `${base}/v1`;
|
|
180
180
|
}
|
|
181
181
|
|
|
182
|
+
function sourceLabelFromBase(base) {
|
|
183
|
+
try {
|
|
184
|
+
return new URL(base).host || base;
|
|
185
|
+
} catch {
|
|
186
|
+
return base;
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
182
190
|
async function checkOllamaTags(base) {
|
|
183
191
|
const endpoint = `${base}/api/tags`;
|
|
184
192
|
try {
|
|
185
193
|
const res = await fetch(endpoint, { method: "GET" });
|
|
186
194
|
if (!res.ok) return { online: false, endpoint, models: [] };
|
|
187
195
|
const j = await res.json();
|
|
196
|
+
const source = sourceLabelFromBase(base);
|
|
188
197
|
const models = (j.models ?? []).map((m) => {
|
|
189
198
|
const name = m.name ?? m.model ?? "model";
|
|
190
|
-
return {
|
|
199
|
+
return {
|
|
200
|
+
id: `${name}@@${source}`,
|
|
201
|
+
name: `${name} (${source})`,
|
|
202
|
+
value: `ollama:${name}@@${encodeURIComponent(base)}`,
|
|
203
|
+
};
|
|
191
204
|
});
|
|
192
205
|
return { online: true, endpoint, models };
|
|
193
206
|
} catch {
|
|
@@ -218,6 +231,12 @@ async function checkOpenAiModels(base, apiKey) {
|
|
|
218
231
|
async function runLocalHealthJob(job) {
|
|
219
232
|
const { jobId, payload } = job;
|
|
220
233
|
const start = Date.now();
|
|
234
|
+
const ollamaPayloadCount = Array.isArray(payload?.ollamaBaseUrls)
|
|
235
|
+
? payload.ollamaBaseUrls.length
|
|
236
|
+
: 0;
|
|
237
|
+
console.log(
|
|
238
|
+
`[gonext-worker] local_health ${jobId} start (ollamaUrls=${ollamaPayloadCount}, mlx=${payload?.mlxOpenAiBaseUrl ? "yes" : "no"})`
|
|
239
|
+
);
|
|
221
240
|
await workerFetch(`/api/worker/jobs/${jobId}`, {
|
|
222
241
|
method: "PATCH",
|
|
223
242
|
body: JSON.stringify({ jobStatus: "running" }),
|
|
@@ -229,18 +248,36 @@ async function runLocalHealthJob(job) {
|
|
|
229
248
|
const dedup = new Map();
|
|
230
249
|
let ollamaOnline = false;
|
|
231
250
|
let ollamaEndpoint = "";
|
|
251
|
+
const ollamaSources = [];
|
|
232
252
|
for (const base of ollamaBases) {
|
|
253
|
+
const baseStart = Date.now();
|
|
254
|
+
console.log(`[gonext-worker] local_health ${jobId} check ollama ${base}`);
|
|
233
255
|
const r = await checkOllamaTags(base);
|
|
256
|
+
console.log(
|
|
257
|
+
`[gonext-worker] local_health ${jobId} ollama result ${base} online=${r.online} models=${r.models.length} took=${((Date.now() - baseStart) / 1000).toFixed(2)}s`
|
|
258
|
+
);
|
|
234
259
|
ollamaOnline = ollamaOnline || r.online;
|
|
235
260
|
if (!ollamaEndpoint) ollamaEndpoint = r.endpoint;
|
|
261
|
+
ollamaSources.push({
|
|
262
|
+
base,
|
|
263
|
+
label: sourceLabelFromBase(base),
|
|
264
|
+
endpoint: r.endpoint,
|
|
265
|
+
online: r.online,
|
|
266
|
+
});
|
|
236
267
|
for (const m of r.models) {
|
|
237
268
|
if (!dedup.has(m.value)) dedup.set(m.value, m);
|
|
238
269
|
}
|
|
239
270
|
}
|
|
240
271
|
const mlxRoot = normalizeOpenAiV1Root(payload?.mlxOpenAiBaseUrl);
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
272
|
+
let mlx = null;
|
|
273
|
+
if (mlxRoot) {
|
|
274
|
+
const mlxStart = Date.now();
|
|
275
|
+
console.log(`[gonext-worker] local_health ${jobId} check mlx ${mlxRoot}`);
|
|
276
|
+
mlx = await checkOpenAiModels(mlxRoot, payload?.mlxApiKey ?? "");
|
|
277
|
+
console.log(
|
|
278
|
+
`[gonext-worker] local_health ${jobId} mlx result ${mlxRoot} online=${mlx.online} models=${mlx.models.length} took=${((Date.now() - mlxStart) / 1000).toFixed(2)}s`
|
|
279
|
+
);
|
|
280
|
+
}
|
|
244
281
|
const result = {
|
|
245
282
|
ollama:
|
|
246
283
|
ollamaBases.length > 0
|
|
@@ -249,6 +286,7 @@ async function runLocalHealthJob(job) {
|
|
|
249
286
|
online: ollamaOnline,
|
|
250
287
|
models: [...dedup.values()],
|
|
251
288
|
endpoint: ollamaEndpoint,
|
|
289
|
+
sources: ollamaSources,
|
|
252
290
|
}
|
|
253
291
|
: undefined,
|
|
254
292
|
mlx: mlx
|
|
@@ -270,8 +308,9 @@ async function runLocalHealthJob(job) {
|
|
|
270
308
|
totalTimeSeconds,
|
|
271
309
|
}),
|
|
272
310
|
});
|
|
311
|
+
const onlineCount = ollamaSources.filter((s) => s.online).length;
|
|
273
312
|
console.log(
|
|
274
|
-
`[gonext-worker] completed local_health ${jobId} (${totalTimeSeconds.toFixed(1)}s)`
|
|
313
|
+
`[gonext-worker] completed local_health ${jobId} (${totalTimeSeconds.toFixed(1)}s) summary: ollamaOnline=${onlineCount}/${ollamaSources.length}, mlx=${mlx ? (mlx.online ? "online" : "offline") : "n/a"}`
|
|
275
314
|
);
|
|
276
315
|
} catch (e) {
|
|
277
316
|
const message = e instanceof Error ? e.message : String(e);
|
|
@@ -296,6 +335,9 @@ async function pollOnce() {
|
|
|
296
335
|
}
|
|
297
336
|
const job = await res.json();
|
|
298
337
|
if (job?.jobId) {
|
|
338
|
+
console.log(
|
|
339
|
+
`[gonext-worker] claimed ${job.jobId} type=${job.jobType ?? "unknown"} modelKey=${job.modelKey ?? "unknown"}`
|
|
340
|
+
);
|
|
299
341
|
const isLocalHealthByType = job.jobType === "local_health";
|
|
300
342
|
const isLocalHealthByModelKey = job.modelKey === "local_health";
|
|
301
343
|
const isLocalHealthByPayload =
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tiens.nguyen/gonext-local-worker",
|
|
3
|
-
"version": "1.0.10",
|
|
3
|
+
"version": "1.0.11",
|
|
4
4
|
"description": "Polls GoNext cloud API for async local LLM jobs and runs them against Ollama/OpenAI-compatible servers on this Mac",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "MIT",
|