@tiens.nguyen/gonext-local-worker 1.0.7 → 1.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -0
- package/gonext-local-worker.mjs +125 -0
- package/package.json +1 -1
package/gonext-local-worker.mjs
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Polls GoNext Cloud API for pending local-LLM chat jobs, runs them against the
|
|
4
|
+
* URLs embedded in the job payload (your LAN Ollama / OpenAI-compatible server),
|
|
5
|
+
* then PATCHes completion back to the API.
|
|
6
|
+
*
|
|
7
|
+
* Usage (from api/):
|
|
8
|
+
* export GONEXT_API_BASE=https://xxxx.execute-api....amazonaws.com
|
|
9
|
+
* export GONEXT_WORKER_KEY=<plaintext secret from Settings → Worker API key>
|
|
10
|
+
* node scripts/local-llm-worker.mjs
|
|
11
|
+
*
|
|
12
|
+
* Requires Node 18+ (global fetch). Uses the OpenAI SDK from this package.
|
|
13
|
+
*/
|
|
14
|
+
import OpenAI from "openai";
|
|
15
|
+
|
|
16
|
+
// Worker configuration, resolved once at startup from the environment.
const env = process.env;
// API origin with any trailing slashes stripped so path joins stay clean.
const apiBase = (env.GONEXT_API_BASE ?? "").replace(/\/+$/, "");
const workerKey = env.GONEXT_WORKER_KEY ?? "";
// Poll interval in ms; unset, non-numeric, or zero values fall back to 1500.
const pollMs = Number(env.GONEXT_POLL_MS ?? "1500") || 1500;

// Both the API origin and the worker key are mandatory; bail out early otherwise.
if (apiBase === "" || workerKey === "") {
  console.error(
    "Set GONEXT_API_BASE (HTTP API origin, no /api suffix) and GONEXT_WORKER_KEY."
  );
  process.exit(1);
}
|
|
26
|
+
|
|
27
|
+
/**
 * Convert GoNext chat messages into OpenAI chat-completions format.
 *
 * Plain messages pass through as `{ role, content }`. A user message that
 * carries attachments becomes a multi-part content array: the text first,
 * followed by one `image_url` part per attachment, each encoded as a
 * `data:<mime>;base64,<data>` URI.
 *
 * @param {Array<{role: string, content: string, attachments?: Array<{mimeType: string, data: string}>}>} messages
 * @returns {Array<object>} messages in OpenAI chat-completion shape
 */
function toOpenAIMessages(messages) {
  const converted = [];
  for (const message of messages) {
    const { role, content, attachments } = message;
    // Only user messages with at least one attachment need the multi-part form.
    if (role !== "user" || !attachments?.length) {
      converted.push({ role, content });
      continue;
    }
    const parts = [{ type: "text", text: content }];
    for (const attachment of attachments) {
      parts.push({
        type: "image_url",
        image_url: { url: `data:${attachment.mimeType};base64,${attachment.data}` },
      });
    }
    converted.push({ role, content: parts });
  }
  return converted;
}
|
|
44
|
+
|
|
45
|
+
/**
 * fetch() against the GoNext API with worker auth applied.
 *
 * Prefixes `path` with the configured `apiBase` (adding a leading "/" when
 * missing) and sets the JSON content type plus the `X-Worker-Key` header;
 * headers supplied via `init.headers` override these defaults.
 *
 * @param {string} path - API path, with or without a leading "/".
 * @param {RequestInit} [init] - extra fetch options merged into the request.
 * @returns {Promise<Response>} the raw fetch Response (not checked for ok).
 */
async function workerFetch(path, init = {}) {
  const normalizedPath = path.startsWith("/") ? path : `/${path}`;
  const mergedHeaders = {
    "Content-Type": "application/json",
    "X-Worker-Key": workerKey,
    ...(init.headers ?? {}),
  };
  return fetch(`${apiBase}${normalizedPath}`, { ...init, headers: mergedHeaders });
}
|
|
54
|
+
|
|
55
|
+
/**
 * Execute one chat job end-to-end:
 *   1. PATCH the job to "running",
 *   2. run the chat completion against the OpenAI-compatible server named in
 *      the job payload (e.g. a LAN Ollama instance),
 *   3. PATCH back "completed" with the result text and timing, or "failed"
 *      with the error message.
 *
 * @param {{jobId: string, payload: {baseURL: string, apiKey?: string, modelId: string, messages: Array}}} job
 */
async function runChatJob(job) {
  const { jobId, payload } = job;
  const start = Date.now();
  await workerFetch(`/api/worker/jobs/${jobId}`, {
    method: "PATCH",
    body: JSON.stringify({ jobStatus: "running" }),
  });

  // Client pointed at the server from the job payload. Ollama ignores the
  // key but the SDK requires a non-empty one, hence the placeholder.
  const client = new OpenAI({
    baseURL: payload.baseURL,
    apiKey: payload.apiKey || "ollama",
  });

  try {
    const completion = await client.chat.completions.create({
      model: payload.modelId,
      messages: toOpenAIMessages(payload.messages),
      temperature: 0,
    });
    const text = completion.choices[0]?.message?.content ?? "";
    const totalTimeSeconds = (Date.now() - start) / 1000;
    await workerFetch(`/api/worker/jobs/${jobId}`, {
      method: "PATCH",
      body: JSON.stringify({
        jobStatus: "completed",
        resultText: text,
        // Report the real completion token count when the server returns
        // usage data; fall back to the previous placeholder of 1 otherwise.
        tokenCount: completion.usage?.completion_tokens ?? 1,
        totalTimeSeconds,
      }),
    });
    console.log(`[gonext-worker] completed ${jobId} (${totalTimeSeconds.toFixed(1)}s)`);
  } catch (e) {
    const message = e instanceof Error ? e.message : String(e);
    // Best-effort failure report: if this PATCH itself rejects we only log,
    // so the original LLM error is not masked by a reporting error (and the
    // rejection does not escape as an unhandled one).
    try {
      await workerFetch(`/api/worker/jobs/${jobId}`, {
        method: "PATCH",
        body: JSON.stringify({
          jobStatus: "failed",
          errorMessage: message,
          totalTimeSeconds: (Date.now() - start) / 1000,
        }),
      });
    } catch (reportError) {
      console.error(`[gonext-worker] could not report failure for ${jobId}:`, reportError);
    }
    console.error(`[gonext-worker] failed ${jobId}:`, message);
  }
}
|
|
99
|
+
|
|
100
|
+
/**
 * Ask the API for the next pending job and run it if one exists.
 *
 * A 204 response means the queue is empty (no-op); any other non-ok status
 * is surfaced as an Error carrying the status and response body.
 *
 * @throws {Error} when the "next job" request fails with a non-204 error.
 */
async function pollOnce() {
  const response = await workerFetch("/api/worker/jobs/next", { method: "POST" });
  // 204 = nothing queued; skip this cycle.
  if (response.status === 204) {
    return;
  }
  if (!response.ok) {
    const detail = await response.text().catch(() => "");
    throw new Error(`next failed ${response.status}: ${detail}`);
  }
  const job = await response.json();
  if (job?.jobId) {
    await runChatJob(job);
  }
}
|
|
112
|
+
|
|
113
|
+
/**
 * Poll forever: attempt one job per cycle, log (but survive) poll errors,
 * then sleep `pollMs` before the next attempt.
 */
async function main() {
  console.log(`[gonext-worker] polling ${apiBase} every ${pollMs}ms`);
  while (true) {
    try {
      await pollOnce();
    } catch (e) {
      console.error("[gonext-worker] poll error:", e);
    }
    // Fixed-interval backoff between polls.
    await new Promise((resolve) => setTimeout(resolve, pollMs));
  }
}
|
|
124
|
+
|
|
125
|
+
// Entry point. main() loops forever under normal operation; the catch is
// defensive so an unexpected throw before the loop starts surfaces as a
// fatal exit instead of a silent unhandled promise rejection.
main().catch((e) => {
  console.error("[gonext-worker] fatal:", e);
  process.exit(1);
});
|
package/package.json
CHANGED