@tiens.nguyen/gonext-local-worker 1.0.15 → 1.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/gonext-local-worker.mjs +9 -3
- package/package.json +1 -1
package/gonext-local-worker.mjs
CHANGED
|
@@ -135,10 +135,16 @@ async function runChatJob(job) {
|
|
|
135
135
|
throw new Error("Invalid chat payload: messages array is missing.");
|
|
136
136
|
}
|
|
137
137
|
const start = Date.now();
|
|
138
|
-
await workerFetch(`/api/worker/jobs/${jobId}`, {
|
|
138
|
+
const runRes = await workerFetch(`/api/worker/jobs/${jobId}`, {
|
|
139
139
|
method: "PATCH",
|
|
140
140
|
body: JSON.stringify({ jobStatus: "running" }),
|
|
141
141
|
});
|
|
142
|
+
if (!runRes.ok) {
|
|
143
|
+
const errBody = await runRes.text().catch(() => "");
|
|
144
|
+
throw new Error(
|
|
145
|
+
`mark running failed ${runRes.status}${errBody ? `: ${errBody}` : ""}`
|
|
146
|
+
);
|
|
147
|
+
}
|
|
142
148
|
|
|
143
149
|
const client = new OpenAI({
|
|
144
150
|
baseURL: payload.baseURL,
|
|
@@ -154,9 +160,9 @@ async function runChatJob(job) {
|
|
|
154
160
|
const t = buf;
|
|
155
161
|
buf = "";
|
|
156
162
|
if (!t) return;
|
|
157
|
-
const res = await workerFetch(`/api/worker/
|
|
163
|
+
const res = await workerFetch(`/api/worker/job-chunk`, {
|
|
158
164
|
method: "POST",
|
|
159
|
-
body: JSON.stringify({ text: t }),
|
|
165
|
+
body: JSON.stringify({ jobId, text: t }),
|
|
160
166
|
});
|
|
161
167
|
if (!res.ok && res.status !== 204) {
|
|
162
168
|
console.error(`[gonext-worker] chunk POST failed ${res.status} for ${jobId}`);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tiens.nguyen/gonext-local-worker",
|
|
3
|
-
"version": "1.0.15",
|
|
3
|
+
"version": "1.0.17",
|
|
4
4
|
"description": "Polls GoNext cloud API for async local LLM jobs and runs them against Ollama/OpenAI-compatible servers on this Mac",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "MIT",
|