@tiens.nguyen/gonext-local-worker 1.0.23 → 1.0.27

@@ -154,6 +154,8 @@ async function runChatJob(job) {
   const client = new OpenAI({
     baseURL: payload.baseURL,
     apiKey: payload.apiKey || "ollama",
+    maxRetries: 0,
+    timeout: 90_000,
   });
 
   let buf = "";
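
Context on this first change: the openai Node SDK defaults to 2 automatic retries and a 10-minute per-request timeout, so a wedged local server could stall a polled job for a long time; maxRetries: 0 and timeout: 90_000 make the worker fail fast instead. A minimal standalone sketch of the same options (the endpoint URL and model id below are made-up placeholders, not taken from this package):

import OpenAI from "openai";

// Same client options as the diff: no automatic retries, 90 s request timeout.
// Run as an ES module (the package itself sets "type": "module").
const client = new OpenAI({
  baseURL: "http://localhost:11434/v1", // hypothetical local Ollama endpoint
  apiKey: "ollama",                     // Ollama ignores the key; the SDK requires one
  maxRetries: 0,
  timeout: 90_000,
});

try {
  const res = await client.chat.completions.create({
    model: "llama3.1:8b", // hypothetical model id
    messages: [{ role: "user", content: "ping" }],
  });
  console.log(res.choices[0].message.content);
} catch (err) {
  // With maxRetries: 0, a dead server surfaces here after a single 90 s
  // attempt rather than after the SDK's default retry cycle.
  if (err instanceof OpenAI.APIConnectionTimeoutError) {
    console.error("local model did not answer within 90s");
  } else {
    throw err;
  }
}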
@@ -265,6 +267,8 @@ async function runChatJob(job) {
   await flushTail;
   await flushChunks();
 
+  logModelResponseToWorker(jobId, payload.modelId, fullText);
+
   const totalTimeSeconds = (Date.now() - start) / 1000;
   await workerFetch(`/api/worker/jobs/${jobId}`, {
     method: "PATCH",
@@ -314,6 +318,16 @@ function sourceLabelFromBase(base) {
   }
 }
 
+/** Log assistant text to stdout; cap size so huge replies do not flood the terminal. */
+function logModelResponseToWorker(jobId, modelId, text) {
+  const max = 12000;
+  const n = text.length;
+  const body = n <= max ? text : `${text.slice(0, max)}\n… [log truncated: ${n - max} more chars]`;
+  console.log(
+    `[gonext-worker] model reply job=${jobId} model=${modelId} chars=${n}:\n${body}`
+  );
+}
+
 async function checkOllamaTags(base) {
   const endpoint = `${base}/api/tags`;
   try {
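
The new helper is invoked in the second hunk only after flushTail and flushChunks have settled, so fullText already holds the complete assistant reply. Its truncation behavior, exercised standalone with made-up job and model ids:

// Copy of the 1.0.27 helper, exercised outside the worker.
function logModelResponseToWorker(jobId, modelId, text) {
  const max = 12000;
  const n = text.length;
  const body = n <= max ? text : `${text.slice(0, max)}\n… [log truncated: ${n - max} more chars]`;
  console.log(
    `[gonext-worker] model reply job=${jobId} model=${modelId} chars=${n}:\n${body}`
  );
}

// Short replies print in full:
logModelResponseToWorker("job-1", "llama3.1:8b", "Hello!");
// [gonext-worker] model reply job=job-1 model=llama3.1:8b chars=6:
// Hello!

// Replies over 12 000 characters are capped and the overflow counted:
logModelResponseToWorker("job-2", "llama3.1:8b", "x".repeat(15000));
// prints the first 12 000 x's, then:
// … [log truncated: 3000 more chars]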
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tiens.nguyen/gonext-local-worker",
-  "version": "1.0.23",
+  "version": "1.0.27",
   "description": "Polls GoNext cloud API for async local LLM jobs and runs them against Ollama/OpenAI-compatible servers on this Mac",
   "type": "module",
   "license": "MIT",