open-agents-ai 0.186.65 → 0.186.67

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/index.js +30 -11
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -300740,7 +300740,14 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
300740
300740
 
300741
300741
  `);
300742
300742
  res.write("data: [DONE]\n\n");
300743
- } else if (ollamaChunk.message?.content) {
300743
+ } else if (ollamaChunk.message?.content || ollamaChunk.message?.tool_calls) {
300744
+ const delta = {};
300745
+ if (ollamaChunk.message.role)
300746
+ delta.role = ollamaChunk.message.role;
300747
+ if (ollamaChunk.message.content)
300748
+ delta.content = ollamaChunk.message.content;
300749
+ if (ollamaChunk.message.tool_calls)
300750
+ delta.tool_calls = ollamaChunk.message.tool_calls;
300744
300751
  const sseEvent = {
300745
300752
  id: chatId,
300746
300753
  object: "chat.completion.chunk",
@@ -300749,10 +300756,7 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
300749
300756
  choices: [
300750
300757
  {
300751
300758
  index: 0,
300752
- delta: {
300753
- role: ollamaChunk.message.role ?? "assistant",
300754
- content: ollamaChunk.message.content
300755
- },
300759
+ delta,
300756
300760
  finish_reason: null
300757
300761
  }
300758
300762
  ]
@@ -300815,6 +300819,16 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
300815
300819
  metrics.totalTokensIn += ollamaResp.prompt_eval_count;
300816
300820
  trackTokens("local", ollamaResp.prompt_eval_count ?? 0, ollamaResp.eval_count ?? 0);
300817
300821
  const chatId = `chatcmpl-${randomBytes19(12).toString("hex")}`;
300822
+ const responseMessage = {
300823
+ role: ollamaResp.message?.role ?? "assistant",
300824
+ content: ollamaResp.message?.content ?? ""
300825
+ };
300826
+ if (ollamaResp.message?.tool_calls && ollamaResp.message.tool_calls.length > 0) {
300827
+ responseMessage.tool_calls = ollamaResp.message.tool_calls;
300828
+ if (!ollamaResp.message.content)
300829
+ responseMessage.content = null;
300830
+ }
300831
+ const hasToolCalls = !!ollamaResp.message?.tool_calls?.length;
300818
300832
  const openaiResponse = {
300819
300833
  id: chatId,
300820
300834
  object: "chat.completion",
@@ -300823,11 +300837,8 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
300823
300837
  choices: [
300824
300838
  {
300825
300839
  index: 0,
300826
- message: {
300827
- role: ollamaResp.message?.role ?? "assistant",
300828
- content: ollamaResp.message?.content ?? ""
300829
- },
300830
- finish_reason: "stop"
300840
+ message: responseMessage,
300841
+ finish_reason: hasToolCalls ? "tool_calls" : "stop"
300831
300842
  }
300832
300843
  ],
300833
300844
  usage: {
@@ -307344,8 +307355,16 @@ async function runBackground(task, config, opts) {
307344
307355
  env: { ...process.env, OA_JOB_ID: id },
307345
307356
  stdio: ["ignore", "pipe", "pipe"],
307346
307357
  detached: true
307358
+ // Own process group for tree kill
307359
+ });
307360
+ process.on("exit", () => {
307361
+ if (child.pid && !child.killed) {
307362
+ try {
307363
+ process.kill(-child.pid, "SIGKILL");
307364
+ } catch {
307365
+ }
307366
+ }
307347
307367
  });
307348
- child.unref();
307349
307368
  job.pid = child.pid ?? 0;
307350
307369
  writeFileSync31(join78(dir, `${id}.json`), JSON.stringify(job, null, 2));
307351
307370
  let output = "";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "open-agents-ai",
3
- "version": "0.186.65",
3
+ "version": "0.186.67",
4
4
  "description": "AI coding agent powered by open-source models (Ollama/vLLM) — interactive TUI with agentic tool-calling loop",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",