open-agents-ai 0.187.454 → 0.187.455
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +91 -15
- package/npm-shrinkwrap.json +2 -2
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -581217,14 +581217,14 @@ function sanitizeChatContent(raw) {
|
|
|
581217
581217
|
return cleaned.join("\n").trim();
|
|
581218
581218
|
}
|
|
581219
581219
|
async function directChatBackend(opts) {
|
|
581220
|
-
const { model, messages: messages2, stream, res, sessionId, ollamaUrl } = opts;
|
|
581220
|
+
const { model, messages: messages2, stream, res, sessionId, ollamaUrl, extraFields } = opts;
|
|
581221
581221
|
const cfg = loadConfig();
|
|
581222
581222
|
const isVllm = cfg.backendType === "vllm";
|
|
581223
581223
|
const cleanModel = model.replace(/^[a-z]+\//, "");
|
|
581224
581224
|
const headers = { "Content-Type": "application/json" };
|
|
581225
581225
|
if (cfg.apiKey) headers["Authorization"] = `Bearer ${cfg.apiKey}`;
|
|
581226
581226
|
if (isVllm) {
|
|
581227
|
-
const reqBody = JSON.stringify({ model: cleanModel, messages: messages2, stream });
|
|
581227
|
+
const reqBody = JSON.stringify({ model: cleanModel, messages: messages2, stream, ...extraFields || {} });
|
|
581228
581228
|
if (stream) {
|
|
581229
581229
|
res.writeHead(200, {
|
|
581230
581230
|
"Content-Type": "text/event-stream",
|
|
@@ -581292,12 +581292,27 @@ async function directChatBackend(opts) {
|
|
|
581292
581292
|
return content;
|
|
581293
581293
|
}
|
|
581294
581294
|
} else {
|
|
581295
|
+
const ef = extraFields || {};
|
|
581296
|
+
const ollamaOpts = {};
|
|
581297
|
+
if (typeof ef["temperature"] === "number") ollamaOpts["temperature"] = ef["temperature"];
|
|
581298
|
+
if (typeof ef["top_p"] === "number") ollamaOpts["top_p"] = ef["top_p"];
|
|
581299
|
+
if (typeof ef["max_tokens"] === "number") ollamaOpts["num_predict"] = ef["max_tokens"];
|
|
581300
|
+
if (typeof ef["seed"] === "number") ollamaOpts["seed"] = ef["seed"];
|
|
581301
|
+
if (typeof ef["frequency_penalty"] === "number") ollamaOpts["frequency_penalty"] = ef["frequency_penalty"];
|
|
581302
|
+
if (typeof ef["presence_penalty"] === "number") ollamaOpts["presence_penalty"] = ef["presence_penalty"];
|
|
581303
|
+
if (Array.isArray(ef["stop"]) || typeof ef["stop"] === "string") ollamaOpts["stop"] = ef["stop"];
|
|
581304
|
+
const hasTools = Array.isArray(ef["tools"]) && ef["tools"].length > 0;
|
|
581295
581305
|
const reqBody = JSON.stringify({
|
|
581296
581306
|
model: cleanModel,
|
|
581297
581307
|
messages: messages2,
|
|
581298
581308
|
stream,
|
|
581299
|
-
think: false
|
|
581300
|
-
|
|
581309
|
+
// Don't force think:false when the caller is using tool calling —
|
|
581310
|
+
// thinking models often need their reasoning chain to choose a tool.
|
|
581311
|
+
...hasTools ? {} : { think: false },
|
|
581312
|
+
...hasTools ? { tools: ef["tools"] } : {},
|
|
581313
|
+
...ef["tool_choice"] !== void 0 ? { tool_choice: ef["tool_choice"] } : {},
|
|
581314
|
+
...ef["response_format"] !== void 0 ? { format: ef["response_format"] } : {},
|
|
581315
|
+
options: ollamaOpts
|
|
581301
581316
|
});
|
|
581302
581317
|
if (stream) {
|
|
581303
581318
|
res.writeHead(200, {
|
|
@@ -581353,7 +581368,20 @@ async function directChatBackend(opts) {
|
|
|
581353
581368
|
if (result.status >= 400) throw new Error(`Backend HTTP ${result.status}: ${result.body.slice(0, 200)}`);
|
|
581354
581369
|
const j = JSON.parse(result.body);
|
|
581355
581370
|
const content = j?.message?.content || "";
|
|
581356
|
-
|
|
581371
|
+
const rawToolCalls = j?.message?.tool_calls;
|
|
581372
|
+
const toolCalls = Array.isArray(rawToolCalls) ? rawToolCalls.map((tc, idx) => ({
|
|
581373
|
+
id: tc.id || `call_${Math.random().toString(36).slice(2, 10)}`,
|
|
581374
|
+
type: "function",
|
|
581375
|
+
function: {
|
|
581376
|
+
name: tc?.function?.name ?? tc?.name ?? "",
|
|
581377
|
+
arguments: typeof tc?.function?.arguments === "string" ? tc.function.arguments : JSON.stringify(tc?.function?.arguments ?? tc?.arguments ?? {})
|
|
581378
|
+
},
|
|
581379
|
+
index: idx
|
|
581380
|
+
})) : [];
|
|
581381
|
+
if (!content && toolCalls.length === 0) {
|
|
581382
|
+
throw new Error("Backend returned empty message (no content, no tool_calls)");
|
|
581383
|
+
}
|
|
581384
|
+
const finishReason = toolCalls.length > 0 ? "tool_calls" : "stop";
|
|
581357
581385
|
const promptTokens = j?.prompt_eval_count ?? 0;
|
|
581358
581386
|
const completionTokens = j?.eval_count ?? 0;
|
|
581359
581387
|
jsonResponse(res, 200, {
|
|
@@ -581363,8 +581391,12 @@ async function directChatBackend(opts) {
|
|
|
581363
581391
|
model: cleanModel,
|
|
581364
581392
|
choices: [{
|
|
581365
581393
|
index: 0,
|
|
581366
|
-
message: {
|
|
581367
|
-
|
|
581394
|
+
message: {
|
|
581395
|
+
role: "assistant",
|
|
581396
|
+
content,
|
|
581397
|
+
...toolCalls.length > 0 ? { tool_calls: toolCalls } : {}
|
|
581398
|
+
},
|
|
581399
|
+
finish_reason: finishReason
|
|
581368
581400
|
}],
|
|
581369
581401
|
usage: {
|
|
581370
581402
|
prompt_tokens: promptTokens,
|
|
@@ -582008,10 +582040,13 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
|
|
|
582008
582040
|
}
|
|
582009
582041
|
return;
|
|
582010
582042
|
}
|
|
582043
|
+
const callerProvidedThink = "think" in routedBody;
|
|
582044
|
+
const callerProvidedTools = Array.isArray(routedBody["tools"]) && routedBody["tools"].length > 0;
|
|
582045
|
+
const finalThink = callerProvidedThink ? routedBody["think"] : callerProvidedTools ? void 0 : false;
|
|
582011
582046
|
const ollamaPayload = JSON.stringify({
|
|
582012
582047
|
...routedBody,
|
|
582013
582048
|
stream,
|
|
582014
|
-
think: false
|
|
582049
|
+
...finalThink !== void 0 ? { think: finalThink } : {}
|
|
582015
582050
|
});
|
|
582016
582051
|
if (stream) {
|
|
582017
582052
|
res.writeHead(200, {
|
|
@@ -582059,7 +582094,17 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
|
|
|
582059
582094
|
const delta = {};
|
|
582060
582095
|
if (ollamaChunk.message.role) delta.role = ollamaChunk.message.role;
|
|
582061
582096
|
if (ollamaChunk.message.content) delta.content = ollamaChunk.message.content;
|
|
582062
|
-
if (ollamaChunk.message.tool_calls)
|
|
582097
|
+
if (ollamaChunk.message.tool_calls) {
|
|
582098
|
+
delta.tool_calls = ollamaChunk.message.tool_calls.map((tc, idx) => ({
|
|
582099
|
+
id: tc.id || `call_${randomBytes21(8).toString("hex")}`,
|
|
582100
|
+
type: "function",
|
|
582101
|
+
function: {
|
|
582102
|
+
name: tc?.function?.name ?? tc?.name ?? "",
|
|
582103
|
+
arguments: typeof tc?.function?.arguments === "string" ? tc.function.arguments : JSON.stringify(tc?.function?.arguments ?? tc?.arguments ?? {})
|
|
582104
|
+
},
|
|
582105
|
+
index: idx
|
|
582106
|
+
}));
|
|
582107
|
+
}
|
|
582063
582108
|
const sseEvent = {
|
|
582064
582109
|
id: chatId,
|
|
582065
582110
|
object: "chat.completion.chunk",
|
|
@@ -582135,7 +582180,15 @@ async function handleV1ChatCompletions(req2, res, ollamaUrl) {
|
|
|
582135
582180
|
content: ollamaResp.message?.content ?? ""
|
|
582136
582181
|
};
|
|
582137
582182
|
if (ollamaResp.message?.tool_calls && ollamaResp.message.tool_calls.length > 0) {
|
|
582138
|
-
responseMessage.tool_calls = ollamaResp.message.tool_calls
|
|
582183
|
+
responseMessage.tool_calls = ollamaResp.message.tool_calls.map((tc, idx) => ({
|
|
582184
|
+
id: tc.id || `call_${randomBytes21(8).toString("hex")}`,
|
|
582185
|
+
type: "function",
|
|
582186
|
+
function: {
|
|
582187
|
+
name: tc?.function?.name ?? tc?.name ?? "",
|
|
582188
|
+
arguments: typeof tc?.function?.arguments === "string" ? tc.function.arguments : JSON.stringify(tc?.function?.arguments ?? tc?.arguments ?? {})
|
|
582189
|
+
},
|
|
582190
|
+
index: idx
|
|
582191
|
+
}));
|
|
582139
582192
|
if (!ollamaResp.message.content) responseMessage.content = null;
|
|
582140
582193
|
}
|
|
582141
582194
|
const hasToolCalls = !!ollamaResp.message?.tool_calls?.length;
|
|
@@ -584601,19 +584654,42 @@ data: ${JSON.stringify(data)}
|
|
|
584601
584654
|
const session = getSession(sessionId, model, cwdPath);
|
|
584602
584655
|
addUserMessage(session, chatBody.message);
|
|
584603
584656
|
compactSession(session);
|
|
584604
|
-
const wantsTools = chatBody["tools"] !== false && chatBody["use_tools"] !== false;
|
|
584605
584657
|
const streamMode = chatBody.stream !== false;
|
|
584606
|
-
|
|
584658
|
+
const toolsField = chatBody["tools"];
|
|
584659
|
+
const isOpenAIToolsArray = Array.isArray(toolsField) && toolsField.length > 0 && toolsField.every(
|
|
584660
|
+
(t2) => t2 && typeof t2 === "object" && t2["type"] === "function" && typeof t2["function"]?.["name"] === "string"
|
|
584661
|
+
);
|
|
584662
|
+
const wantsAgent = !isOpenAIToolsArray && chatBody["tools"] !== false && chatBody["use_tools"] !== false;
|
|
584663
|
+
if (Array.isArray(chatBody.messages)) {
|
|
584664
|
+
const incomingSystem = chatBody.messages.find((m2) => m2?.role === "system" && typeof m2.content === "string");
|
|
584665
|
+
if (incomingSystem && session.messages.length > 0 && session.messages[0].role === "system") {
|
|
584666
|
+
session.messages[0] = { ...session.messages[0], content: incomingSystem.content };
|
|
584667
|
+
}
|
|
584668
|
+
}
|
|
584669
|
+
if (!wantsAgent) {
|
|
584607
584670
|
try {
|
|
584608
584671
|
const ans = await directChatBackend({
|
|
584609
584672
|
model,
|
|
584610
|
-
// Filter out tool_call/tool_result messages — the upstream
|
|
584611
|
-
// model only accepts standard system/user/assistant turns.
|
|
584612
584673
|
messages: session.messages.filter((m2) => m2.role === "system" || m2.role === "user" || m2.role === "assistant").map((m2) => ({ role: m2.role, content: m2.content })),
|
|
584613
584674
|
stream: streamMode,
|
|
584614
584675
|
res,
|
|
584615
584676
|
sessionId: session.id,
|
|
584616
|
-
ollamaUrl
|
|
584677
|
+
ollamaUrl,
|
|
584678
|
+
// Pass-through of OpenAI fields — the upstream model needs
|
|
584679
|
+
// them for native tool calling, deterministic outputs, etc.
|
|
584680
|
+
extraFields: {
|
|
584681
|
+
...isOpenAIToolsArray ? { tools: toolsField } : {},
|
|
584682
|
+
...chatBody["tool_choice"] !== void 0 ? { tool_choice: chatBody["tool_choice"] } : {},
|
|
584683
|
+
...chatBody["temperature"] !== void 0 ? { temperature: chatBody["temperature"] } : {},
|
|
584684
|
+
...chatBody["top_p"] !== void 0 ? { top_p: chatBody["top_p"] } : {},
|
|
584685
|
+
...chatBody["max_tokens"] !== void 0 ? { max_tokens: chatBody["max_tokens"] } : {},
|
|
584686
|
+
...chatBody["response_format"] !== void 0 ? { response_format: chatBody["response_format"] } : {},
|
|
584687
|
+
...chatBody["seed"] !== void 0 ? { seed: chatBody["seed"] } : {},
|
|
584688
|
+
...chatBody["frequency_penalty"] !== void 0 ? { frequency_penalty: chatBody["frequency_penalty"] } : {},
|
|
584689
|
+
...chatBody["presence_penalty"] !== void 0 ? { presence_penalty: chatBody["presence_penalty"] } : {},
|
|
584690
|
+
...chatBody["stop"] !== void 0 ? { stop: chatBody["stop"] } : {},
|
|
584691
|
+
...chatBody["parallel_tool_calls"] !== void 0 ? { parallel_tool_calls: chatBody["parallel_tool_calls"] } : {}
|
|
584692
|
+
}
|
|
584617
584693
|
});
|
|
584618
584694
|
if (ans !== null) {
|
|
584619
584695
|
addAssistantMessage(session, ans);
|
package/npm-shrinkwrap.json
CHANGED
|
@@ -1,12 +1,12 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "open-agents-ai",
|
|
3
|
-
"version": "0.187.
|
|
3
|
+
"version": "0.187.455",
|
|
4
4
|
"lockfileVersion": 3,
|
|
5
5
|
"requires": true,
|
|
6
6
|
"packages": {
|
|
7
7
|
"": {
|
|
8
8
|
"name": "open-agents-ai",
|
|
9
|
-
"version": "0.187.
|
|
9
|
+
"version": "0.187.455",
|
|
10
10
|
"hasInstallScript": true,
|
|
11
11
|
"license": "CC-BY-NC-4.0",
|
|
12
12
|
"dependencies": {
|
package/package.json
CHANGED