@aigne/core 1.8.0 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/CHANGELOG.md +12 -0
  2. package/lib/cjs/agents/agent.js +2 -2
  3. package/lib/cjs/loader/index.js +20 -4
  4. package/lib/cjs/models/deepseek-chat-model.d.ts +7 -0
  5. package/lib/cjs/models/deepseek-chat-model.js +19 -0
  6. package/lib/cjs/models/gemini-chat-model.d.ts +8 -0
  7. package/lib/cjs/models/gemini-chat-model.js +20 -0
  8. package/lib/cjs/models/ollama-chat-model.d.ts +6 -0
  9. package/lib/cjs/models/ollama-chat-model.js +18 -0
  10. package/lib/cjs/models/open-router-chat-model.d.ts +5 -0
  11. package/lib/cjs/models/open-router-chat-model.js +17 -0
  12. package/lib/cjs/models/openai-chat-model.d.ts +23 -1
  13. package/lib/cjs/models/openai-chat-model.js +182 -78
  14. package/lib/cjs/models/xai-chat-model.d.ts +3 -11
  15. package/lib/cjs/models/xai-chat-model.js +1 -14
  16. package/lib/cjs/prompt/prompt-builder.js +3 -0
  17. package/lib/cjs/utils/prompts.d.ts +1 -0
  18. package/lib/cjs/utils/prompts.js +13 -0
  19. package/lib/cjs/utils/type-utils.d.ts +1 -1
  20. package/lib/cjs/utils/type-utils.js +1 -1
  21. package/lib/dts/models/deepseek-chat-model.d.ts +7 -0
  22. package/lib/dts/models/gemini-chat-model.d.ts +8 -0
  23. package/lib/dts/models/ollama-chat-model.d.ts +6 -0
  24. package/lib/dts/models/open-router-chat-model.d.ts +5 -0
  25. package/lib/dts/models/openai-chat-model.d.ts +23 -1
  26. package/lib/dts/models/xai-chat-model.d.ts +3 -11
  27. package/lib/dts/utils/prompts.d.ts +1 -0
  28. package/lib/dts/utils/type-utils.d.ts +1 -1
  29. package/lib/esm/agents/agent.js +3 -3
  30. package/lib/esm/loader/index.js +20 -4
  31. package/lib/esm/models/deepseek-chat-model.d.ts +7 -0
  32. package/lib/esm/models/deepseek-chat-model.js +15 -0
  33. package/lib/esm/models/gemini-chat-model.d.ts +8 -0
  34. package/lib/esm/models/gemini-chat-model.js +16 -0
  35. package/lib/esm/models/ollama-chat-model.d.ts +6 -0
  36. package/lib/esm/models/ollama-chat-model.js +14 -0
  37. package/lib/esm/models/open-router-chat-model.d.ts +5 -0
  38. package/lib/esm/models/open-router-chat-model.js +13 -0
  39. package/lib/esm/models/openai-chat-model.d.ts +23 -1
  40. package/lib/esm/models/openai-chat-model.js +178 -78
  41. package/lib/esm/models/xai-chat-model.d.ts +3 -11
  42. package/lib/esm/models/xai-chat-model.js +1 -11
  43. package/lib/esm/prompt/prompt-builder.js +3 -0
  44. package/lib/esm/utils/prompts.d.ts +1 -0
  45. package/lib/esm/utils/prompts.js +10 -0
  46. package/lib/esm/utils/type-utils.d.ts +1 -1
  47. package/lib/esm/utils/type-utils.js +1 -1
  48. package/package.json +1 -1
package/CHANGELOG.md CHANGED
@@ -22,6 +22,18 @@
  * rename @aigne/core-next to @aigne/core ([3a81009](https://github.com/AIGNE-io/aigne-framework/commit/3a8100962c81813217b687ae28e8de604419c622))
  * use text resource from MCP correctly ([8b9eba8](https://github.com/AIGNE-io/aigne-framework/commit/8b9eba83352ec096a2a5d4f410d4c4bde7420bce))

+ ## [1.9.0](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.8.0...core-v1.9.0) (2025-04-20)
+
+
+ ### Features
+
+ * **core:** add model adapters for DeepSeek, Gemini, OpenRouter, and Ollama ([#53](https://github.com/AIGNE-io/aigne-framework/issues/53)) ([5d40546](https://github.com/AIGNE-io/aigne-framework/commit/5d40546bd5ddb70233d27ea3b20e5711b2af320a))
+
+
+ ### Bug Fixes
+
+ * **dx:** custom error message for agent input/output validation ([#71](https://github.com/AIGNE-io/aigne-framework/issues/71)) ([5145673](https://github.com/AIGNE-io/aigne-framework/commit/5145673aaae2cd6665912e80b1c644e974c42b2f))
+
  ## [1.8.0](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.7.0...core-v1.8.0) (2025-04-17)


package/lib/cjs/agents/agent.js CHANGED
@@ -137,12 +137,12 @@ class Agent {
  if (!this.disableEvents)
  ctx.emit("agentStarted", { agent: this, input: message });
  try {
- const parsedInput = this.inputSchema.parse(message);
+ const parsedInput = (0, type_utils_js_1.checkArguments)(`Agent ${this.name} input`, this.inputSchema, message);
  this.preprocess(parsedInput, ctx);
  this.checkContextStatus(ctx);
  const output = await this.process(parsedInput, ctx)
  .then((output) => {
- const parsedOutput = this.outputSchema.parse(output);
+ const parsedOutput = (0, type_utils_js_1.checkArguments)(`Agent ${this.name} output`, this.outputSchema, output);
  return this.includeInputInOutput ? { ...parsedInput, ...parsedOutput } : parsedOutput;
  })
  .then((output) => {
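Both validation sites now route through `checkArguments` from `utils/type-utils.js`, which prefixes validation failures with the agent's name and returns the parsed value (its signature change appears at the end of this diff). The updated `type-utils.js` body is not part of the shown hunks, so the sketch below is an illustrative re-implementation of the pattern, not the actual code; the exact error wording is an assumption.

    import { z, type ZodType } from "zod";

    // Hypothetical stand-in for checkArguments from utils/type-utils.js.
    function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T): T {
      try {
        return schema.parse(args);
      } catch (error) {
        // Prefix the zod failure so the offending agent is identifiable.
        throw new Error(`${prefix}: ${(error as Error).message}`);
      }
    }

    const inputSchema = z.object({ question: z.string() });
    // Throws an error beginning with "Agent chatbot input" instead of a bare ZodError:
    checkArguments("Agent chatbot input", inputSchema, { question: 42 } as never);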
package/lib/cjs/loader/index.js CHANGED
@@ -12,6 +12,10 @@ const agent_js_1 = require("../agents/agent.js");
  const ai_agent_js_1 = require("../agents/ai-agent.js");
  const mcp_agent_js_1 = require("../agents/mcp-agent.js");
  const claude_chat_model_js_1 = require("../models/claude-chat-model.js");
+ const deepseek_chat_model_js_1 = require("../models/deepseek-chat-model.js");
+ const gemini_chat_model_js_1 = require("../models/gemini-chat-model.js");
+ const ollama_chat_model_js_1 = require("../models/ollama-chat-model.js");
+ const open_router_chat_model_js_1 = require("../models/open-router-chat-model.js");
  const openai_chat_model_js_1 = require("../models/openai-chat-model.js");
  const xai_chat_model_js_1 = require("../models/xai-chat-model.js");
  const type_utils_js_1 = require("../utils/type-utils.js");
@@ -74,17 +78,29 @@ async function loadAgent(path) {
  }
  throw new Error(`Unsupported agent file type: ${path}`);
  }
- const { MODEL_PROVIDER = "openai", MODEL_NAME = "gpt-4o-mini" } = process.env;
+ const { MODEL_PROVIDER, MODEL_NAME } = process.env;
+ const DEFAULT_MODEL_PROVIDER = "openai";
+ const DEFAULT_MODEL_NAME = "gpt-4o-mini";
  async function loadModel(model, modelOptions) {
  const params = {
- model: model?.name ?? MODEL_NAME,
+ model: MODEL_NAME ?? model?.name ?? DEFAULT_MODEL_NAME,
  temperature: model?.temperature ?? undefined,
  topP: model?.top_p ?? undefined,
  frequencyPenalty: model?.frequent_penalty ?? undefined,
  presencePenalty: model?.presence_penalty ?? undefined,
  };
- const availableModels = [openai_chat_model_js_1.OpenAIChatModel, claude_chat_model_js_1.ClaudeChatModel, xai_chat_model_js_1.XAIChatModel];
- const M = availableModels.find((m) => m.name.toLowerCase().includes(model?.provider || MODEL_PROVIDER));
+ const availableModels = [
+ openai_chat_model_js_1.OpenAIChatModel,
+ claude_chat_model_js_1.ClaudeChatModel,
+ xai_chat_model_js_1.XAIChatModel,
+ gemini_chat_model_js_1.GeminiChatModel,
+ deepseek_chat_model_js_1.DeepSeekChatModel,
+ open_router_chat_model_js_1.OpenRouterChatModel,
+ ollama_chat_model_js_1.OllamaChatModel,
+ ];
+ const M = availableModels.find((m) => m.name
+ .toLowerCase()
+ .includes((MODEL_PROVIDER ?? model?.provider ?? DEFAULT_MODEL_PROVIDER).toLowerCase()));
  if (!M)
  throw new Error(`Unsupported model: ${model?.provider} ${model?.name}`);
  return new M({ model: params.model, modelOptions: { ...params, ...modelOptions } });
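The precedence of model settings is inverted here: previously the agent file's `model.name` beat the `MODEL_NAME` environment variable; now the environment variables win, with the old `openai`/`gpt-4o-mini` defaults as the final fallback, and the provider match is case-insensitive on both sides. A condensed sketch of the new resolution order:

    // Sketch of the resolution order after this change (names mirror the loader):
    declare const model: { provider?: string; name?: string } | undefined;

    const provider = process.env.MODEL_PROVIDER ?? model?.provider ?? "openai"; // env first
    const name = process.env.MODEL_NAME ?? model?.name ?? "gpt-4o-mini";        // env first

    // The provider is then matched against the adapter class name, e.g.
    // "DeepSeekChatModel".toLowerCase().includes(provider.toLowerCase())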
package/lib/cjs/models/deepseek-chat-model.d.ts ADDED
@@ -0,0 +1,7 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class DeepSeekChatModel extends OpenAIChatModel {
+ constructor(options?: OpenAIChatModelOptions);
+ protected apiKeyEnvName: string;
+ protected supportsNativeStructuredOutputs: boolean;
+ protected supportsToolsEmptyParameters: boolean;
+ }
package/lib/cjs/models/deepseek-chat-model.js ADDED
@@ -0,0 +1,19 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.DeepSeekChatModel = void 0;
+ const openai_chat_model_js_1 = require("./openai-chat-model.js");
+ const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
+ const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
+ class DeepSeekChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+ constructor(options) {
+ super({
+ ...options,
+ model: options?.model || DEEPSEEK_DEFAULT_CHAT_MODEL,
+ baseURL: options?.baseURL || DEEPSEEK_BASE_URL,
+ });
+ }
+ apiKeyEnvName = "DEEPSEEK_API_KEY";
+ supportsNativeStructuredOutputs = false;
+ supportsToolsEmptyParameters = false;
+ }
+ exports.DeepSeekChatModel = DeepSeekChatModel;
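The adapter only overrides defaults and capability flags: `supportsNativeStructuredOutputs = false` makes the base class fall back to `json_object` mode plus a schema prompt, and `supportsToolsEmptyParameters = false` makes `toolsFromInputTools` inject `type: "object"` into empty tool parameters (both paths appear in the `openai-chat-model.js` changes below). A minimal usage sketch, assuming the class is re-exported from the package root:

    import { DeepSeekChatModel } from "@aigne/core"; // export path assumed

    // Reads DEEPSEEK_API_KEY; defaults to "deepseek-chat" at https://api.deepseek.com
    const deepseek = new DeepSeekChatModel();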
package/lib/cjs/models/gemini-chat-model.d.ts ADDED
@@ -0,0 +1,8 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class GeminiChatModel extends OpenAIChatModel {
+ constructor(options?: OpenAIChatModelOptions);
+ protected apiKeyEnvName: string;
+ protected supportsEndWithSystemMessage: boolean;
+ protected supportsToolsUseWithJsonSchema: boolean;
+ protected supportsParallelToolCalls: boolean;
+ }
package/lib/cjs/models/gemini-chat-model.js ADDED
@@ -0,0 +1,20 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.GeminiChatModel = void 0;
+ const openai_chat_model_js_1 = require("./openai-chat-model.js");
+ const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
+ const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
+ class GeminiChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+ constructor(options) {
+ super({
+ ...options,
+ model: options?.model || GEMINI_DEFAULT_CHAT_MODEL,
+ baseURL: options?.baseURL || GEMINI_BASE_URL,
+ });
+ }
+ apiKeyEnvName = "GEMINI_API_KEY";
+ supportsEndWithSystemMessage = false;
+ supportsToolsUseWithJsonSchema = false;
+ supportsParallelToolCalls = false;
+ }
+ exports.GeminiChatModel = GeminiChatModel;
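Gemini's flags cover its gaps in OpenAI compatibility: a conversation may not end with a system message (the base class appends an empty user message if needed), JSON-schema output cannot be combined with tool use, and parallel tool calls are unsupported. A minimal sketch, again assuming a package-root re-export:

    import { GeminiChatModel } from "@aigne/core"; // export path assumed

    // Reads GEMINI_API_KEY; defaults to "gemini-2.0-flash" via Google's
    // OpenAI-compatible endpoint.
    const gemini = new GeminiChatModel();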
package/lib/cjs/models/ollama-chat-model.d.ts ADDED
@@ -0,0 +1,6 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class OllamaChatModel extends OpenAIChatModel {
+ constructor(options?: OpenAIChatModelOptions);
+ protected apiKeyEnvName: string;
+ protected apiKeyDefault: string;
+ }
package/lib/cjs/models/ollama-chat-model.js ADDED
@@ -0,0 +1,18 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OllamaChatModel = void 0;
+ const openai_chat_model_js_1 = require("./openai-chat-model.js");
+ const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
+ const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
+ class OllamaChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+ constructor(options) {
+ super({
+ ...options,
+ model: options?.model || OLLAMA_DEFAULT_CHAT_MODEL,
+ baseURL: options?.baseURL || process.env.OLLAMA_BASE_URL || OLLAMA_DEFAULT_BASE_URL,
+ });
+ }
+ apiKeyEnvName = "OLLAMA_API_KEY";
+ apiKeyDefault = "ollama";
+ }
+ exports.OllamaChatModel = OllamaChatModel;
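Ollama is the one adapter that does not need a real key: `apiKeyDefault = "ollama"` satisfies the base class's API-key check, and the base URL can also come from the `OLLAMA_BASE_URL` environment variable. Sketch, assuming a package-root re-export:

    import { OllamaChatModel } from "@aigne/core"; // export path assumed

    // Targets a local Ollama server (http://localhost:11434/v1 by default);
    // the placeholder key "ollama" is used when OLLAMA_API_KEY is unset.
    const llama = new OllamaChatModel({ model: "llama3.2" });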
package/lib/cjs/models/open-router-chat-model.d.ts ADDED
@@ -0,0 +1,5 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class OpenRouterChatModel extends OpenAIChatModel {
+ constructor(options?: OpenAIChatModelOptions);
+ protected apiKeyEnvName: string;
+ }
package/lib/cjs/models/open-router-chat-model.js ADDED
@@ -0,0 +1,17 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OpenRouterChatModel = void 0;
+ const openai_chat_model_js_1 = require("./openai-chat-model.js");
+ const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
+ const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
+ class OpenRouterChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+ constructor(options) {
+ super({
+ ...options,
+ model: options?.model || OPEN_ROUTER_DEFAULT_CHAT_MODEL,
+ baseURL: options?.baseURL || OPEN_ROUTER_BASE_URL,
+ });
+ }
+ apiKeyEnvName = "OPEN_ROUTER_API_KEY";
+ }
+ exports.OpenRouterChatModel = OpenRouterChatModel;
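OpenRouter needs no capability overrides since it proxies OpenAI-compatible backends; note that its model ids are provider-prefixed. Sketch, assuming a package-root re-export:

    import { OpenRouterChatModel } from "@aigne/core"; // export path assumed

    // Reads OPEN_ROUTER_API_KEY; model ids take the "provider/model" form.
    const router = new OpenRouterChatModel({ model: "openai/gpt-4o" });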
package/lib/cjs/models/openai-chat-model.d.ts CHANGED
@@ -1,6 +1,8 @@
  import OpenAI from "openai";
+ import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
+ import type { Stream } from "openai/streaming.js";
  import { z } from "zod";
- import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
+ import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
  export interface OpenAIChatModelOptions {
  apiKey?: string;
  baseURL?: string;
@@ -62,7 +64,27 @@ export declare class OpenAIChatModel extends ChatModel {
  options?: OpenAIChatModelOptions | undefined;
  constructor(options?: OpenAIChatModelOptions | undefined);
  protected _client?: OpenAI;
+ protected apiKeyEnvName: string;
+ protected apiKeyDefault: string | undefined;
+ protected supportsNativeStructuredOutputs: boolean;
+ protected supportsEndWithSystemMessage: boolean;
+ protected supportsToolsUseWithJsonSchema: boolean;
+ protected supportsParallelToolCalls: boolean;
+ protected supportsToolsEmptyParameters: boolean;
  get client(): OpenAI;
  get modelOptions(): ChatModelOptions | undefined;
  process(input: ChatModelInput): Promise<ChatModelOutput>;
+ private getParallelToolCalls;
+ private getRunMessages;
+ private getRunResponseFormat;
+ private requestStructuredOutput;
  }
+ export declare const ROLE_MAP: {
+ [key in Role]: ChatCompletionMessageParam["role"];
+ };
+ export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
+ export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
+ addTypeToEmptyParameters?: boolean;
+ }): ChatCompletionTool[] | undefined;
+ export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;
+ export declare function extractResultFromStream(stream: Stream<OpenAI.Chat.Completions.ChatCompletionChunk>, jsonMode?: boolean): Promise<ChatModelOutput>;
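The helpers that were private to this module are now exported so the new adapters (and tests) can reuse them. `toolsFromInputTools` gains an option for providers such as DeepSeek that reject a tool whose `parameters` object is empty; a sketch of its effect, with the input shape abbreviated and the export path assumed:

    import { toolsFromInputTools } from "@aigne/core"; // export path assumed

    const tools = toolsFromInputTools(
      [{ type: "function", function: { name: "ping", description: "health check", parameters: {} } }],
      { addTypeToEmptyParameters: true },
    );
    // tools?.[0].function.parameters is now { type: "object" } rather than {}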
package/lib/cjs/models/openai-chat-model.js CHANGED
@@ -3,11 +3,17 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.OpenAIChatModel = exports.openAIChatModelOptionsSchema = void 0;
+ exports.ROLE_MAP = exports.OpenAIChatModel = exports.openAIChatModelOptionsSchema = void 0;
+ exports.contentsFromInputMessages = contentsFromInputMessages;
+ exports.toolsFromInputTools = toolsFromInputTools;
+ exports.jsonSchemaToOpenAIJsonSchema = jsonSchemaToOpenAIJsonSchema;
+ exports.extractResultFromStream = extractResultFromStream;
  const nanoid_1 = require("nanoid");
  const openai_1 = __importDefault(require("openai"));
  const zod_1 = require("zod");
  const json_schema_js_1 = require("../utils/json-schema.js");
+ const model_utils_js_1 = require("../utils/model-utils.js");
+ const prompts_js_1 = require("../utils/prompts.js");
  const type_utils_js_1 = require("../utils/type-utils.js");
  const chat_model_js_1 = require("./chat-model.js");
  const CHAT_MODEL_OPENAI_DEFAULT_MODEL = "gpt-4o-mini";
@@ -29,16 +35,23 @@ exports.openAIChatModelOptionsSchema = zod_1.z.object({
  class OpenAIChatModel extends chat_model_js_1.ChatModel {
  options;
  constructor(options) {
- if (options)
- (0, type_utils_js_1.checkArguments)("OpenAIChatModel", exports.openAIChatModelOptionsSchema, options);
  super();
  this.options = options;
+ if (options)
+ (0, type_utils_js_1.checkArguments)(this.name, exports.openAIChatModelOptionsSchema, options);
  }
  _client;
+ apiKeyEnvName = "OPENAI_API_KEY";
+ apiKeyDefault;
+ supportsNativeStructuredOutputs = true;
+ supportsEndWithSystemMessage = true;
+ supportsToolsUseWithJsonSchema = true;
+ supportsParallelToolCalls = true;
+ supportsToolsEmptyParameters = true;
  get client() {
- const apiKey = this.options?.apiKey || process.env.OPENAI_API_KEY;
+ const apiKey = this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault;
  if (!apiKey)
- throw new Error("Api Key is required for OpenAIChatModel");
+ throw new Error(`Api Key is required for ${this.name}`);
  this._client ??= new openai_1.default({
  baseURL: this.options?.baseURL,
  apiKey,
@@ -49,86 +62,99 @@ class OpenAIChatModel extends chat_model_js_1.ChatModel {
  return this.options?.modelOptions;
  }
  async process(input) {
- const res = await this.client.chat.completions.create({
+ const body = {
  model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
  temperature: input.modelOptions?.temperature ?? this.modelOptions?.temperature,
  top_p: input.modelOptions?.topP ?? this.modelOptions?.topP,
  frequency_penalty: input.modelOptions?.frequencyPenalty ?? this.modelOptions?.frequencyPenalty,
  presence_penalty: input.modelOptions?.presencePenalty ?? this.modelOptions?.presencePenalty,
- messages: await contentsFromInputMessages(input.messages),
- tools: toolsFromInputTools(input.tools),
- tool_choice: input.toolChoice,
- parallel_tool_calls: !input.tools?.length
- ? undefined
- : (input.modelOptions?.parallelToolCalls ?? this.modelOptions?.parallelToolCalls),
- response_format: input.responseFormat?.type === "json_schema"
- ? {
- type: "json_schema",
- json_schema: {
- ...input.responseFormat.jsonSchema,
- schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
- },
- }
- : undefined,
+ messages: await this.getRunMessages(input),
  stream_options: {
  include_usage: true,
  },
  stream: true,
+ };
+ const { jsonMode, responseFormat } = await this.getRunResponseFormat(input);
+ const stream = await this.client.chat.completions.create({
+ ...body,
+ tools: toolsFromInputTools(input.tools, {
+ addTypeToEmptyParameters: !this.supportsToolsEmptyParameters,
+ }),
+ tool_choice: input.toolChoice,
+ parallel_tool_calls: this.getParallelToolCalls(input),
+ response_format: responseFormat,
  });
- let text = "";
- const toolCalls = [];
- let usage;
- let model;
- for await (const chunk of res) {
- const choice = chunk.choices?.[0];
- model ??= chunk.model;
- if (choice?.delta.tool_calls?.length) {
- for (const call of choice.delta.tool_calls) {
- toolCalls[call.index] ??= {
- id: call.id || (0, nanoid_1.nanoid)(),
- type: "function",
- function: { name: "", arguments: {} },
- args: "",
- };
- const c = toolCalls[call.index];
- if (!c)
- throw new Error("Tool call not found");
- if (call.type)
- c.type = call.type;
- c.function.name = c.function.name + (call.function?.name || "");
- c.args = c.args.concat(call.function?.arguments || "");
- }
- }
- if (choice?.delta.content)
- text += choice.delta.content;
- if (chunk.usage) {
- usage = {
- inputTokens: chunk.usage.prompt_tokens,
- outputTokens: chunk.usage.completion_tokens,
- };
- }
+ const result = await extractResultFromStream(stream, jsonMode);
+ if (!this.supportsToolsUseWithJsonSchema &&
+ !result.toolCalls?.length &&
+ input.responseFormat?.type === "json_schema" &&
+ result.text) {
+ const output = await this.requestStructuredOutput(body, input.responseFormat);
+ return { ...output, usage: (0, model_utils_js_1.mergeUsage)(result.usage, output.usage) };
  }
- const result = {
- usage,
- model,
- };
- if (input.responseFormat?.type === "json_schema" && text) {
- result.json = (0, json_schema_js_1.parseJSON)(text);
+ return result;
+ }
+ getParallelToolCalls(input) {
+ if (!this.supportsParallelToolCalls)
+ return undefined;
+ if (!input.tools?.length)
+ return undefined;
+ return input.modelOptions?.parallelToolCalls ?? this.modelOptions?.parallelToolCalls;
+ }
+ async getRunMessages(input) {
+ const messages = await contentsFromInputMessages(input.messages);
+ if (!this.supportsEndWithSystemMessage && messages.at(-1)?.role !== "user") {
+ messages.push({ role: "user", content: "" });
  }
- else {
- result.text = text;
+ if (!this.supportsToolsUseWithJsonSchema && input.tools?.length)
+ return messages;
+ if (this.supportsNativeStructuredOutputs)
+ return messages;
+ if (input.responseFormat?.type === "json_schema") {
+ messages.unshift({
+ role: "system",
+ content: (0, prompts_js_1.getJsonOutputPrompt)(input.responseFormat.jsonSchema.schema),
+ });
  }
- if (toolCalls.length) {
- result.toolCalls = toolCalls.map(({ args, ...c }) => ({
- ...c,
- function: { ...c.function, arguments: (0, json_schema_js_1.parseJSON)(args) },
- }));
+ return messages;
+ }
+ async getRunResponseFormat(input) {
+ if (!this.supportsToolsUseWithJsonSchema && input.tools?.length)
+ return { jsonMode: false, responseFormat: undefined };
+ if (!this.supportsNativeStructuredOutputs) {
+ const jsonMode = input.responseFormat?.type === "json_schema";
+ return { jsonMode, responseFormat: jsonMode ? { type: "json_object" } : undefined };
  }
- return result;
+ if (input.responseFormat?.type === "json_schema") {
+ return {
+ jsonMode: true,
+ responseFormat: {
+ type: "json_schema",
+ json_schema: {
+ ...input.responseFormat.jsonSchema,
+ schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
+ },
+ },
+ };
+ }
+ return { jsonMode: false, responseFormat: undefined };
+ }
+ async requestStructuredOutput(body, responseFormat) {
+ if (responseFormat?.type !== "json_schema") {
+ throw new Error("Expected json_schema response format");
+ }
+ const { jsonMode, responseFormat: resolvedResponseFormat } = await this.getRunResponseFormat({
+ responseFormat,
+ });
+ const res = await this.client.chat.completions.create({
+ ...body,
+ response_format: resolvedResponseFormat,
+ });
+ return extractResultFromStream(res, jsonMode);
  }
  }
  exports.OpenAIChatModel = OpenAIChatModel;
- const ROLE_MAP = {
+ exports.ROLE_MAP = {
  system: "system",
  user: "user",
  agent: "assistant",
@@ -136,7 +162,7 @@ const ROLE_MAP = {
  };
  async function contentsFromInputMessages(messages) {
  return messages.map((i) => ({
- role: ROLE_MAP[i.role],
+ role: exports.ROLE_MAP[i.role],
  content: typeof i.content === "string"
  ? i.content
  : i.content
@@ -163,16 +189,22 @@ async function contentsFromInputMessages(messages) {
  name: i.name,
  }));
  }
- function toolsFromInputTools(tools) {
+ function toolsFromInputTools(tools, options) {
  return tools?.length
- ? tools.map((i) => ({
- type: "function",
- function: {
- name: i.function.name,
- description: i.function.description,
- parameters: i.function.parameters,
- },
- }))
+ ? tools.map((i) => {
+ const parameters = i.function.parameters;
+ if (options?.addTypeToEmptyParameters && Object.keys(parameters).length === 0) {
+ parameters.type = "object";
+ }
+ return {
+ type: "function",
+ function: {
+ name: i.function.name,
+ description: i.function.description,
+ parameters,
+ },
+ };
+ })
  : undefined;
  }
  function jsonSchemaToOpenAIJsonSchema(schema) {
@@ -200,3 +232,75 @@ function jsonSchemaToOpenAIJsonSchema(schema) {
  }
  return schema;
  }
+ async function extractResultFromStream(stream, jsonMode = false) {
+ let text = "";
+ const toolCalls = [];
+ let usage;
+ let model;
+ for await (const chunk of stream) {
+ const choice = chunk.choices?.[0];
+ model ??= chunk.model;
+ if (choice?.delta.tool_calls?.length) {
+ for (const call of choice.delta.tool_calls) {
+ // Gemini does not support tool call deltas
+ if (call.index !== undefined) {
+ handleToolCallDelta(toolCalls, call);
+ }
+ else {
+ handleCompleteToolCall(toolCalls, call);
+ }
+ }
+ }
+ if (choice?.delta.content)
+ text += choice.delta.content;
+ if (chunk.usage) {
+ usage = {
+ inputTokens: chunk.usage.prompt_tokens,
+ outputTokens: chunk.usage.completion_tokens,
+ };
+ }
+ }
+ const result = {
+ usage,
+ model,
+ };
+ if (jsonMode && text) {
+ result.json = (0, json_schema_js_1.parseJSON)(text);
+ }
+ else {
+ result.text = text;
+ }
+ if (toolCalls.length) {
+ result.toolCalls = toolCalls.map(({ args, ...c }) => ({
+ ...c,
+ function: { ...c.function, arguments: (0, json_schema_js_1.parseJSON)(args) },
+ }));
+ }
+ return result;
+ }
+ function handleToolCallDelta(toolCalls, call) {
+ toolCalls[call.index] ??= {
+ id: call.id || (0, nanoid_1.nanoid)(),
+ type: "function",
+ function: { name: "", arguments: {} },
+ args: "",
+ };
+ const c = toolCalls[call.index];
+ if (!c)
+ throw new Error("Tool call not found");
+ if (call.type)
+ c.type = call.type;
+ c.function.name = c.function.name + (call.function?.name || "");
+ c.args = c.args.concat(call.function?.arguments || "");
+ }
+ function handleCompleteToolCall(toolCalls, call) {
+ toolCalls.push({
+ id: call.id || (0, nanoid_1.nanoid)(),
+ type: "function",
+ function: {
+ name: call.function?.name || "",
+ arguments: (0, json_schema_js_1.parseJSON)(call.function?.arguments || "{}"),
+ },
+ args: call.function?.arguments || "",
+ });
+ }
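Two degradation paths are implemented by the `process` rewrite above. For providers flagged `supportsNativeStructuredOutputs = false` (DeepSeek), the request is downgraded to `json_object` mode and a system prompt built by `getJsonOutputPrompt` carries the schema. For providers flagged `supportsToolsUseWithJsonSchema = false` (Gemini), a request that combines tools with a `json_schema` response runs as plain text first; if the model answered with text instead of a tool call, `requestStructuredOutput` issues a second call to produce the JSON, and `mergeUsage` (from `utils/model-utils.js`, not shown in this diff) combines the token usage of both passes. A hedged calling sketch for the simple no-tools case; the `name` field inside `jsonSchema` mirrors OpenAI's `json_schema` envelope and is an assumption here:

    import { DeepSeekChatModel } from "@aigne/core"; // export path assumed

    const model = new DeepSeekChatModel();
    const result = await model.process({
      messages: [{ role: "user", content: "I live in Paris. Which city is that?" }],
      responseFormat: {
        type: "json_schema",
        jsonSchema: {
          name: "city",
          schema: { type: "object", properties: { city: { type: "string" } } },
        },
      },
    });
    // Sent in json_object mode with a getJsonOutputPrompt system message;
    // extractResultFromStream parses the reply into result.json, e.g. { city: "Paris" }.
    console.log(result.json);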
package/lib/cjs/models/xai-chat-model.d.ts CHANGED
@@ -1,13 +1,5 @@
- import OpenAI from "openai";
- import type { ChatModelOptions } from "./chat-model.js";
- import { OpenAIChatModel } from "./openai-chat-model.js";
- export interface XAIChatModelOptions {
- apiKey?: string;
- model?: string;
- modelOptions?: ChatModelOptions;
- baseURL?: string;
- }
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
  export declare class XAIChatModel extends OpenAIChatModel {
- constructor(options?: XAIChatModelOptions);
- get client(): OpenAI;
+ constructor(options?: OpenAIChatModelOptions);
+ protected apiKeyEnvName: string;
  }
@@ -1,10 +1,6 @@
1
1
  "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
2
  Object.defineProperty(exports, "__esModule", { value: true });
6
3
  exports.XAIChatModel = void 0;
7
- const openai_1 = __importDefault(require("openai"));
8
4
  const openai_chat_model_js_1 = require("./openai-chat-model.js");
9
5
  const XAI_DEFAULT_CHAT_MODEL = "grok-2-latest";
10
6
  const XAI_BASE_URL = "https://api.x.ai/v1";
@@ -16,15 +12,6 @@ class XAIChatModel extends openai_chat_model_js_1.OpenAIChatModel {
16
12
  baseURL: options?.baseURL || XAI_BASE_URL,
17
13
  });
18
14
  }
19
- get client() {
20
- const apiKey = this.options?.apiKey || process.env.XAI_API_KEY;
21
- if (!apiKey)
22
- throw new Error("Api Key is required for XAIChatModel");
23
- this._client ??= new openai_1.default({
24
- baseURL: this.options?.baseURL,
25
- apiKey,
26
- });
27
- return this._client;
28
- }
15
+ apiKeyEnvName = "XAI_API_KEY";
29
16
  }
30
17
  exports.XAIChatModel = XAIChatModel;
package/lib/cjs/prompt/prompt-builder.js CHANGED
@@ -132,6 +132,7 @@ class PromptBuilder {
  },
  }));
  let toolChoice;
+ const modelOptions = {};
  // use manual choice if configured in the agent
  const manualChoice = options.agent?.toolChoice;
  if (manualChoice) {
@@ -146,6 +147,7 @@ class PromptBuilder {
  }
  else if (manualChoice === "router") {
  toolChoice = "required";
+ modelOptions.parallelToolCalls = false;
  }
  else {
  toolChoice = manualChoice;
@@ -159,6 +161,7 @@ class PromptBuilder {
  toolAgents: toolAgents.length ? toolAgents : undefined,
  tools: tools.length ? tools : undefined,
  toolChoice,
+ modelOptions: Object.keys(modelOptions).length ? modelOptions : undefined,
  };
  }
  }
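For a `"router"` agent the builder already forced `toolChoice: "required"`; it now also pins `parallelToolCalls: false` so the model selects exactly one route per turn, and the collected `modelOptions` are attached only when non-empty. In sketch form, the relevant fields of the built prompt for a router agent:

    // Shape of the relevant fields after this change (values from the code above):
    const built = {
      toolChoice: "required" as const,
      modelOptions: { parallelToolCalls: false },
    };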
package/lib/cjs/utils/prompts.d.ts ADDED
@@ -0,0 +1 @@
+ export declare function getJsonOutputPrompt(schema: Record<string, unknown> | string): string;
package/lib/cjs/utils/prompts.js ADDED
@@ -0,0 +1,13 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getJsonOutputPrompt = getJsonOutputPrompt;
+ function getJsonOutputPrompt(schema) {
+ let prompt = "Provide your output as a JSON containing the following fields:";
+ if (typeof schema === "string") {
+ prompt += `\n<json_fields>\n${schema}\n</json_fields>`;
+ }
+ else {
+ prompt += `\n<json_fields>\n${JSON.stringify(schema)}\n</json_fields>`;
+ }
+ return prompt;
+ }
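This prompt backs the structured-output fallback in `openai-chat-model.js`. Its output is deterministic, so the result can be shown exactly (only the import path is assumed):

    import { getJsonOutputPrompt } from "@aigne/core"; // export path assumed

    getJsonOutputPrompt({ type: "object", properties: { city: { type: "string" } } });
    // Returns:
    // Provide your output as a JSON containing the following fields:
    // <json_fields>
    // {"type":"object","properties":{"city":{"type":"string"}}}
    // </json_fields>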
package/lib/cjs/utils/type-utils.d.ts CHANGED
@@ -11,5 +11,5 @@ export declare function orArrayToArray<T>(value?: T | T[]): T[];
  export declare function createAccessorArray<T>(array: T[], accessor: (array: T[], name: string) => T | undefined): T[] & {
  [key: string]: T;
  };
- export declare function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T): void;
+ export declare function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T): T;
  export declare function tryOrThrow<P extends PromiseOrValue<unknown>>(fn: () => P, error: string | Error | ((error: Error) => Error)): P;
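Returning the parsed value instead of `void` is what lets `agents/agent.js` above replace `this.inputSchema.parse(message)` with a single `checkArguments` call: validation and parsing now happen together, and the result keeps the schema's inferred type. A minimal sketch, assuming `checkArguments` is reachable from the package root:

    import { z } from "zod";
    import { checkArguments } from "@aigne/core"; // export path assumed

    const schema = z.object({ name: z.string() });
    // parsed is typed { name: string } and already validated:
    const parsed = checkArguments("Agent demo input", schema, { name: "demo" });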