@aigne/core 1.12.0 → 1.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/CHANGELOG.md +13 -0
  2. package/lib/cjs/loader/index.js +2 -0
  3. package/lib/cjs/models/bedrock-chat-model.d.ts +70 -0
  4. package/lib/cjs/models/bedrock-chat-model.js +273 -0
  5. package/lib/cjs/models/chat-model.d.ts +1 -0
  6. package/lib/cjs/models/chat-model.js +8 -0
  7. package/lib/cjs/models/gemini-chat-model.d.ts +1 -0
  8. package/lib/cjs/models/gemini-chat-model.js +1 -0
  9. package/lib/cjs/models/openai-chat-model.d.ts +3 -0
  10. package/lib/cjs/models/openai-chat-model.js +100 -100
  11. package/lib/cjs/prompt/prompt-builder.js +2 -2
  12. package/lib/cjs/utils/prompts.js +1 -1
  13. package/lib/cjs/utils/type-utils.d.ts +1 -0
  14. package/lib/cjs/utils/type-utils.js +12 -0
  15. package/lib/dts/models/bedrock-chat-model.d.ts +70 -0
  16. package/lib/dts/models/chat-model.d.ts +1 -0
  17. package/lib/dts/models/gemini-chat-model.d.ts +1 -0
  18. package/lib/dts/models/openai-chat-model.d.ts +3 -0
  19. package/lib/dts/utils/type-utils.d.ts +1 -0
  20. package/lib/esm/loader/index.js +2 -0
  21. package/lib/esm/models/bedrock-chat-model.d.ts +70 -0
  22. package/lib/esm/models/bedrock-chat-model.js +268 -0
  23. package/lib/esm/models/chat-model.d.ts +1 -0
  24. package/lib/esm/models/chat-model.js +8 -0
  25. package/lib/esm/models/gemini-chat-model.d.ts +1 -0
  26. package/lib/esm/models/gemini-chat-model.js +1 -0
  27. package/lib/esm/models/openai-chat-model.d.ts +3 -0
  28. package/lib/esm/models/openai-chat-model.js +100 -100
  29. package/lib/esm/prompt/prompt-builder.js +3 -3
  30. package/lib/esm/utils/prompts.js +1 -1
  31. package/lib/esm/utils/type-utils.d.ts +1 -0
  32. package/lib/esm/utils/type-utils.js +11 -0
  33. package/package.json +8 -1
@@ -117,10 +117,10 @@ class PromptBuilder {
117
117
  : undefined;
118
118
  }
119
119
  buildTools(options) {
120
- const toolAgents = (options.context?.skills ?? [])
120
+ const toolAgents = (0, type_utils_js_1.unique)((options.context?.skills ?? [])
121
121
  .concat(options.agent?.skills ?? [])
122
122
  // TODO: support nested tools?
123
- .flatMap((i) => (i.isInvokable ? i.skills.concat(i) : i.skills));
123
+ .flatMap((i) => (i.isInvokable ? i.skills.concat(i) : i.skills)), (i) => i.name);
124
124
  const tools = toolAgents.map((i) => ({
125
125
  type: "function",
126
126
  function: {
@@ -2,7 +2,7 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.getJsonOutputPrompt = getJsonOutputPrompt;
4
4
  function getJsonOutputPrompt(schema) {
5
- let prompt = "Provide your output as a JSON containing the following fields:";
5
+ let prompt = "Output must be a JSON object containing the following fields only.";
6
6
  if (typeof schema === "string") {
7
7
  prompt += `\n<json_fields>\n${schema}\n</json_fields>`;
8
8
  }
@@ -8,6 +8,7 @@ export declare function isEmpty(obj: unknown): boolean;
8
8
  export declare function isNonNullable<T>(value: T): value is NonNullable<T>;
9
9
  export declare function isNotEmpty<T>(arr: T[]): arr is [T, ...T[]];
10
10
  export declare function duplicates<T>(arr: T[], key?: (item: T) => unknown): T[];
11
+ export declare function unique<T>(arr: T[], key?: (item: T) => unknown): T[];
11
12
  export declare function omitBy<T extends Record<string, unknown>, K extends keyof T>(obj: T, predicate: (value: T[K], key: K) => boolean): Partial<T>;
12
13
  export declare function orArrayToArray<T>(value?: T | T[]): T[];
13
14
  export declare function createAccessorArray<T>(array: T[], accessor: (array: T[], name: string) => T | undefined): T[] & {
@@ -6,6 +6,7 @@ exports.isEmpty = isEmpty;
6
6
  exports.isNonNullable = isNonNullable;
7
7
  exports.isNotEmpty = isNotEmpty;
8
8
  exports.duplicates = duplicates;
9
+ exports.unique = unique;
9
10
  exports.omitBy = omitBy;
10
11
  exports.orArrayToArray = orArrayToArray;
11
12
  exports.createAccessorArray = createAccessorArray;
@@ -47,6 +48,17 @@ function duplicates(arr, key = (item) => item) {
47
48
  }
48
49
  return Array.from(duplicates);
49
50
  }
51
+ function unique(arr, key = (item) => item) {
52
+ const seen = new Set();
53
+ return arr.filter((item) => {
54
+ const k = key(item);
55
+ if (seen.has(k)) {
56
+ return false;
57
+ }
58
+ seen.add(k);
59
+ return true;
60
+ });
61
+ }
50
62
  function omitBy(obj, predicate) {
51
63
  return Object.fromEntries(Object.entries(obj).filter(([key, value]) => {
52
64
  const k = key;
@@ -0,0 +1,70 @@
1
+ import { BedrockRuntimeClient } from "@aws-sdk/client-bedrock-runtime";
2
+ import { z } from "zod";
3
+ import type { AgentInvokeOptions, AgentResponse } from "../agents/agent.js";
4
+ import type { Context } from "../aigne/context.js";
5
+ import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
6
+ export declare function extractLastJsonObject(text: string): string | null;
7
+ export interface BedrockChatModelOptions {
8
+ accessKeyId?: string;
9
+ secretAccessKey?: string;
10
+ region?: string;
11
+ model?: string;
12
+ modelOptions?: ChatModelOptions;
13
+ }
14
+ export declare const bedrockChatModelOptionsSchema: z.ZodObject<{
15
+ region: z.ZodOptional<z.ZodString>;
16
+ model: z.ZodOptional<z.ZodString>;
17
+ modelOptions: z.ZodOptional<z.ZodObject<{
18
+ model: z.ZodOptional<z.ZodString>;
19
+ temperature: z.ZodOptional<z.ZodNumber>;
20
+ topP: z.ZodOptional<z.ZodNumber>;
21
+ frequencyPenalty: z.ZodOptional<z.ZodNumber>;
22
+ presencePenalty: z.ZodOptional<z.ZodNumber>;
23
+ parallelToolCalls: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
24
+ }, "strip", z.ZodTypeAny, {
25
+ parallelToolCalls: boolean;
26
+ model?: string | undefined;
27
+ temperature?: number | undefined;
28
+ topP?: number | undefined;
29
+ frequencyPenalty?: number | undefined;
30
+ presencePenalty?: number | undefined;
31
+ }, {
32
+ model?: string | undefined;
33
+ temperature?: number | undefined;
34
+ topP?: number | undefined;
35
+ frequencyPenalty?: number | undefined;
36
+ presencePenalty?: number | undefined;
37
+ parallelToolCalls?: boolean | undefined;
38
+ }>>;
39
+ }, "strip", z.ZodTypeAny, {
40
+ modelOptions?: {
41
+ parallelToolCalls: boolean;
42
+ model?: string | undefined;
43
+ temperature?: number | undefined;
44
+ topP?: number | undefined;
45
+ frequencyPenalty?: number | undefined;
46
+ presencePenalty?: number | undefined;
47
+ } | undefined;
48
+ model?: string | undefined;
49
+ region?: string | undefined;
50
+ }, {
51
+ modelOptions?: {
52
+ model?: string | undefined;
53
+ temperature?: number | undefined;
54
+ topP?: number | undefined;
55
+ frequencyPenalty?: number | undefined;
56
+ presencePenalty?: number | undefined;
57
+ parallelToolCalls?: boolean | undefined;
58
+ } | undefined;
59
+ model?: string | undefined;
60
+ region?: string | undefined;
61
+ }>;
62
+ export declare class BedrockChatModel extends ChatModel {
63
+ options?: BedrockChatModelOptions | undefined;
64
+ constructor(options?: BedrockChatModelOptions | undefined);
65
+ protected _client?: BedrockRuntimeClient;
66
+ get client(): BedrockRuntimeClient;
67
+ get modelOptions(): ChatModelOptions | undefined;
68
+ process(input: ChatModelInput, _context: Context, options?: AgentInvokeOptions): Promise<AgentResponse<ChatModelOutput>>;
69
+ private extractResultFromStream;
70
+ }
@@ -6,6 +6,7 @@ export declare abstract class ChatModel extends Agent<ChatModelInput, ChatModelO
6
6
  getModelCapabilities(): {
7
7
  supportsParallelToolCalls: boolean;
8
8
  };
9
+ private validateToolNames;
9
10
  protected preprocess(input: ChatModelInput, context: Context): void;
10
11
  protected postprocess(input: ChatModelInput, output: ChatModelOutput, context: Context): void;
11
12
  }
@@ -5,4 +5,5 @@ export declare class GeminiChatModel extends OpenAIChatModel {
5
5
  protected supportsEndWithSystemMessage: boolean;
6
6
  protected supportsToolsUseWithJsonSchema: boolean;
7
7
  protected supportsParallelToolCalls: boolean;
8
+ protected supportsToolStreaming: boolean;
8
9
  }
@@ -10,6 +10,7 @@ export interface OpenAIChatModelCapabilities {
10
10
  supportsToolsUseWithJsonSchema: boolean;
11
11
  supportsParallelToolCalls: boolean;
12
12
  supportsToolsEmptyParameters: boolean;
13
+ supportsToolStreaming: boolean;
13
14
  supportsTemperature: boolean;
14
15
  }
15
16
  export interface OpenAIChatModelOptions {
@@ -80,6 +81,7 @@ export declare class OpenAIChatModel extends ChatModel {
80
81
  protected supportsToolsUseWithJsonSchema: boolean;
81
82
  protected supportsParallelToolCalls: boolean;
82
83
  protected supportsToolsEmptyParameters: boolean;
84
+ protected supportsToolStreaming: boolean;
83
85
  protected supportsTemperature: boolean;
84
86
  get client(): OpenAI;
85
87
  get modelOptions(): ChatModelOptions | undefined;
@@ -88,6 +90,7 @@ export declare class OpenAIChatModel extends ChatModel {
88
90
  private getRunMessages;
89
91
  private getRunResponseFormat;
90
92
  private requestStructuredOutput;
93
+ private extractResultFromStream;
91
94
  }
92
95
  export declare const ROLE_MAP: {
93
96
  [key in Role]: ChatCompletionMessageParam["role"];
@@ -8,6 +8,7 @@ export declare function isEmpty(obj: unknown): boolean;
8
8
  export declare function isNonNullable<T>(value: T): value is NonNullable<T>;
9
9
  export declare function isNotEmpty<T>(arr: T[]): arr is [T, ...T[]];
10
10
  export declare function duplicates<T>(arr: T[], key?: (item: T) => unknown): T[];
11
+ export declare function unique<T>(arr: T[], key?: (item: T) => unknown): T[];
11
12
  export declare function omitBy<T extends Record<string, unknown>, K extends keyof T>(obj: T, predicate: (value: T[K], key: K) => boolean): Partial<T>;
12
13
  export declare function orArrayToArray<T>(value?: T | T[]): T[];
13
14
  export declare function createAccessorArray<T>(array: T[], accessor: (array: T[], name: string) => T | undefined): T[] & {
@@ -5,6 +5,7 @@ import { z } from "zod";
5
5
  import { FunctionAgent } from "../agents/agent.js";
6
6
  import { AIAgent } from "../agents/ai-agent.js";
7
7
  import { MCPAgent } from "../agents/mcp-agent.js";
8
+ import { BedrockChatModel } from "../models/bedrock-chat-model.js";
8
9
  import { ClaudeChatModel } from "../models/claude-chat-model.js";
9
10
  import { DeepSeekChatModel } from "../models/deepseek-chat-model.js";
10
11
  import { GeminiChatModel } from "../models/gemini-chat-model.js";
@@ -80,6 +81,7 @@ export async function loadModel(model, modelOptions) {
80
81
  DeepSeekChatModel,
81
82
  OpenRouterChatModel,
82
83
  OllamaChatModel,
84
+ BedrockChatModel,
83
85
  ];
84
86
  const M = availableModels.find((m) => m.name
85
87
  .toLowerCase()
@@ -0,0 +1,70 @@
1
+ import { BedrockRuntimeClient } from "@aws-sdk/client-bedrock-runtime";
2
+ import { z } from "zod";
3
+ import type { AgentInvokeOptions, AgentResponse } from "../agents/agent.js";
4
+ import type { Context } from "../aigne/context.js";
5
+ import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
6
+ export declare function extractLastJsonObject(text: string): string | null;
7
+ export interface BedrockChatModelOptions {
8
+ accessKeyId?: string;
9
+ secretAccessKey?: string;
10
+ region?: string;
11
+ model?: string;
12
+ modelOptions?: ChatModelOptions;
13
+ }
14
+ export declare const bedrockChatModelOptionsSchema: z.ZodObject<{
15
+ region: z.ZodOptional<z.ZodString>;
16
+ model: z.ZodOptional<z.ZodString>;
17
+ modelOptions: z.ZodOptional<z.ZodObject<{
18
+ model: z.ZodOptional<z.ZodString>;
19
+ temperature: z.ZodOptional<z.ZodNumber>;
20
+ topP: z.ZodOptional<z.ZodNumber>;
21
+ frequencyPenalty: z.ZodOptional<z.ZodNumber>;
22
+ presencePenalty: z.ZodOptional<z.ZodNumber>;
23
+ parallelToolCalls: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
24
+ }, "strip", z.ZodTypeAny, {
25
+ parallelToolCalls: boolean;
26
+ model?: string | undefined;
27
+ temperature?: number | undefined;
28
+ topP?: number | undefined;
29
+ frequencyPenalty?: number | undefined;
30
+ presencePenalty?: number | undefined;
31
+ }, {
32
+ model?: string | undefined;
33
+ temperature?: number | undefined;
34
+ topP?: number | undefined;
35
+ frequencyPenalty?: number | undefined;
36
+ presencePenalty?: number | undefined;
37
+ parallelToolCalls?: boolean | undefined;
38
+ }>>;
39
+ }, "strip", z.ZodTypeAny, {
40
+ modelOptions?: {
41
+ parallelToolCalls: boolean;
42
+ model?: string | undefined;
43
+ temperature?: number | undefined;
44
+ topP?: number | undefined;
45
+ frequencyPenalty?: number | undefined;
46
+ presencePenalty?: number | undefined;
47
+ } | undefined;
48
+ model?: string | undefined;
49
+ region?: string | undefined;
50
+ }, {
51
+ modelOptions?: {
52
+ model?: string | undefined;
53
+ temperature?: number | undefined;
54
+ topP?: number | undefined;
55
+ frequencyPenalty?: number | undefined;
56
+ presencePenalty?: number | undefined;
57
+ parallelToolCalls?: boolean | undefined;
58
+ } | undefined;
59
+ model?: string | undefined;
60
+ region?: string | undefined;
61
+ }>;
62
+ export declare class BedrockChatModel extends ChatModel {
63
+ options?: BedrockChatModelOptions | undefined;
64
+ constructor(options?: BedrockChatModelOptions | undefined);
65
+ protected _client?: BedrockRuntimeClient;
66
+ get client(): BedrockRuntimeClient;
67
+ get modelOptions(): ChatModelOptions | undefined;
68
+ process(input: ChatModelInput, _context: Context, options?: AgentInvokeOptions): Promise<AgentResponse<ChatModelOutput>>;
69
+ private extractResultFromStream;
70
+ }
@@ -0,0 +1,268 @@
1
+ import { BedrockRuntimeClient, ConverseStreamCommand, } from "@aws-sdk/client-bedrock-runtime";
2
+ import { nanoid } from "nanoid";
3
+ import { z } from "zod";
4
+ import { parseJSON } from "../utils/json-schema.js";
5
+ import { getJsonOutputPrompt } from "../utils/prompts.js";
6
+ import { agentResponseStreamToObject } from "../utils/stream-utils.js";
7
+ import { checkArguments, isNonNullable } from "../utils/type-utils.js";
8
+ import { ChatModel, } from "./chat-model.js";
9
+ export function extractLastJsonObject(text) {
10
+ return text.replace(/<thinking>[\s\S]*?<\/thinking>/g, "").trim();
11
+ }
12
+ const BEDROCK_DEFAULT_CHAT_MODEL = "us.amazon.nova-lite-v1:0";
13
+ export const bedrockChatModelOptionsSchema = z.object({
14
+ region: z.string().optional(),
15
+ model: z.string().optional(),
16
+ modelOptions: z
17
+ .object({
18
+ model: z.string().optional(),
19
+ temperature: z.number().optional(),
20
+ topP: z.number().optional(),
21
+ frequencyPenalty: z.number().optional(),
22
+ presencePenalty: z.number().optional(),
23
+ parallelToolCalls: z.boolean().optional().default(true),
24
+ })
25
+ .optional(),
26
+ });
27
+ export class BedrockChatModel extends ChatModel {
28
+ options;
29
+ constructor(options) {
30
+ if (options)
31
+ checkArguments("BedrockChatModel", bedrockChatModelOptionsSchema, options);
32
+ super();
33
+ this.options = options;
34
+ }
35
+ _client;
36
+ get client() {
37
+ const credentials = this.options?.accessKeyId && this.options?.secretAccessKey
38
+ ? {
39
+ accessKeyId: this.options.accessKeyId,
40
+ secretAccessKey: this.options.secretAccessKey,
41
+ }
42
+ : undefined;
43
+ this._client ??= new BedrockRuntimeClient({
44
+ region: this.options?.region,
45
+ credentials,
46
+ });
47
+ return this._client;
48
+ }
49
+ get modelOptions() {
50
+ return this.options?.modelOptions;
51
+ }
52
+ async process(input, _context, options) {
53
+ const modelId = input.modelOptions?.model ?? this.modelOptions?.model ?? BEDROCK_DEFAULT_CHAT_MODEL;
54
+ const { messages, system } = getRunMessages(input);
55
+ const toolConfig = convertTools(input);
56
+ const body = {
57
+ modelId,
58
+ messages,
59
+ system,
60
+ toolConfig,
61
+ inferenceConfig: {
62
+ temperature: input.modelOptions?.temperature ?? this.modelOptions?.temperature,
63
+ topP: input.modelOptions?.topP ?? this.modelOptions?.topP,
64
+ },
65
+ };
66
+ const command = new ConverseStreamCommand(body);
67
+ const response = await this.client.send(command);
68
+ const jsonMode = input.responseFormat?.type === "json_schema";
69
+ if (options?.streaming && !jsonMode) {
70
+ return this.extractResultFromStream(response.stream, modelId, false, true);
71
+ }
72
+ const result = await this.extractResultFromStream(response.stream, modelId, jsonMode, false);
73
+ return result;
74
+ }
75
+ async extractResultFromStream(stream, modelId, jsonMode, streaming) {
76
+ if (!stream)
77
+ throw new Error("Unable to get AI model response.");
78
+ const result = new ReadableStream({
79
+ start: async (controller) => {
80
+ try {
81
+ controller.enqueue({ delta: { json: { model: modelId } } });
82
+ const toolCalls = [];
83
+ let text = "";
84
+ let usage;
85
+ for await (const chunk of stream) {
86
+ if (chunk.contentBlockStart?.start?.toolUse) {
87
+ const toolUse = chunk.contentBlockStart.start.toolUse;
88
+ if (!toolUse.name)
89
+ throw new Error("Tool use is invalid");
90
+ if (chunk.contentBlockStart.contentBlockIndex === undefined)
91
+ throw new Error("Tool use content block index is required");
92
+ toolCalls[chunk.contentBlockStart.contentBlockIndex] = {
93
+ type: "function",
94
+ id: toolUse.toolUseId || nanoid(),
95
+ function: {
96
+ name: toolUse.name,
97
+ arguments: {},
98
+ },
99
+ args: "",
100
+ };
101
+ }
102
+ if (chunk.contentBlockDelta) {
103
+ const block = chunk.contentBlockDelta;
104
+ const delta = block.delta;
105
+ if (delta?.text) {
106
+ text += delta.text;
107
+ if (!jsonMode) {
108
+ controller.enqueue({ delta: { text: { text: delta.text } } });
109
+ }
110
+ }
111
+ if (delta?.toolUse) {
112
+ if (block.contentBlockIndex === undefined)
113
+ throw new Error("Content block index is required");
114
+ const call = toolCalls[block.contentBlockIndex];
115
+ if (!call)
116
+ throw new Error("Tool call not found");
117
+ call.args += delta.toolUse.input;
118
+ }
119
+ }
120
+ if (chunk.metadata) {
121
+ usage = chunk.metadata.usage;
122
+ }
123
+ }
124
+ if (jsonMode && text) {
125
+ const match = extractLastJsonObject(text);
126
+ if (!match)
127
+ throw new Error("Failed to extract JSON object from model output");
128
+ controller.enqueue({
129
+ delta: { json: { json: parseJSON(match) } },
130
+ });
131
+ }
132
+ if (toolCalls.length) {
133
+ controller.enqueue({
134
+ delta: {
135
+ json: {
136
+ toolCalls: toolCalls
137
+ .map(({ args, ...c }) => ({
138
+ ...c,
139
+ function: { ...c.function, arguments: parseJSON(args) },
140
+ }))
141
+ .filter(isNonNullable),
142
+ },
143
+ },
144
+ });
145
+ }
146
+ controller.enqueue({ delta: { json: { usage } } });
147
+ controller.close();
148
+ }
149
+ catch (error) {
150
+ controller.error(error);
151
+ }
152
+ },
153
+ });
154
+ return streaming ? result : await agentResponseStreamToObject(result);
155
+ }
156
+ }
157
+ const getRunMessages = ({ messages: msgs, responseFormat, }) => {
158
+ const system = [];
159
+ const messages = [];
160
+ for (const msg of msgs) {
161
+ if (msg.role === "system") {
162
+ if (typeof msg.content !== "string")
163
+ throw new Error("System message must have content");
164
+ system.push({ text: msg.content });
165
+ }
166
+ else if (msg.role === "tool") {
167
+ if (!msg.toolCallId)
168
+ throw new Error("Tool message must have toolCallId");
169
+ if (typeof msg.content !== "string")
170
+ throw new Error("Tool message must have string content");
171
+ if (messages.at(-1)?.role === "user") {
172
+ messages.at(-1)?.content?.push({
173
+ toolResult: { toolUseId: msg.toolCallId, content: [{ json: parseJSON(msg.content) }] },
174
+ });
175
+ }
176
+ else {
177
+ messages.push({
178
+ role: "user",
179
+ content: [
180
+ {
181
+ toolResult: {
182
+ toolUseId: msg.toolCallId,
183
+ content: [{ json: parseJSON(msg.content) }],
184
+ },
185
+ },
186
+ ],
187
+ });
188
+ }
189
+ }
190
+ else if (msg.role === "user") {
191
+ if (!msg.content)
192
+ throw new Error("User message must have content");
193
+ messages.push({ role: "user", content: convertContent(msg.content) });
194
+ }
195
+ else if (msg.role === "agent") {
196
+ if (msg.toolCalls?.length) {
197
+ messages.push({
198
+ role: "assistant",
199
+ content: msg.toolCalls.map((i) => ({
200
+ toolUse: {
201
+ toolUseId: i.id,
202
+ name: i.function.name,
203
+ input: i.function.arguments,
204
+ },
205
+ })),
206
+ });
207
+ }
208
+ else if (msg.content) {
209
+ messages.push({ role: "assistant", content: convertContent(msg.content) });
210
+ }
211
+ else {
212
+ throw new Error("Agent message must have content or toolCalls");
213
+ }
214
+ }
215
+ }
216
+ if (messages.at(0)?.role !== "user") {
217
+ messages.unshift({ role: "user", content: [{ text: "." }] });
218
+ }
219
+ if (responseFormat?.type === "json_schema") {
220
+ system.push({
221
+ text: getJsonOutputPrompt(responseFormat.jsonSchema.schema),
222
+ });
223
+ }
224
+ return { messages, system };
225
+ };
226
+ function convertContent(content) {
227
+ if (typeof content === "string")
228
+ return [{ text: content }];
229
+ if (Array.isArray(content)) {
230
+ const blocks = [];
231
+ for (const item of content) {
232
+ if (item.type === "text")
233
+ blocks.push({ text: item.text });
234
+ }
235
+ return blocks;
236
+ }
237
+ throw new Error("Invalid chat message content");
238
+ }
239
+ function convertTools({ tools, toolChoice }) {
240
+ if (!tools?.length || toolChoice === "none")
241
+ return undefined;
242
+ let choice;
243
+ if (typeof toolChoice === "object" && "type" in toolChoice && toolChoice.type === "function") {
244
+ choice = { tool: { name: toolChoice.function.name } };
245
+ }
246
+ else if (toolChoice === "required") {
247
+ choice = { any: {} };
248
+ }
249
+ else if (toolChoice === "auto") {
250
+ choice = { auto: {} };
251
+ }
252
+ return {
253
+ tools: tools.map((i) => {
254
+ const parameters = i.function.parameters;
255
+ if (Object.keys(parameters).length === 0) {
256
+ parameters.type = "object";
257
+ }
258
+ return {
259
+ toolSpec: {
260
+ name: i.function.name,
261
+ description: i.function.description,
262
+ inputSchema: { json: parameters },
263
+ },
264
+ };
265
+ }),
266
+ toolChoice: choice,
267
+ };
268
+ }
@@ -6,6 +6,7 @@ export declare abstract class ChatModel extends Agent<ChatModelInput, ChatModelO
6
6
  getModelCapabilities(): {
7
7
  supportsParallelToolCalls: boolean;
8
8
  };
9
+ private validateToolNames;
9
10
  protected preprocess(input: ChatModelInput, context: Context): void;
10
11
  protected postprocess(input: ChatModelInput, output: ChatModelOutput, context: Context): void;
11
12
  }
@@ -13,6 +13,13 @@ export class ChatModel extends Agent {
13
13
  supportsParallelToolCalls: this.supportsParallelToolCalls,
14
14
  };
15
15
  }
16
+ validateToolNames(tools) {
17
+ for (const tool of tools ?? []) {
18
+ if (!/^[a-zA-Z0-9_]+$/.test(tool.function.name)) {
19
+ throw new Error(`Tool name "${tool.function.name}" can only contain letters, numbers, and underscores`);
20
+ }
21
+ }
22
+ }
16
23
  preprocess(input, context) {
17
24
  super.preprocess(input, context);
18
25
  const { limits, usage } = context;
@@ -20,6 +27,7 @@ export class ChatModel extends Agent {
20
27
  if (limits?.maxTokens && usedTokens >= limits.maxTokens) {
21
28
  throw new Error(`Exceeded max tokens ${usedTokens}/${limits.maxTokens}`);
22
29
  }
30
+ this.validateToolNames(input.tools);
23
31
  }
24
32
  postprocess(input, output, context) {
25
33
  super.postprocess(input, output, context);
@@ -5,4 +5,5 @@ export declare class GeminiChatModel extends OpenAIChatModel {
5
5
  protected supportsEndWithSystemMessage: boolean;
6
6
  protected supportsToolsUseWithJsonSchema: boolean;
7
7
  protected supportsParallelToolCalls: boolean;
8
+ protected supportsToolStreaming: boolean;
8
9
  }
@@ -13,4 +13,5 @@ export class GeminiChatModel extends OpenAIChatModel {
13
13
  supportsEndWithSystemMessage = false;
14
14
  supportsToolsUseWithJsonSchema = false;
15
15
  supportsParallelToolCalls = false;
16
+ supportsToolStreaming = false;
16
17
  }
@@ -10,6 +10,7 @@ export interface OpenAIChatModelCapabilities {
10
10
  supportsToolsUseWithJsonSchema: boolean;
11
11
  supportsParallelToolCalls: boolean;
12
12
  supportsToolsEmptyParameters: boolean;
13
+ supportsToolStreaming: boolean;
13
14
  supportsTemperature: boolean;
14
15
  }
15
16
  export interface OpenAIChatModelOptions {
@@ -80,6 +81,7 @@ export declare class OpenAIChatModel extends ChatModel {
80
81
  protected supportsToolsUseWithJsonSchema: boolean;
81
82
  protected supportsParallelToolCalls: boolean;
82
83
  protected supportsToolsEmptyParameters: boolean;
84
+ protected supportsToolStreaming: boolean;
83
85
  protected supportsTemperature: boolean;
84
86
  get client(): OpenAI;
85
87
  get modelOptions(): ChatModelOptions | undefined;
@@ -88,6 +90,7 @@ export declare class OpenAIChatModel extends ChatModel {
88
90
  private getRunMessages;
89
91
  private getRunResponseFormat;
90
92
  private requestStructuredOutput;
93
+ private extractResultFromStream;
91
94
  }
92
95
  export declare const ROLE_MAP: {
93
96
  [key in Role]: ChatCompletionMessageParam["role"];