@aigne/core 1.8.0 → 1.9.0

Files changed (48)
  1. package/CHANGELOG.md +12 -0
  2. package/lib/cjs/agents/agent.js +2 -2
  3. package/lib/cjs/loader/index.js +20 -4
  4. package/lib/cjs/models/deepseek-chat-model.d.ts +7 -0
  5. package/lib/cjs/models/deepseek-chat-model.js +19 -0
  6. package/lib/cjs/models/gemini-chat-model.d.ts +8 -0
  7. package/lib/cjs/models/gemini-chat-model.js +20 -0
  8. package/lib/cjs/models/ollama-chat-model.d.ts +6 -0
  9. package/lib/cjs/models/ollama-chat-model.js +18 -0
  10. package/lib/cjs/models/open-router-chat-model.d.ts +5 -0
  11. package/lib/cjs/models/open-router-chat-model.js +17 -0
  12. package/lib/cjs/models/openai-chat-model.d.ts +23 -1
  13. package/lib/cjs/models/openai-chat-model.js +182 -78
  14. package/lib/cjs/models/xai-chat-model.d.ts +3 -11
  15. package/lib/cjs/models/xai-chat-model.js +1 -14
  16. package/lib/cjs/prompt/prompt-builder.js +3 -0
  17. package/lib/cjs/utils/prompts.d.ts +1 -0
  18. package/lib/cjs/utils/prompts.js +13 -0
  19. package/lib/cjs/utils/type-utils.d.ts +1 -1
  20. package/lib/cjs/utils/type-utils.js +1 -1
  21. package/lib/dts/models/deepseek-chat-model.d.ts +7 -0
  22. package/lib/dts/models/gemini-chat-model.d.ts +8 -0
  23. package/lib/dts/models/ollama-chat-model.d.ts +6 -0
  24. package/lib/dts/models/open-router-chat-model.d.ts +5 -0
  25. package/lib/dts/models/openai-chat-model.d.ts +23 -1
  26. package/lib/dts/models/xai-chat-model.d.ts +3 -11
  27. package/lib/dts/utils/prompts.d.ts +1 -0
  28. package/lib/dts/utils/type-utils.d.ts +1 -1
  29. package/lib/esm/agents/agent.js +3 -3
  30. package/lib/esm/loader/index.js +20 -4
  31. package/lib/esm/models/deepseek-chat-model.d.ts +7 -0
  32. package/lib/esm/models/deepseek-chat-model.js +15 -0
  33. package/lib/esm/models/gemini-chat-model.d.ts +8 -0
  34. package/lib/esm/models/gemini-chat-model.js +16 -0
  35. package/lib/esm/models/ollama-chat-model.d.ts +6 -0
  36. package/lib/esm/models/ollama-chat-model.js +14 -0
  37. package/lib/esm/models/open-router-chat-model.d.ts +5 -0
  38. package/lib/esm/models/open-router-chat-model.js +13 -0
  39. package/lib/esm/models/openai-chat-model.d.ts +23 -1
  40. package/lib/esm/models/openai-chat-model.js +178 -78
  41. package/lib/esm/models/xai-chat-model.d.ts +3 -11
  42. package/lib/esm/models/xai-chat-model.js +1 -11
  43. package/lib/esm/prompt/prompt-builder.js +3 -0
  44. package/lib/esm/utils/prompts.d.ts +1 -0
  45. package/lib/esm/utils/prompts.js +10 -0
  46. package/lib/esm/utils/type-utils.d.ts +1 -1
  47. package/lib/esm/utils/type-utils.js +1 -1
  48. package/package.json +1 -1
@@ -54,7 +54,7 @@ function createAccessorArray(array, accessor) {
  }
  function checkArguments(prefix, schema, args) {
      try {
-         schema.parse(args, {
+         return schema.parse(args, {
              errorMap: (issue, ctx) => {
                  if (issue.code === "invalid_union") {
                      // handle all issues that are not invalid_type
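checkArguments now returns the parsed value instead of void, so call sites can use the zod-validated (and transformed) result directly. A minimal sketch of the new contract; the import path and names are illustrative, not taken from this diff:

import { z } from "zod";
import { checkArguments } from "./utils/type-utils.js"; // illustrative path

const schema = z.object({ model: z.string(), temperature: z.number().optional() });

// 1.8.0: checkArguments returned void, so callers kept using the raw args.
// 1.9.0: it returns schema.parse(args), so zod transforms apply:
const opts = checkArguments("MyModel options", schema, { model: "gpt-4o-mini" });
// opts is the parsed value, typed as z.infer<typeof schema>.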
@@ -0,0 +1,7 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class DeepSeekChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+     protected supportsNativeStructuredOutputs: boolean;
+     protected supportsToolsEmptyParameters: boolean;
+ }
@@ -0,0 +1,8 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class GeminiChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+     protected supportsEndWithSystemMessage: boolean;
+     protected supportsToolsUseWithJsonSchema: boolean;
+     protected supportsParallelToolCalls: boolean;
+ }
@@ -0,0 +1,6 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class OllamaChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+     protected apiKeyDefault: string;
+ }
@@ -0,0 +1,5 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class OpenRouterChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+ }
@@ -1,6 +1,8 @@
  import OpenAI from "openai";
+ import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
+ import type { Stream } from "openai/streaming.js";
  import { z } from "zod";
- import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
+ import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
  export interface OpenAIChatModelOptions {
      apiKey?: string;
      baseURL?: string;
@@ -62,7 +64,27 @@ export declare class OpenAIChatModel extends ChatModel {
  options?: OpenAIChatModelOptions | undefined;
  constructor(options?: OpenAIChatModelOptions | undefined);
  protected _client?: OpenAI;
+ protected apiKeyEnvName: string;
+ protected apiKeyDefault: string | undefined;
+ protected supportsNativeStructuredOutputs: boolean;
+ protected supportsEndWithSystemMessage: boolean;
+ protected supportsToolsUseWithJsonSchema: boolean;
+ protected supportsParallelToolCalls: boolean;
+ protected supportsToolsEmptyParameters: boolean;
  get client(): OpenAI;
  get modelOptions(): ChatModelOptions | undefined;
  process(input: ChatModelInput): Promise<ChatModelOutput>;
+ private getParallelToolCalls;
+ private getRunMessages;
+ private getRunResponseFormat;
+ private requestStructuredOutput;
  }
+ export declare const ROLE_MAP: {
+     [key in Role]: ChatCompletionMessageParam["role"];
+ };
+ export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
+ export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
+     addTypeToEmptyParameters?: boolean;
+ }): ChatCompletionTool[] | undefined;
+ export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;
+ export declare function extractResultFromStream(stream: Stream<OpenAI.Chat.Completions.ChatCompletionChunk>, jsonMode?: boolean): Promise<ChatModelOutput>;
@@ -1,13 +1,5 @@
- import OpenAI from "openai";
- import type { ChatModelOptions } from "./chat-model.js";
- import { OpenAIChatModel } from "./openai-chat-model.js";
- export interface XAIChatModelOptions {
-     apiKey?: string;
-     model?: string;
-     modelOptions?: ChatModelOptions;
-     baseURL?: string;
- }
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
  export declare class XAIChatModel extends OpenAIChatModel {
-     constructor(options?: XAIChatModelOptions);
-     get client(): OpenAI;
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
  }
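The standalone XAIChatModelOptions interface is gone; XAIChatModel now reuses OpenAIChatModelOptions from its base class. The apiKey/model/baseURL/modelOptions fields carry over, so most call sites only need a type rename. A hedged migration sketch (export location and model name are illustrative):

import { XAIChatModel } from "@aigne/core"; // assumed re-export from the package root

// 1.8.0: constructor(options?: XAIChatModelOptions)
// 1.9.0: constructor(options?: OpenAIChatModelOptions), same field shape
const xai = new XAIChatModel({ apiKey: process.env.XAI_API_KEY, model: "grok-2" });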
@@ -0,0 +1 @@
+ export declare function getJsonOutputPrompt(schema: Record<string, unknown> | string): string;
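This new helper renders a system-prompt instruction telling the model to answer with JSON matching the given schema; it backs the prompt-based structured-output fallback in openai-chat-model.js below. The exact prompt wording is not part of this diff, but the call pattern (excerpted from getRunMessages in the hunks that follow) is:

// When supportsNativeStructuredOutputs is false and a json_schema response
// format is requested, the schema is injected as a leading system message:
messages.unshift({
    role: "system",
    content: getJsonOutputPrompt(input.responseFormat.jsonSchema.schema),
});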
@@ -11,5 +11,5 @@ export declare function orArrayToArray<T>(value?: T | T[]): T[];
  export declare function createAccessorArray<T>(array: T[], accessor: (array: T[], name: string) => T | undefined): T[] & {
      [key: string]: T;
  };
- export declare function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T): void;
+ export declare function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T): T;
  export declare function tryOrThrow<P extends PromiseOrValue<unknown>>(fn: () => P, error: string | Error | ((error: Error) => Error)): P;
@@ -2,7 +2,7 @@ import { inspect } from "node:util";
  import { ZodObject, z } from "zod";
  import { createMessage } from "../prompt/prompt-builder.js";
  import { logger } from "../utils/logger.js";
- import { createAccessorArray, orArrayToArray, } from "../utils/type-utils.js";
+ import { checkArguments, createAccessorArray, orArrayToArray, } from "../utils/type-utils.js";
  import { AgentMemory } from "./memory.js";
  import { replaceTransferAgentToName, transferToAgentOutput, } from "./types.js";
  export class Agent {
@@ -101,12 +101,12 @@ export class Agent {
  if (!this.disableEvents)
      ctx.emit("agentStarted", { agent: this, input: message });
  try {
-     const parsedInput = this.inputSchema.parse(message);
+     const parsedInput = checkArguments(`Agent ${this.name} input`, this.inputSchema, message);
      this.preprocess(parsedInput, ctx);
      this.checkContextStatus(ctx);
      const output = await this.process(parsedInput, ctx)
          .then((output) => {
-             const parsedOutput = this.outputSchema.parse(output);
+             const parsedOutput = checkArguments(`Agent ${this.name} output`, this.outputSchema, output);
              return this.includeInputInOutput ? { ...parsedInput, ...parsedOutput } : parsedOutput;
          })
          .then((output) => {
@@ -6,6 +6,10 @@ import { FunctionAgent } from "../agents/agent.js";
  import { AIAgent } from "../agents/ai-agent.js";
  import { MCPAgent } from "../agents/mcp-agent.js";
  import { ClaudeChatModel } from "../models/claude-chat-model.js";
+ import { DeepSeekChatModel } from "../models/deepseek-chat-model.js";
+ import { GeminiChatModel } from "../models/gemini-chat-model.js";
+ import { OllamaChatModel } from "../models/ollama-chat-model.js";
+ import { OpenRouterChatModel } from "../models/open-router-chat-model.js";
  import { OpenAIChatModel } from "../models/openai-chat-model.js";
  import { XAIChatModel } from "../models/xai-chat-model.js";
  import { tryOrThrow } from "../utils/type-utils.js";
@@ -68,17 +72,29 @@ export async function loadAgent(path) {
  }
  throw new Error(`Unsupported agent file type: ${path}`);
  }
- const { MODEL_PROVIDER = "openai", MODEL_NAME = "gpt-4o-mini" } = process.env;
+ const { MODEL_PROVIDER, MODEL_NAME } = process.env;
+ const DEFAULT_MODEL_PROVIDER = "openai";
+ const DEFAULT_MODEL_NAME = "gpt-4o-mini";
  export async function loadModel(model, modelOptions) {
      const params = {
-         model: model?.name ?? MODEL_NAME,
+         model: MODEL_NAME ?? model?.name ?? DEFAULT_MODEL_NAME,
          temperature: model?.temperature ?? undefined,
          topP: model?.top_p ?? undefined,
          frequencyPenalty: model?.frequent_penalty ?? undefined,
          presencePenalty: model?.presence_penalty ?? undefined,
      };
-     const availableModels = [OpenAIChatModel, ClaudeChatModel, XAIChatModel];
-     const M = availableModels.find((m) => m.name.toLowerCase().includes(model?.provider || MODEL_PROVIDER));
+     const availableModels = [
+         OpenAIChatModel,
+         ClaudeChatModel,
+         XAIChatModel,
+         GeminiChatModel,
+         DeepSeekChatModel,
+         OpenRouterChatModel,
+         OllamaChatModel,
+     ];
+     const M = availableModels.find((m) => m.name
+         .toLowerCase()
+         .includes((MODEL_PROVIDER ?? model?.provider ?? DEFAULT_MODEL_PROVIDER).toLowerCase()));
  if (!M)
      throw new Error(`Unsupported model: ${model?.provider} ${model?.name}`);
  return new M({ model: params.model, modelOptions: { ...params, ...modelOptions } });
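Two behavioral changes here are worth calling out: the MODEL_NAME and MODEL_PROVIDER environment variables now override the agent file's model settings (previously they were only fallbacks), and provider matching is case-insensitive. An illustrative restatement, with hypothetical values:

// Not a package export; mirrors the precedence in loadModel above.
const resolveModelName = (envName?: string, fileName?: string) =>
    envName ?? fileName ?? "gpt-4o-mini";
resolveModelName("deepseek-reasoner", "deepseek-chat"); // env now wins: "deepseek-reasoner"
resolveModelName(undefined, "deepseek-chat");           // agent file value: "deepseek-chat"
// Provider matching lowercases the class name, so MODEL_PROVIDER=Gemini works:
// "geminichatmodel".includes("gemini") === true selects GeminiChatModel.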
@@ -0,0 +1,7 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class DeepSeekChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+     protected supportsNativeStructuredOutputs: boolean;
+     protected supportsToolsEmptyParameters: boolean;
+ }
@@ -0,0 +1,15 @@
+ import { OpenAIChatModel } from "./openai-chat-model.js";
+ const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
+ const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
+ export class DeepSeekChatModel extends OpenAIChatModel {
+     constructor(options) {
+         super({
+             ...options,
+             model: options?.model || DEEPSEEK_DEFAULT_CHAT_MODEL,
+             baseURL: options?.baseURL || DEEPSEEK_BASE_URL,
+         });
+     }
+     apiKeyEnvName = "DEEPSEEK_API_KEY";
+     supportsNativeStructuredOutputs = false;
+     supportsToolsEmptyParameters = false;
+ }
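A minimal usage sketch under these defaults, assuming the class is re-exported from the package root:

import { DeepSeekChatModel } from "@aigne/core"; // assumed export location

// With no options: model "deepseek-chat", baseURL "https://api.deepseek.com",
// API key read from DEEPSEEK_API_KEY.
const deepseek = new DeepSeekChatModel();

Because supportsNativeStructuredOutputs is false, json_schema requests are downgraded to { type: "json_object" } plus a getJsonOutputPrompt system message, and empty tool parameters get "type": "object" added since supportsToolsEmptyParameters is also false.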
@@ -0,0 +1,8 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class GeminiChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+     protected supportsEndWithSystemMessage: boolean;
+     protected supportsToolsUseWithJsonSchema: boolean;
+     protected supportsParallelToolCalls: boolean;
+ }
@@ -0,0 +1,16 @@
+ import { OpenAIChatModel } from "./openai-chat-model.js";
+ const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
+ const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
+ export class GeminiChatModel extends OpenAIChatModel {
+     constructor(options) {
+         super({
+             ...options,
+             model: options?.model || GEMINI_DEFAULT_CHAT_MODEL,
+             baseURL: options?.baseURL || GEMINI_BASE_URL,
+         });
+     }
+     apiKeyEnvName = "GEMINI_API_KEY";
+     supportsEndWithSystemMessage = false;
+     supportsToolsUseWithJsonSchema = false;
+     supportsParallelToolCalls = false;
+ }
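The three disabled flags map onto the compatibility paths in OpenAIChatModel shown below; a hedged sketch of what they trigger at request time (export location assumed):

import { GeminiChatModel } from "@aigne/core"; // assumed export location

const gemini = new GeminiChatModel({ apiKey: process.env.GEMINI_API_KEY });
// Per the base-class logic: a conversation ending in a system message gets an
// empty user message appended, response_format is dropped whenever tools are
// present (with a follow-up structured-output request afterwards), and
// parallel_tool_calls is never sent.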
@@ -0,0 +1,6 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class OllamaChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+     protected apiKeyDefault: string;
+ }
@@ -0,0 +1,14 @@
+ import { OpenAIChatModel } from "./openai-chat-model.js";
+ const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
+ const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
+ export class OllamaChatModel extends OpenAIChatModel {
+     constructor(options) {
+         super({
+             ...options,
+             model: options?.model || OLLAMA_DEFAULT_CHAT_MODEL,
+             baseURL: options?.baseURL || process.env.OLLAMA_BASE_URL || OLLAMA_DEFAULT_BASE_URL,
+         });
+     }
+     apiKeyEnvName = "OLLAMA_API_KEY";
+     apiKeyDefault = "ollama";
+ }
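Ollama serves locally and does not validate keys, hence the apiKeyDefault placeholder. A usage sketch (export location and model tag are illustrative):

import { OllamaChatModel } from "@aigne/core"; // assumed export location

// Base URL resolves options.baseURL -> OLLAMA_BASE_URL env -> http://localhost:11434/v1;
// the literal "ollama" satisfies the OpenAI client's api-key requirement.
const ollama = new OllamaChatModel({ model: "qwen2.5" });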
@@ -0,0 +1,5 @@
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ export declare class OpenRouterChatModel extends OpenAIChatModel {
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
+ }
@@ -0,0 +1,13 @@
+ import { OpenAIChatModel } from "./openai-chat-model.js";
+ const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
+ const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
+ export class OpenRouterChatModel extends OpenAIChatModel {
+     constructor(options) {
+         super({
+             ...options,
+             model: options?.model || OPEN_ROUTER_DEFAULT_CHAT_MODEL,
+             baseURL: options?.baseURL || OPEN_ROUTER_BASE_URL,
+         });
+     }
+     apiKeyEnvName = "OPEN_ROUTER_API_KEY";
+ }
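OpenRouter addresses models with "vendor/model" slugs, which is why the default is "openai/gpt-4o". A usage sketch (export location and slug are illustrative):

import { OpenRouterChatModel } from "@aigne/core"; // assumed export location

// Requires OPEN_ROUTER_API_KEY (or an explicit apiKey option).
const openRouter = new OpenRouterChatModel({ model: "anthropic/claude-3.5-sonnet" });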
@@ -1,6 +1,8 @@
  import OpenAI from "openai";
+ import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
+ import type { Stream } from "openai/streaming.js";
  import { z } from "zod";
- import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
+ import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
  export interface OpenAIChatModelOptions {
      apiKey?: string;
      baseURL?: string;
@@ -62,7 +64,27 @@ export declare class OpenAIChatModel extends ChatModel {
  options?: OpenAIChatModelOptions | undefined;
  constructor(options?: OpenAIChatModelOptions | undefined);
  protected _client?: OpenAI;
+ protected apiKeyEnvName: string;
+ protected apiKeyDefault: string | undefined;
+ protected supportsNativeStructuredOutputs: boolean;
+ protected supportsEndWithSystemMessage: boolean;
+ protected supportsToolsUseWithJsonSchema: boolean;
+ protected supportsParallelToolCalls: boolean;
+ protected supportsToolsEmptyParameters: boolean;
  get client(): OpenAI;
  get modelOptions(): ChatModelOptions | undefined;
  process(input: ChatModelInput): Promise<ChatModelOutput>;
+ private getParallelToolCalls;
+ private getRunMessages;
+ private getRunResponseFormat;
+ private requestStructuredOutput;
  }
+ export declare const ROLE_MAP: {
+     [key in Role]: ChatCompletionMessageParam["role"];
+ };
+ export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
+ export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
+     addTypeToEmptyParameters?: boolean;
+ }): ChatCompletionTool[] | undefined;
+ export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;
+ export declare function extractResultFromStream(stream: Stream<OpenAI.Chat.Completions.ChatCompletionChunk>, jsonMode?: boolean): Promise<ChatModelOutput>;
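Promoting these helpers to exports lets the new provider subclasses (and external code) reuse the OpenAI wire-format conversion. A hedged sketch of calling toolsFromInputTools directly; only the fields the helper actually reads are shown, since the full ChatModelInputTool shape is not part of this diff:

const tools = toolsFromInputTools(
    [{ function: { name: "ping", description: "health check", parameters: {} } }],
    { addTypeToEmptyParameters: true },
);
// tools[0].function.parameters is now { type: "object" }, which providers with
// supportsToolsEmptyParameters = false (e.g. DeepSeek) require.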
@@ -2,6 +2,8 @@ import { nanoid } from "nanoid";
  import OpenAI from "openai";
  import { z } from "zod";
  import { parseJSON } from "../utils/json-schema.js";
+ import { mergeUsage } from "../utils/model-utils.js";
+ import { getJsonOutputPrompt } from "../utils/prompts.js";
  import { checkArguments, isNonNullable } from "../utils/type-utils.js";
  import { ChatModel, } from "./chat-model.js";
  const CHAT_MODEL_OPENAI_DEFAULT_MODEL = "gpt-4o-mini";
@@ -23,16 +25,23 @@ export const openAIChatModelOptionsSchema = z.object({
  export class OpenAIChatModel extends ChatModel {
      options;
      constructor(options) {
-         if (options)
-             checkArguments("OpenAIChatModel", openAIChatModelOptionsSchema, options);
          super();
          this.options = options;
+         if (options)
+             checkArguments(this.name, openAIChatModelOptionsSchema, options);
      }
      _client;
+     apiKeyEnvName = "OPENAI_API_KEY";
+     apiKeyDefault;
+     supportsNativeStructuredOutputs = true;
+     supportsEndWithSystemMessage = true;
+     supportsToolsUseWithJsonSchema = true;
+     supportsParallelToolCalls = true;
+     supportsToolsEmptyParameters = true;
      get client() {
-         const apiKey = this.options?.apiKey || process.env.OPENAI_API_KEY;
+         const apiKey = this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault;
          if (!apiKey)
-             throw new Error("Api Key is required for OpenAIChatModel");
+             throw new Error(`Api Key is required for ${this.name}`);
          this._client ??= new OpenAI({
              baseURL: this.options?.baseURL,
              apiKey,
@@ -43,91 +52,104 @@ export class OpenAIChatModel extends ChatModel {
  return this.options?.modelOptions;
  }
  async process(input) {
-     const res = await this.client.chat.completions.create({
+     const body = {
          model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
          temperature: input.modelOptions?.temperature ?? this.modelOptions?.temperature,
          top_p: input.modelOptions?.topP ?? this.modelOptions?.topP,
          frequency_penalty: input.modelOptions?.frequencyPenalty ?? this.modelOptions?.frequencyPenalty,
          presence_penalty: input.modelOptions?.presencePenalty ?? this.modelOptions?.presencePenalty,
-         messages: await contentsFromInputMessages(input.messages),
-         tools: toolsFromInputTools(input.tools),
-         tool_choice: input.toolChoice,
-         parallel_tool_calls: !input.tools?.length
-             ? undefined
-             : (input.modelOptions?.parallelToolCalls ?? this.modelOptions?.parallelToolCalls),
-         response_format: input.responseFormat?.type === "json_schema"
-             ? {
-                 type: "json_schema",
-                 json_schema: {
-                     ...input.responseFormat.jsonSchema,
-                     schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
-                 },
-             }
-             : undefined,
+         messages: await this.getRunMessages(input),
          stream_options: {
              include_usage: true,
          },
          stream: true,
+     };
+     const { jsonMode, responseFormat } = await this.getRunResponseFormat(input);
+     const stream = await this.client.chat.completions.create({
+         ...body,
+         tools: toolsFromInputTools(input.tools, {
+             addTypeToEmptyParameters: !this.supportsToolsEmptyParameters,
+         }),
+         tool_choice: input.toolChoice,
+         parallel_tool_calls: this.getParallelToolCalls(input),
+         response_format: responseFormat,
      });
-     let text = "";
-     const toolCalls = [];
-     let usage;
-     let model;
-     for await (const chunk of res) {
-         const choice = chunk.choices?.[0];
-         model ??= chunk.model;
-         if (choice?.delta.tool_calls?.length) {
-             for (const call of choice.delta.tool_calls) {
-                 toolCalls[call.index] ??= {
-                     id: call.id || nanoid(),
-                     type: "function",
-                     function: { name: "", arguments: {} },
-                     args: "",
-                 };
-                 const c = toolCalls[call.index];
-                 if (!c)
-                     throw new Error("Tool call not found");
-                 if (call.type)
-                     c.type = call.type;
-                 c.function.name = c.function.name + (call.function?.name || "");
-                 c.args = c.args.concat(call.function?.arguments || "");
-             }
-         }
-         if (choice?.delta.content)
-             text += choice.delta.content;
-         if (chunk.usage) {
-             usage = {
-                 inputTokens: chunk.usage.prompt_tokens,
-                 outputTokens: chunk.usage.completion_tokens,
-             };
-         }
+     const result = await extractResultFromStream(stream, jsonMode);
+     if (!this.supportsToolsUseWithJsonSchema &&
+         !result.toolCalls?.length &&
+         input.responseFormat?.type === "json_schema" &&
+         result.text) {
+         const output = await this.requestStructuredOutput(body, input.responseFormat);
+         return { ...output, usage: mergeUsage(result.usage, output.usage) };
      }
-     const result = {
-         usage,
-         model,
-     };
-     if (input.responseFormat?.type === "json_schema" && text) {
-         result.json = parseJSON(text);
+     return result;
+ }
+ getParallelToolCalls(input) {
+     if (!this.supportsParallelToolCalls)
+         return undefined;
+     if (!input.tools?.length)
+         return undefined;
+     return input.modelOptions?.parallelToolCalls ?? this.modelOptions?.parallelToolCalls;
+ }
+ async getRunMessages(input) {
+     const messages = await contentsFromInputMessages(input.messages);
+     if (!this.supportsEndWithSystemMessage && messages.at(-1)?.role !== "user") {
+         messages.push({ role: "user", content: "" });
      }
-     else {
-         result.text = text;
+     if (!this.supportsToolsUseWithJsonSchema && input.tools?.length)
+         return messages;
+     if (this.supportsNativeStructuredOutputs)
+         return messages;
+     if (input.responseFormat?.type === "json_schema") {
+         messages.unshift({
+             role: "system",
+             content: getJsonOutputPrompt(input.responseFormat.jsonSchema.schema),
+         });
      }
-     if (toolCalls.length) {
-         result.toolCalls = toolCalls.map(({ args, ...c }) => ({
-             ...c,
-             function: { ...c.function, arguments: parseJSON(args) },
-         }));
+     return messages;
+ }
+ async getRunResponseFormat(input) {
+     if (!this.supportsToolsUseWithJsonSchema && input.tools?.length)
+         return { jsonMode: false, responseFormat: undefined };
+     if (!this.supportsNativeStructuredOutputs) {
+         const jsonMode = input.responseFormat?.type === "json_schema";
+         return { jsonMode, responseFormat: jsonMode ? { type: "json_object" } : undefined };
      }
-     return result;
+     if (input.responseFormat?.type === "json_schema") {
+         return {
+             jsonMode: true,
+             responseFormat: {
+                 type: "json_schema",
+                 json_schema: {
+                     ...input.responseFormat.jsonSchema,
+                     schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
+                 },
+             },
+         };
+     }
+     return { jsonMode: false, responseFormat: undefined };
+ }
+ async requestStructuredOutput(body, responseFormat) {
+     if (responseFormat?.type !== "json_schema") {
+         throw new Error("Expected json_schema response format");
+     }
+     const { jsonMode, responseFormat: resolvedResponseFormat } = await this.getRunResponseFormat({
+         responseFormat,
+     });
+     const res = await this.client.chat.completions.create({
+         ...body,
+         response_format: resolvedResponseFormat,
+     });
+     return extractResultFromStream(res, jsonMode);
  }
  }
- const ROLE_MAP = {
+ export const ROLE_MAP = {
      system: "system",
      user: "user",
      agent: "assistant",
      tool: "tool",
  };
- async function contentsFromInputMessages(messages) {
+ export async function contentsFromInputMessages(messages) {
      return messages.map((i) => ({
          role: ROLE_MAP[i.role],
          content: typeof i.content === "string"
@@ -156,19 +178,25 @@ async function contentsFromInputMessages(messages) {
  name: i.name,
  }));
  }
- function toolsFromInputTools(tools) {
+ export function toolsFromInputTools(tools, options) {
      return tools?.length
-         ? tools.map((i) => ({
-             type: "function",
-             function: {
-                 name: i.function.name,
-                 description: i.function.description,
-                 parameters: i.function.parameters,
-             },
-         }))
+         ? tools.map((i) => {
+             const parameters = i.function.parameters;
+             if (options?.addTypeToEmptyParameters && Object.keys(parameters).length === 0) {
+                 parameters.type = "object";
+             }
+             return {
+                 type: "function",
+                 function: {
+                     name: i.function.name,
+                     description: i.function.description,
+                     parameters,
+                 },
+             };
+         })
          : undefined;
  }
- function jsonSchemaToOpenAIJsonSchema(schema) {
+ export function jsonSchemaToOpenAIJsonSchema(schema) {
      if (schema?.type === "object") {
          const { required, properties } = schema;
          return {
@@ -193,3 +221,75 @@ function jsonSchemaToOpenAIJsonSchema(schema) {
  }
  return schema;
  }
+ export async function extractResultFromStream(stream, jsonMode = false) {
+     let text = "";
+     const toolCalls = [];
+     let usage;
+     let model;
+     for await (const chunk of stream) {
+         const choice = chunk.choices?.[0];
+         model ??= chunk.model;
+         if (choice?.delta.tool_calls?.length) {
+             for (const call of choice.delta.tool_calls) {
+                 // Gemini does not support tool call deltas
+                 if (call.index !== undefined) {
+                     handleToolCallDelta(toolCalls, call);
+                 }
+                 else {
+                     handleCompleteToolCall(toolCalls, call);
+                 }
+             }
+         }
+         if (choice?.delta.content)
+             text += choice.delta.content;
+         if (chunk.usage) {
+             usage = {
+                 inputTokens: chunk.usage.prompt_tokens,
+                 outputTokens: chunk.usage.completion_tokens,
+             };
+         }
+     }
+     const result = {
+         usage,
+         model,
+     };
+     if (jsonMode && text) {
+         result.json = parseJSON(text);
+     }
+     else {
+         result.text = text;
+     }
+     if (toolCalls.length) {
+         result.toolCalls = toolCalls.map(({ args, ...c }) => ({
+             ...c,
+             function: { ...c.function, arguments: parseJSON(args) },
+         }));
+     }
+     return result;
+ }
+ function handleToolCallDelta(toolCalls, call) {
+     toolCalls[call.index] ??= {
+         id: call.id || nanoid(),
+         type: "function",
+         function: { name: "", arguments: {} },
+         args: "",
+     };
+     const c = toolCalls[call.index];
+     if (!c)
+         throw new Error("Tool call not found");
+     if (call.type)
+         c.type = call.type;
+     c.function.name = c.function.name + (call.function?.name || "");
+     c.args = c.args.concat(call.function?.arguments || "");
+ }
+ function handleCompleteToolCall(toolCalls, call) {
+     toolCalls.push({
+         id: call.id || nanoid(),
+         type: "function",
+         function: {
+             name: call.function?.name || "",
+             arguments: parseJSON(call.function?.arguments || "{}"),
+         },
+         args: call.function?.arguments || "",
+     });
+ }
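The net effect of this refactor is that OpenAIChatModel becomes a template: the streaming loop lives in the exported extractResultFromStream, and providers customize behavior by overriding protected fields instead of reimplementing process(). A hedged sketch of a hypothetical provider following the same pattern (all names and the URL are illustrative):

import { OpenAIChatModel, type OpenAIChatModelOptions } from "@aigne/core"; // assumed export

class MyProviderChatModel extends OpenAIChatModel {
    constructor(options?: OpenAIChatModelOptions) {
        super({ ...options, baseURL: options?.baseURL || "https://api.example.com/v1" });
    }
    protected apiKeyEnvName = "MY_PROVIDER_API_KEY";
    // Opts into the json_object + getJsonOutputPrompt fallback path:
    protected supportsNativeStructuredOutputs = false;
}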
@@ -1,13 +1,5 @@
- import OpenAI from "openai";
- import type { ChatModelOptions } from "./chat-model.js";
- import { OpenAIChatModel } from "./openai-chat-model.js";
- export interface XAIChatModelOptions {
-     apiKey?: string;
-     model?: string;
-     modelOptions?: ChatModelOptions;
-     baseURL?: string;
- }
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
  export declare class XAIChatModel extends OpenAIChatModel {
-     constructor(options?: XAIChatModelOptions);
-     get client(): OpenAI;
+     constructor(options?: OpenAIChatModelOptions);
+     protected apiKeyEnvName: string;
  }