@aigne/core 1.7.0 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/CHANGELOG.md +25 -0
  2. package/lib/cjs/agents/agent.js +2 -2
  3. package/lib/cjs/agents/mcp-agent.d.ts +2 -3
  4. package/lib/cjs/agents/mcp-agent.js +10 -6
  5. package/lib/cjs/execution-engine/execution-engine.js +1 -1
  6. package/lib/cjs/loader/agent-js.js +1 -1
  7. package/lib/cjs/loader/agent-yaml.d.ts +1 -0
  8. package/lib/cjs/loader/agent-yaml.js +4 -0
  9. package/lib/cjs/loader/index.d.ts +72 -1
  10. package/lib/cjs/loader/index.js +29 -13
  11. package/lib/cjs/models/claude-chat-model.js +1 -1
  12. package/lib/cjs/models/deepseek-chat-model.d.ts +7 -0
  13. package/lib/cjs/models/deepseek-chat-model.js +19 -0
  14. package/lib/cjs/models/gemini-chat-model.d.ts +8 -0
  15. package/lib/cjs/models/gemini-chat-model.js +20 -0
  16. package/lib/cjs/models/ollama-chat-model.d.ts +6 -0
  17. package/lib/cjs/models/ollama-chat-model.js +18 -0
  18. package/lib/cjs/models/open-router-chat-model.d.ts +5 -0
  19. package/lib/cjs/models/open-router-chat-model.js +17 -0
  20. package/lib/cjs/models/openai-chat-model.d.ts +23 -1
  21. package/lib/cjs/models/openai-chat-model.js +182 -78
  22. package/lib/cjs/models/xai-chat-model.d.ts +3 -11
  23. package/lib/cjs/models/xai-chat-model.js +1 -14
  24. package/lib/cjs/prompt/prompt-builder.js +3 -0
  25. package/lib/cjs/utils/prompts.d.ts +1 -0
  26. package/lib/cjs/utils/prompts.js +13 -0
  27. package/lib/cjs/utils/type-utils.d.ts +1 -1
  28. package/lib/cjs/utils/type-utils.js +1 -1
  29. package/lib/dts/agents/mcp-agent.d.ts +2 -3
  30. package/lib/dts/loader/agent-yaml.d.ts +1 -0
  31. package/lib/dts/loader/index.d.ts +72 -1
  32. package/lib/dts/models/deepseek-chat-model.d.ts +7 -0
  33. package/lib/dts/models/gemini-chat-model.d.ts +8 -0
  34. package/lib/dts/models/ollama-chat-model.d.ts +6 -0
  35. package/lib/dts/models/open-router-chat-model.d.ts +5 -0
  36. package/lib/dts/models/openai-chat-model.d.ts +23 -1
  37. package/lib/dts/models/xai-chat-model.d.ts +3 -11
  38. package/lib/dts/utils/prompts.d.ts +1 -0
  39. package/lib/dts/utils/type-utils.d.ts +1 -1
  40. package/lib/esm/agents/agent.js +3 -3
  41. package/lib/esm/agents/mcp-agent.d.ts +2 -3
  42. package/lib/esm/agents/mcp-agent.js +10 -6
  43. package/lib/esm/execution-engine/execution-engine.js +1 -1
  44. package/lib/esm/loader/agent-js.js +1 -1
  45. package/lib/esm/loader/agent-yaml.d.ts +1 -0
  46. package/lib/esm/loader/agent-yaml.js +4 -0
  47. package/lib/esm/loader/index.d.ts +72 -1
  48. package/lib/esm/loader/index.js +28 -13
  49. package/lib/esm/models/claude-chat-model.js +1 -1
  50. package/lib/esm/models/deepseek-chat-model.d.ts +7 -0
  51. package/lib/esm/models/deepseek-chat-model.js +15 -0
  52. package/lib/esm/models/gemini-chat-model.d.ts +8 -0
  53. package/lib/esm/models/gemini-chat-model.js +16 -0
  54. package/lib/esm/models/ollama-chat-model.d.ts +6 -0
  55. package/lib/esm/models/ollama-chat-model.js +14 -0
  56. package/lib/esm/models/open-router-chat-model.d.ts +5 -0
  57. package/lib/esm/models/open-router-chat-model.js +13 -0
  58. package/lib/esm/models/openai-chat-model.d.ts +23 -1
  59. package/lib/esm/models/openai-chat-model.js +178 -78
  60. package/lib/esm/models/xai-chat-model.d.ts +3 -11
  61. package/lib/esm/models/xai-chat-model.js +1 -11
  62. package/lib/esm/prompt/prompt-builder.js +3 -0
  63. package/lib/esm/utils/prompts.d.ts +1 -0
  64. package/lib/esm/utils/prompts.js +10 -0
  65. package/lib/esm/utils/type-utils.d.ts +1 -1
  66. package/lib/esm/utils/type-utils.js +1 -1
  67. package/package.json +6 -3
@@ -2,7 +2,7 @@ import { inspect } from "node:util";
2
2
  import { ZodObject, z } from "zod";
3
3
  import { createMessage } from "../prompt/prompt-builder.js";
4
4
  import { logger } from "../utils/logger.js";
5
- import { createAccessorArray, orArrayToArray, } from "../utils/type-utils.js";
5
+ import { checkArguments, createAccessorArray, orArrayToArray, } from "../utils/type-utils.js";
6
6
  import { AgentMemory } from "./memory.js";
7
7
  import { replaceTransferAgentToName, transferToAgentOutput, } from "./types.js";
8
8
  export class Agent {
@@ -101,12 +101,12 @@ export class Agent {
101
101
  if (!this.disableEvents)
102
102
  ctx.emit("agentStarted", { agent: this, input: message });
103
103
  try {
104
- const parsedInput = this.inputSchema.parse(message);
104
+ const parsedInput = checkArguments(`Agent ${this.name} input`, this.inputSchema, message);
105
105
  this.preprocess(parsedInput, ctx);
106
106
  this.checkContextStatus(ctx);
107
107
  const output = await this.process(parsedInput, ctx)
108
108
  .then((output) => {
109
- const parsedOutput = this.outputSchema.parse(output);
109
+ const parsedOutput = checkArguments(`Agent ${this.name} output`, this.outputSchema, output);
110
110
  return this.includeInputInOutput ? { ...parsedInput, ...parsedOutput } : parsedOutput;
111
111
  })
112
112
  .then((output) => {
@@ -22,7 +22,7 @@ export type SSEServerParameters = {
22
22
  opts?: SSEClientTransportOptions;
23
23
  /**
24
24
  * The timeout for requests to the server, in milliseconds.
25
- * @default 10000
25
+ * @default 60000
26
26
  */
27
27
  timeout?: number;
28
28
  /**
@@ -41,7 +41,7 @@ export declare class MCPAgent extends Agent {
41
41
  static from(options: MCPAgentOptions): MCPAgent;
42
42
  private static fromTransport;
43
43
  constructor(options: MCPAgentOptions);
44
- private client;
44
+ client: Client;
45
45
  readonly prompts: MCPPrompt[] & {
46
46
  [key: string]: MCPPrompt;
47
47
  };
@@ -71,7 +71,6 @@ export interface MCPBaseOptions<I extends Message = Message, O extends Message =
71
71
  export declare abstract class MCPBase<I extends Message, O extends Message> extends Agent<I, O> {
72
72
  constructor(options: MCPBaseOptions<I, O>);
73
73
  protected client: ClientWithReconnect;
74
- protected get mcpServer(): string | undefined;
75
74
  }
76
75
  export declare class MCPTool extends MCPBase<Message, CallToolResult> {
77
76
  process(input: Message): Promise<CallToolResult>;
@@ -11,6 +11,11 @@ import { Agent } from "./agent.js";
11
11
  const MCP_AGENT_CLIENT_NAME = "MCPAgent";
12
12
  const MCP_AGENT_CLIENT_VERSION = "0.0.1";
13
13
  const DEFAULT_MAX_RECONNECTS = 10;
14
+ const DEFAULT_TIMEOUT = () => z.coerce
15
+ .number()
16
+ .int()
17
+ .min(0)
18
+ .safeParse(process.env.MCP_TIMEOUT || process.env.TIMEOUT).data || 60e3;
14
19
  function isSSEServerParameters(options) {
15
20
  return "url" in options && typeof options.url === "string";
16
21
  }
@@ -133,7 +138,9 @@ class ClientWithReconnect extends Client {
133
138
  throw new Error("reconnect requires a transportCreator");
134
139
  await pRetry(async () => {
135
140
  await this.close();
136
- await this.connect(await transportCreator());
141
+ await this.connect(await transportCreator(), {
142
+ timeout: this.reconnectOptions?.timeout ?? DEFAULT_TIMEOUT(),
143
+ });
137
144
  }, {
138
145
  retries: this.reconnectOptions?.maxReconnects ?? DEFAULT_MAX_RECONNECTS,
139
146
  shouldRetry: this.shouldReconnect,
@@ -142,8 +149,8 @@ class ClientWithReconnect extends Client {
142
149
  }
143
150
  async request(request, resultSchema, options) {
144
151
  const mergedOptions = {
145
- ...(options ?? {}),
146
- timeout: options?.timeout ?? this.reconnectOptions?.timeout ?? 10000,
152
+ ...options,
153
+ timeout: options?.timeout ?? DEFAULT_TIMEOUT(),
147
154
  };
148
155
  try {
149
156
  return await super.request(request, resultSchema, mergedOptions);
@@ -164,9 +171,6 @@ export class MCPBase extends Agent {
164
171
  this.client = options.client;
165
172
  }
166
173
  client;
167
- get mcpServer() {
168
- return getMCPServerName(this.client);
169
- }
170
174
  }
171
175
  export class MCPTool extends MCPBase {
172
176
  async process(input) {
@@ -9,8 +9,8 @@ export class ExecutionEngine {
9
9
  static async load({ path, ...options }) {
10
10
  const { model, agents, tools, ...aigne } = await load({ path });
11
11
  return new ExecutionEngine({
12
- model,
13
12
  ...options,
13
+ model: options.model || model,
14
14
  name: options.name || aigne.name || undefined,
15
15
  description: options.description || aigne.description || undefined,
16
16
  agents: agents.concat(options.agents ?? []),
@@ -22,7 +22,7 @@ export async function loadAgentFromJsFile(path) {
22
22
  throw new Error(`Agent file ${path} must export a default function, but got ${typeof agent}`);
23
23
  }
24
24
  return tryOrThrow(() => agentJsFileSchema.parse({
25
- name: agent.name,
25
+ name: agent.agent_name || agent.name,
26
26
  description: agent.description,
27
27
  input_schema: agent.input_schema,
28
28
  output_schema: agent.output_schema,
@@ -10,6 +10,7 @@ export declare function loadAgentFromYamlFile(path: string): Promise<{
10
10
  }, {
11
11
  [x: string]: any;
12
12
  }> | undefined;
13
+ tool_choice?: "auto" | "none" | "required" | "router" | undefined;
13
14
  output_schema?: ZodObject<Record<string, ZodType<any, z.ZodTypeDef, any>>, z.UnknownKeysParam, z.ZodTypeAny, {
14
15
  [x: string]: any;
15
16
  }, {
@@ -30,6 +30,10 @@ const agentFileSchema = z.discriminatedUnion("type", [
30
30
  .array(z.string())
31
31
  .nullish()
32
32
  .transform((v) => v ?? undefined),
33
+ tool_choice: z
34
+ .union([z.literal("auto"), z.literal("none"), z.literal("required"), z.literal("router")])
35
+ .nullish()
36
+ .transform((v) => v ?? undefined),
33
37
  }),
34
38
  z.object({
35
39
  type: z.literal("mcp"),
@@ -1,5 +1,6 @@
1
+ import { z } from "zod";
1
2
  import { type Agent } from "../agents/agent.js";
2
- import type { ChatModel } from "../models/chat-model.js";
3
+ import type { ChatModel, ChatModelOptions } from "../models/chat-model.js";
3
4
  export interface LoadOptions {
4
5
  path: string;
5
6
  }
@@ -19,6 +20,75 @@ export declare function load(options: LoadOptions): Promise<{
19
20
  } | null | undefined;
20
21
  }>;
21
22
  export declare function loadAgent(path: string): Promise<Agent>;
23
+ export declare function loadModel(model?: z.infer<typeof aigneFileSchema>["chat_model"], modelOptions?: ChatModelOptions): Promise<ChatModel | undefined>;
24
+ declare const aigneFileSchema: z.ZodObject<{
25
+ name: z.ZodOptional<z.ZodNullable<z.ZodString>>;
26
+ description: z.ZodOptional<z.ZodNullable<z.ZodString>>;
27
+ chat_model: z.ZodEffects<z.ZodOptional<z.ZodNullable<z.ZodUnion<[z.ZodString, z.ZodObject<{
28
+ provider: z.ZodOptional<z.ZodNullable<z.ZodString>>;
29
+ name: z.ZodOptional<z.ZodNullable<z.ZodString>>;
30
+ temperature: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
31
+ top_p: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
32
+ frequent_penalty: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
33
+ presence_penalty: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
34
+ }, "strip", z.ZodTypeAny, {
35
+ name?: string | null | undefined;
36
+ temperature?: number | null | undefined;
37
+ provider?: string | null | undefined;
38
+ top_p?: number | null | undefined;
39
+ frequent_penalty?: number | null | undefined;
40
+ presence_penalty?: number | null | undefined;
41
+ }, {
42
+ name?: string | null | undefined;
43
+ temperature?: number | null | undefined;
44
+ provider?: string | null | undefined;
45
+ top_p?: number | null | undefined;
46
+ frequent_penalty?: number | null | undefined;
47
+ presence_penalty?: number | null | undefined;
48
+ }>]>>>, {
49
+ name?: string | null | undefined;
50
+ temperature?: number | null | undefined;
51
+ provider?: string | null | undefined;
52
+ top_p?: number | null | undefined;
53
+ frequent_penalty?: number | null | undefined;
54
+ presence_penalty?: number | null | undefined;
55
+ } | null | undefined, string | {
56
+ name?: string | null | undefined;
57
+ temperature?: number | null | undefined;
58
+ provider?: string | null | undefined;
59
+ top_p?: number | null | undefined;
60
+ frequent_penalty?: number | null | undefined;
61
+ presence_penalty?: number | null | undefined;
62
+ } | null | undefined>;
63
+ agents: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
64
+ tools: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
65
+ }, "strip", z.ZodTypeAny, {
66
+ description?: string | null | undefined;
67
+ tools?: string[] | null | undefined;
68
+ name?: string | null | undefined;
69
+ chat_model?: {
70
+ name?: string | null | undefined;
71
+ temperature?: number | null | undefined;
72
+ provider?: string | null | undefined;
73
+ top_p?: number | null | undefined;
74
+ frequent_penalty?: number | null | undefined;
75
+ presence_penalty?: number | null | undefined;
76
+ } | null | undefined;
77
+ agents?: string[] | null | undefined;
78
+ }, {
79
+ description?: string | null | undefined;
80
+ tools?: string[] | null | undefined;
81
+ name?: string | null | undefined;
82
+ chat_model?: string | {
83
+ name?: string | null | undefined;
84
+ temperature?: number | null | undefined;
85
+ provider?: string | null | undefined;
86
+ top_p?: number | null | undefined;
87
+ frequent_penalty?: number | null | undefined;
88
+ presence_penalty?: number | null | undefined;
89
+ } | null | undefined;
90
+ agents?: string[] | null | undefined;
91
+ }>;
22
92
  export declare function loadAIGNEFile(path: string): Promise<{
23
93
  description?: string | null | undefined;
24
94
  tools?: string[] | null | undefined;
@@ -33,3 +103,4 @@ export declare function loadAIGNEFile(path: string): Promise<{
33
103
  } | null | undefined;
34
104
  agents?: string[] | null | undefined;
35
105
  }>;
106
+ export {};
@@ -6,12 +6,15 @@ import { FunctionAgent } from "../agents/agent.js";
6
6
  import { AIAgent } from "../agents/ai-agent.js";
7
7
  import { MCPAgent } from "../agents/mcp-agent.js";
8
8
  import { ClaudeChatModel } from "../models/claude-chat-model.js";
9
+ import { DeepSeekChatModel } from "../models/deepseek-chat-model.js";
10
+ import { GeminiChatModel } from "../models/gemini-chat-model.js";
11
+ import { OllamaChatModel } from "../models/ollama-chat-model.js";
12
+ import { OpenRouterChatModel } from "../models/open-router-chat-model.js";
9
13
  import { OpenAIChatModel } from "../models/openai-chat-model.js";
10
14
  import { XAIChatModel } from "../models/xai-chat-model.js";
11
15
  import { tryOrThrow } from "../utils/type-utils.js";
12
16
  import { loadAgentFromJsFile } from "./agent-js.js";
13
17
  import { loadAgentFromYamlFile } from "./agent-yaml.js";
14
- const DEFAULT_MODEL_PROVIDER = "openai";
15
18
  const AIGNE_FILE_NAME = ["aigne.yaml", "aigne.yml"];
16
19
  export async function load(options) {
17
20
  const { path } = options;
@@ -49,6 +52,7 @@ export async function loadAgent(path) {
49
52
  outputSchema: agent.output_schema,
50
53
  outputKey: agent.output_key,
51
54
  tools: await Promise.all((agent.tools ?? []).map((filename) => loadAgent(join(dirname(path), filename)))),
55
+ toolChoice: agent.tool_choice,
52
56
  });
53
57
  }
54
58
  if (agent.type === "mcp") {
@@ -68,21 +72,32 @@ export async function loadAgent(path) {
68
72
  }
69
73
  throw new Error(`Unsupported agent file type: ${path}`);
70
74
  }
71
- async function loadModel(model) {
72
- if (!model?.name)
73
- return undefined;
75
+ const { MODEL_PROVIDER, MODEL_NAME } = process.env;
76
+ const DEFAULT_MODEL_PROVIDER = "openai";
77
+ const DEFAULT_MODEL_NAME = "gpt-4o-mini";
78
+ export async function loadModel(model, modelOptions) {
74
79
  const params = {
75
- model: model.name,
76
- temperature: model.temperature ?? undefined,
77
- topP: model.top_p ?? undefined,
78
- frequencyPenalty: model.frequent_penalty ?? undefined,
79
- presencePenalty: model.presence_penalty ?? undefined,
80
+ model: MODEL_NAME ?? model?.name ?? DEFAULT_MODEL_NAME,
81
+ temperature: model?.temperature ?? undefined,
82
+ topP: model?.top_p ?? undefined,
83
+ frequencyPenalty: model?.frequent_penalty ?? undefined,
84
+ presencePenalty: model?.presence_penalty ?? undefined,
80
85
  };
81
- const availableModels = [OpenAIChatModel, ClaudeChatModel, XAIChatModel];
82
- const M = availableModels.find((m) => m.name.toLowerCase().includes(model.provider || DEFAULT_MODEL_PROVIDER));
86
+ const availableModels = [
87
+ OpenAIChatModel,
88
+ ClaudeChatModel,
89
+ XAIChatModel,
90
+ GeminiChatModel,
91
+ DeepSeekChatModel,
92
+ OpenRouterChatModel,
93
+ OllamaChatModel,
94
+ ];
95
+ const M = availableModels.find((m) => m.name
96
+ .toLowerCase()
97
+ .includes((MODEL_PROVIDER ?? model?.provider ?? DEFAULT_MODEL_PROVIDER).toLowerCase()));
83
98
  if (!M)
84
- throw new Error(`Unsupported model: ${model.provider} ${model.name}`);
85
- return new M(params);
99
+ throw new Error(`Unsupported model: ${model?.provider} ${model?.name}`);
100
+ return new M({ model: params.model, modelOptions: { ...params, ...modelOptions } });
86
101
  }
87
102
  const aigneFileSchema = z.object({
88
103
  name: z.string().nullish(),
@@ -30,7 +30,7 @@ export class ClaudeChatModel extends ChatModel {
30
30
  }
31
31
  _client;
32
32
  get client() {
33
- const apiKey = this.options?.apiKey || process.env.CLAUDE_API_KEY;
33
+ const apiKey = this.options?.apiKey || process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_API_KEY;
34
34
  if (!apiKey)
35
35
  throw new Error("Api Key is required for ClaudeChatModel");
36
36
  this._client ??= new Anthropic({ apiKey });
@@ -0,0 +1,7 @@
1
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ export declare class DeepSeekChatModel extends OpenAIChatModel {
3
+ constructor(options?: OpenAIChatModelOptions);
4
+ protected apiKeyEnvName: string;
5
+ protected supportsNativeStructuredOutputs: boolean;
6
+ protected supportsToolsEmptyParameters: boolean;
7
+ }
@@ -0,0 +1,15 @@
1
+ import { OpenAIChatModel } from "./openai-chat-model.js";
2
+ const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
3
+ const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
4
+ export class DeepSeekChatModel extends OpenAIChatModel {
5
+ constructor(options) {
6
+ super({
7
+ ...options,
8
+ model: options?.model || DEEPSEEK_DEFAULT_CHAT_MODEL,
9
+ baseURL: options?.baseURL || DEEPSEEK_BASE_URL,
10
+ });
11
+ }
12
+ apiKeyEnvName = "DEEPSEEK_API_KEY";
13
+ supportsNativeStructuredOutputs = false;
14
+ supportsToolsEmptyParameters = false;
15
+ }
@@ -0,0 +1,8 @@
1
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ export declare class GeminiChatModel extends OpenAIChatModel {
3
+ constructor(options?: OpenAIChatModelOptions);
4
+ protected apiKeyEnvName: string;
5
+ protected supportsEndWithSystemMessage: boolean;
6
+ protected supportsToolsUseWithJsonSchema: boolean;
7
+ protected supportsParallelToolCalls: boolean;
8
+ }
@@ -0,0 +1,16 @@
1
+ import { OpenAIChatModel } from "./openai-chat-model.js";
2
+ const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
3
+ const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
4
+ export class GeminiChatModel extends OpenAIChatModel {
5
+ constructor(options) {
6
+ super({
7
+ ...options,
8
+ model: options?.model || GEMINI_DEFAULT_CHAT_MODEL,
9
+ baseURL: options?.baseURL || GEMINI_BASE_URL,
10
+ });
11
+ }
12
+ apiKeyEnvName = "GEMINI_API_KEY";
13
+ supportsEndWithSystemMessage = false;
14
+ supportsToolsUseWithJsonSchema = false;
15
+ supportsParallelToolCalls = false;
16
+ }
@@ -0,0 +1,6 @@
1
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ export declare class OllamaChatModel extends OpenAIChatModel {
3
+ constructor(options?: OpenAIChatModelOptions);
4
+ protected apiKeyEnvName: string;
5
+ protected apiKeyDefault: string;
6
+ }
@@ -0,0 +1,14 @@
1
+ import { OpenAIChatModel } from "./openai-chat-model.js";
2
+ const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
3
+ const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
4
+ export class OllamaChatModel extends OpenAIChatModel {
5
+ constructor(options) {
6
+ super({
7
+ ...options,
8
+ model: options?.model || OLLAMA_DEFAULT_CHAT_MODEL,
9
+ baseURL: options?.baseURL || process.env.OLLAMA_BASE_URL || OLLAMA_DEFAULT_BASE_URL,
10
+ });
11
+ }
12
+ apiKeyEnvName = "OLLAMA_API_KEY";
13
+ apiKeyDefault = "ollama";
14
+ }
@@ -0,0 +1,5 @@
1
+ import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ export declare class OpenRouterChatModel extends OpenAIChatModel {
3
+ constructor(options?: OpenAIChatModelOptions);
4
+ protected apiKeyEnvName: string;
5
+ }
@@ -0,0 +1,13 @@
1
+ import { OpenAIChatModel } from "./openai-chat-model.js";
2
+ const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
3
+ const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
4
+ export class OpenRouterChatModel extends OpenAIChatModel {
5
+ constructor(options) {
6
+ super({
7
+ ...options,
8
+ model: options?.model || OPEN_ROUTER_DEFAULT_CHAT_MODEL,
9
+ baseURL: options?.baseURL || OPEN_ROUTER_BASE_URL,
10
+ });
11
+ }
12
+ apiKeyEnvName = "OPEN_ROUTER_API_KEY";
13
+ }
@@ -1,6 +1,8 @@
1
1
  import OpenAI from "openai";
2
+ import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
3
+ import type { Stream } from "openai/streaming.js";
2
4
  import { z } from "zod";
3
- import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
5
+ import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
4
6
  export interface OpenAIChatModelOptions {
5
7
  apiKey?: string;
6
8
  baseURL?: string;
@@ -62,7 +64,27 @@ export declare class OpenAIChatModel extends ChatModel {
62
64
  options?: OpenAIChatModelOptions | undefined;
63
65
  constructor(options?: OpenAIChatModelOptions | undefined);
64
66
  protected _client?: OpenAI;
67
+ protected apiKeyEnvName: string;
68
+ protected apiKeyDefault: string | undefined;
69
+ protected supportsNativeStructuredOutputs: boolean;
70
+ protected supportsEndWithSystemMessage: boolean;
71
+ protected supportsToolsUseWithJsonSchema: boolean;
72
+ protected supportsParallelToolCalls: boolean;
73
+ protected supportsToolsEmptyParameters: boolean;
65
74
  get client(): OpenAI;
66
75
  get modelOptions(): ChatModelOptions | undefined;
67
76
  process(input: ChatModelInput): Promise<ChatModelOutput>;
77
+ private getParallelToolCalls;
78
+ private getRunMessages;
79
+ private getRunResponseFormat;
80
+ private requestStructuredOutput;
68
81
  }
82
+ export declare const ROLE_MAP: {
83
+ [key in Role]: ChatCompletionMessageParam["role"];
84
+ };
85
+ export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
86
+ export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
87
+ addTypeToEmptyParameters?: boolean;
88
+ }): ChatCompletionTool[] | undefined;
89
+ export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;
90
+ export declare function extractResultFromStream(stream: Stream<OpenAI.Chat.Completions.ChatCompletionChunk>, jsonMode?: boolean): Promise<ChatModelOutput>;