@aigne/core 1.12.0 → 1.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (197)
  1. package/CHANGELOG.md +28 -0
  2. package/README.md +13 -26
  3. package/README.zh.md +24 -37
  4. package/lib/cjs/agents/agent.d.ts +522 -15
  5. package/lib/cjs/agents/agent.js +357 -36
  6. package/lib/cjs/agents/ai-agent.d.ts +210 -52
  7. package/lib/cjs/agents/ai-agent.js +182 -24
  8. package/lib/cjs/agents/mcp-agent.d.ts +112 -0
  9. package/lib/cjs/agents/mcp-agent.js +79 -1
  10. package/lib/cjs/agents/team-agent.d.ts +99 -0
  11. package/lib/cjs/agents/team-agent.js +94 -0
  12. package/lib/cjs/agents/user-agent.d.ts +6 -4
  13. package/lib/cjs/agents/user-agent.js +16 -5
  14. package/lib/cjs/aigne/aigne.d.ts +263 -16
  15. package/lib/cjs/aigne/aigne.js +130 -20
  16. package/lib/cjs/aigne/context.d.ts +24 -8
  17. package/lib/cjs/aigne/context.js +8 -22
  18. package/lib/cjs/aigne/message-queue.d.ts +26 -4
  19. package/lib/cjs/aigne/message-queue.js +42 -7
  20. package/lib/cjs/aigne/usage.d.ts +9 -0
  21. package/lib/cjs/aigne/usage.js +3 -0
  22. package/lib/cjs/client/client.d.ts +81 -3
  23. package/lib/cjs/client/client.js +38 -0
  24. package/lib/cjs/client/index.d.ts +1 -0
  25. package/lib/cjs/client/index.js +17 -0
  26. package/lib/cjs/index.d.ts +0 -1
  27. package/lib/cjs/index.js +0 -1
  28. package/lib/cjs/loader/agent-js.d.ts +1 -1
  29. package/lib/cjs/loader/agent-js.js +2 -2
  30. package/lib/cjs/loader/agent-yaml.d.ts +3 -2
  31. package/lib/cjs/loader/agent-yaml.js +2 -1
  32. package/lib/cjs/loader/index.d.ts +4 -4
  33. package/lib/cjs/loader/index.js +2 -0
  34. package/lib/cjs/memory/default-memory.d.ts +16 -0
  35. package/lib/cjs/memory/default-memory.js +70 -0
  36. package/lib/cjs/memory/index.d.ts +3 -0
  37. package/lib/cjs/memory/index.js +19 -0
  38. package/lib/cjs/memory/memory.d.ts +89 -0
  39. package/lib/cjs/memory/memory.js +132 -0
  40. package/lib/cjs/memory/recorder.d.ts +86 -0
  41. package/lib/cjs/memory/recorder.js +50 -0
  42. package/lib/cjs/memory/retriever.d.ts +99 -0
  43. package/lib/cjs/memory/retriever.js +51 -0
  44. package/lib/cjs/models/bedrock-chat-model.d.ts +79 -0
  45. package/lib/cjs/models/bedrock-chat-model.js +303 -0
  46. package/lib/cjs/models/chat-model.d.ts +279 -1
  47. package/lib/cjs/models/chat-model.js +62 -0
  48. package/lib/cjs/models/claude-chat-model.d.ts +49 -3
  49. package/lib/cjs/models/claude-chat-model.js +34 -2
  50. package/lib/cjs/models/deepseek-chat-model.d.ts +16 -0
  51. package/lib/cjs/models/deepseek-chat-model.js +16 -0
  52. package/lib/cjs/models/gemini-chat-model.d.ts +15 -0
  53. package/lib/cjs/models/gemini-chat-model.js +15 -0
  54. package/lib/cjs/models/ollama-chat-model.d.ts +16 -0
  55. package/lib/cjs/models/ollama-chat-model.js +16 -0
  56. package/lib/cjs/models/open-router-chat-model.d.ts +16 -0
  57. package/lib/cjs/models/open-router-chat-model.js +16 -0
  58. package/lib/cjs/models/openai-chat-model.d.ts +70 -3
  59. package/lib/cjs/models/openai-chat-model.js +147 -102
  60. package/lib/cjs/models/xai-chat-model.d.ts +16 -0
  61. package/lib/cjs/models/xai-chat-model.js +16 -0
  62. package/lib/cjs/prompt/prompt-builder.d.ts +4 -4
  63. package/lib/cjs/prompt/prompt-builder.js +21 -20
  64. package/lib/cjs/prompt/prompts/memory-message-template.d.ts +1 -0
  65. package/lib/cjs/prompt/prompts/memory-message-template.js +10 -0
  66. package/lib/cjs/prompt/template.js +5 -1
  67. package/lib/cjs/server/error.d.ts +11 -0
  68. package/lib/cjs/server/error.js +11 -0
  69. package/lib/cjs/server/index.d.ts +2 -0
  70. package/lib/cjs/server/index.js +18 -0
  71. package/lib/cjs/server/server.d.ts +89 -8
  72. package/lib/cjs/server/server.js +58 -0
  73. package/lib/cjs/utils/fs.d.ts +2 -0
  74. package/lib/cjs/utils/fs.js +25 -0
  75. package/lib/cjs/utils/prompts.d.ts +1 -0
  76. package/lib/cjs/utils/prompts.js +11 -2
  77. package/lib/cjs/utils/type-utils.d.ts +2 -0
  78. package/lib/cjs/utils/type-utils.js +26 -0
  79. package/lib/dts/agents/agent.d.ts +522 -15
  80. package/lib/dts/agents/ai-agent.d.ts +210 -52
  81. package/lib/dts/agents/mcp-agent.d.ts +112 -0
  82. package/lib/dts/agents/team-agent.d.ts +99 -0
  83. package/lib/dts/agents/user-agent.d.ts +6 -4
  84. package/lib/dts/aigne/aigne.d.ts +263 -16
  85. package/lib/dts/aigne/context.d.ts +24 -8
  86. package/lib/dts/aigne/message-queue.d.ts +26 -4
  87. package/lib/dts/aigne/usage.d.ts +9 -0
  88. package/lib/dts/client/client.d.ts +81 -3
  89. package/lib/dts/client/index.d.ts +1 -0
  90. package/lib/dts/index.d.ts +0 -1
  91. package/lib/dts/loader/agent-js.d.ts +1 -1
  92. package/lib/dts/loader/agent-yaml.d.ts +3 -2
  93. package/lib/dts/loader/index.d.ts +4 -4
  94. package/lib/dts/memory/default-memory.d.ts +16 -0
  95. package/lib/dts/memory/index.d.ts +3 -0
  96. package/lib/dts/memory/memory.d.ts +89 -0
  97. package/lib/dts/memory/recorder.d.ts +86 -0
  98. package/lib/dts/memory/retriever.d.ts +99 -0
  99. package/lib/dts/models/bedrock-chat-model.d.ts +79 -0
  100. package/lib/dts/models/chat-model.d.ts +279 -1
  101. package/lib/dts/models/claude-chat-model.d.ts +49 -3
  102. package/lib/dts/models/deepseek-chat-model.d.ts +16 -0
  103. package/lib/dts/models/gemini-chat-model.d.ts +15 -0
  104. package/lib/dts/models/ollama-chat-model.d.ts +16 -0
  105. package/lib/dts/models/open-router-chat-model.d.ts +16 -0
  106. package/lib/dts/models/openai-chat-model.d.ts +70 -3
  107. package/lib/dts/models/xai-chat-model.d.ts +16 -0
  108. package/lib/dts/prompt/prompt-builder.d.ts +4 -4
  109. package/lib/dts/prompt/prompts/memory-message-template.d.ts +1 -0
  110. package/lib/dts/server/error.d.ts +11 -0
  111. package/lib/dts/server/index.d.ts +2 -0
  112. package/lib/dts/server/server.d.ts +89 -8
  113. package/lib/dts/utils/fs.d.ts +2 -0
  114. package/lib/dts/utils/prompts.d.ts +1 -0
  115. package/lib/dts/utils/type-utils.d.ts +2 -0
  116. package/lib/esm/agents/agent.d.ts +522 -15
  117. package/lib/esm/agents/agent.js +351 -35
  118. package/lib/esm/agents/ai-agent.d.ts +210 -52
  119. package/lib/esm/agents/ai-agent.js +183 -25
  120. package/lib/esm/agents/mcp-agent.d.ts +112 -0
  121. package/lib/esm/agents/mcp-agent.js +79 -1
  122. package/lib/esm/agents/team-agent.d.ts +99 -0
  123. package/lib/esm/agents/team-agent.js +94 -0
  124. package/lib/esm/agents/user-agent.d.ts +6 -4
  125. package/lib/esm/agents/user-agent.js +17 -6
  126. package/lib/esm/aigne/aigne.d.ts +263 -16
  127. package/lib/esm/aigne/aigne.js +132 -22
  128. package/lib/esm/aigne/context.d.ts +24 -8
  129. package/lib/esm/aigne/context.js +9 -22
  130. package/lib/esm/aigne/message-queue.d.ts +26 -4
  131. package/lib/esm/aigne/message-queue.js +42 -8
  132. package/lib/esm/aigne/usage.d.ts +9 -0
  133. package/lib/esm/aigne/usage.js +3 -0
  134. package/lib/esm/client/client.d.ts +81 -3
  135. package/lib/esm/client/client.js +38 -0
  136. package/lib/esm/client/index.d.ts +1 -0
  137. package/lib/esm/client/index.js +1 -0
  138. package/lib/esm/index.d.ts +0 -1
  139. package/lib/esm/index.js +0 -1
  140. package/lib/esm/loader/agent-js.d.ts +1 -1
  141. package/lib/esm/loader/agent-js.js +2 -2
  142. package/lib/esm/loader/agent-yaml.d.ts +3 -2
  143. package/lib/esm/loader/agent-yaml.js +2 -1
  144. package/lib/esm/loader/index.d.ts +4 -4
  145. package/lib/esm/loader/index.js +2 -0
  146. package/lib/esm/memory/default-memory.d.ts +16 -0
  147. package/lib/esm/memory/default-memory.js +63 -0
  148. package/lib/esm/memory/index.d.ts +3 -0
  149. package/lib/esm/memory/index.js +3 -0
  150. package/lib/esm/memory/memory.d.ts +89 -0
  151. package/lib/esm/memory/memory.js +127 -0
  152. package/lib/esm/memory/recorder.d.ts +86 -0
  153. package/lib/esm/memory/recorder.js +46 -0
  154. package/lib/esm/memory/retriever.d.ts +99 -0
  155. package/lib/esm/memory/retriever.js +47 -0
  156. package/lib/esm/models/bedrock-chat-model.d.ts +79 -0
  157. package/lib/esm/models/bedrock-chat-model.js +298 -0
  158. package/lib/esm/models/chat-model.d.ts +279 -1
  159. package/lib/esm/models/chat-model.js +62 -0
  160. package/lib/esm/models/claude-chat-model.d.ts +49 -3
  161. package/lib/esm/models/claude-chat-model.js +35 -3
  162. package/lib/esm/models/deepseek-chat-model.d.ts +16 -0
  163. package/lib/esm/models/deepseek-chat-model.js +16 -0
  164. package/lib/esm/models/gemini-chat-model.d.ts +15 -0
  165. package/lib/esm/models/gemini-chat-model.js +15 -0
  166. package/lib/esm/models/ollama-chat-model.d.ts +16 -0
  167. package/lib/esm/models/ollama-chat-model.js +16 -0
  168. package/lib/esm/models/open-router-chat-model.d.ts +16 -0
  169. package/lib/esm/models/open-router-chat-model.js +16 -0
  170. package/lib/esm/models/openai-chat-model.d.ts +70 -3
  171. package/lib/esm/models/openai-chat-model.js +147 -102
  172. package/lib/esm/models/xai-chat-model.d.ts +16 -0
  173. package/lib/esm/models/xai-chat-model.js +16 -0
  174. package/lib/esm/prompt/prompt-builder.d.ts +4 -4
  175. package/lib/esm/prompt/prompt-builder.js +22 -21
  176. package/lib/esm/prompt/prompts/memory-message-template.d.ts +1 -0
  177. package/lib/esm/prompt/prompts/memory-message-template.js +7 -0
  178. package/lib/esm/prompt/template.js +5 -1
  179. package/lib/esm/server/error.d.ts +11 -0
  180. package/lib/esm/server/error.js +11 -0
  181. package/lib/esm/server/index.d.ts +2 -0
  182. package/lib/esm/server/index.js +2 -0
  183. package/lib/esm/server/server.d.ts +89 -8
  184. package/lib/esm/server/server.js +58 -0
  185. package/lib/esm/utils/fs.d.ts +2 -0
  186. package/lib/esm/utils/fs.js +21 -0
  187. package/lib/esm/utils/prompts.d.ts +1 -0
  188. package/lib/esm/utils/prompts.js +10 -2
  189. package/lib/esm/utils/type-utils.d.ts +2 -0
  190. package/lib/esm/utils/type-utils.js +24 -0
  191. package/package.json +21 -11
  192. package/lib/cjs/agents/memory.d.ts +0 -26
  193. package/lib/cjs/agents/memory.js +0 -45
  194. package/lib/dts/agents/memory.d.ts +0 -26
  195. package/lib/esm/agents/memory.d.ts +0 -26
  196. package/lib/esm/agents/memory.js +0 -41
  197. /package/{LICENSE → LICENSE.md} +0 -0
@@ -1,5 +1,28 @@
1
1
  import { z } from "zod";
2
2
  import { Agent } from "../agents/agent.js";
3
+ /**
4
+ * ChatModel is an abstract base class for interacting with Large Language Models (LLMs).
5
+ *
6
+ * This class extends the Agent class and provides a common interface for handling model inputs,
7
+ * outputs, and capabilities. Specific model implementations (like OpenAI, Anthropic, etc.)
8
+ * should inherit from this class and implement their specific functionalities.
9
+ *
10
+ * @example
11
+ * Here's how to implement a custom ChatModel:
12
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model}
13
+ *
14
+ * @example
15
+ * Here's an example showing streaming response with readable stream:
16
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model-streaming}
17
+ *
18
+ * @example
19
+ * Here's an example showing streaming response with async generator:
20
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model-streaming-async-generator}
21
+ *
22
+ * @example
23
+ * Here's an example with tool calls:
24
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model-tools}
25
+ */
3
26
  export class ChatModel extends Agent {
4
27
  constructor() {
5
28
  super({
@@ -7,12 +30,41 @@ export class ChatModel extends Agent {
7
30
  outputSchema: chatModelOutputSchema,
8
31
  });
9
32
  }
33
+ /**
34
+ * Indicates whether the model supports parallel tool calls
35
+ *
36
+ * Defaults to true, subclasses can override this property based on
37
+ * specific model capabilities
38
+ */
10
39
  supportsParallelToolCalls = true;
40
+ /**
41
+ * Gets the model's supported capabilities
42
+ *
43
+ * Currently returns capabilities including: whether parallel tool calls are supported
44
+ *
45
+ * @returns An object containing model capabilities
46
+ */
11
47
  getModelCapabilities() {
12
48
  return {
13
49
  supportsParallelToolCalls: this.supportsParallelToolCalls,
14
50
  };
15
51
  }
52
+ validateToolNames(tools) {
53
+ for (const tool of tools ?? []) {
54
+ if (!/^[a-zA-Z0-9_]+$/.test(tool.function.name)) {
55
+ throw new Error(`Tool name "${tool.function.name}" can only contain letters, numbers, and underscores`);
56
+ }
57
+ }
58
+ }
59
+ /**
60
+ * Performs preprocessing operations before handling input
61
+ *
62
+ * Primarily checks if token usage exceeds limits, throwing an exception if limits are exceeded
63
+ *
64
+ * @param input Input message
65
+ * @param context Execution context
66
+ * @throws Error if token usage exceeds maximum limit
67
+ */
16
68
  preprocess(input, context) {
17
69
  super.preprocess(input, context);
18
70
  const { limits, usage } = context;
@@ -20,7 +72,17 @@ export class ChatModel extends Agent {
20
72
  if (limits?.maxTokens && usedTokens >= limits.maxTokens) {
21
73
  throw new Error(`Exceeded max tokens ${usedTokens}/${limits.maxTokens}`);
22
74
  }
75
+ this.validateToolNames(input.tools);
23
76
  }
77
+ /**
78
+ * Performs postprocessing operations after handling output
79
+ *
80
+ * Primarily updates token usage statistics in the context
81
+ *
82
+ * @param input Input message
83
+ * @param output Output message
84
+ * @param context Execution context
85
+ */
24
86
  postprocess(input, output, context) {
25
87
  super.postprocess(input, output, context);
26
88
  const { usage } = output;
@@ -1,13 +1,32 @@
1
1
  import Anthropic from "@anthropic-ai/sdk";
2
2
  import { z } from "zod";
3
- import type { AgentInvokeOptions, AgentResponse } from "../agents/agent.js";
4
- import type { Context } from "../aigne/context.js";
3
+ import type { AgentProcessResult } from "../agents/agent.js";
4
+ import { type PromiseOrValue } from "../utils/type-utils.js";
5
5
  import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
6
+ /**
7
+ * Configuration options for Claude Chat Model
8
+ */
6
9
  export interface ClaudeChatModelOptions {
10
+ /**
11
+ * API key for Anthropic's Claude API
12
+ *
13
+ * If not provided, will look for ANTHROPIC_API_KEY or CLAUDE_API_KEY in environment variables
14
+ */
7
15
  apiKey?: string;
16
+ /**
17
+ * Claude model to use
18
+ *
19
+ * Defaults to 'claude-3-7-sonnet-latest'
20
+ */
8
21
  model?: string;
22
+ /**
23
+ * Additional model options to control behavior
24
+ */
9
25
  modelOptions?: ChatModelOptions;
10
26
  }
27
+ /**
28
+ * @hidden
29
+ */
11
30
  export declare const claudeChatModelOptionsSchema: z.ZodObject<{
12
31
  apiKey: z.ZodOptional<z.ZodString>;
13
32
  model: z.ZodOptional<z.ZodString>;
@@ -56,13 +75,40 @@ export declare const claudeChatModelOptionsSchema: z.ZodObject<{
56
75
  model?: string | undefined;
57
76
  apiKey?: string | undefined;
58
77
  }>;
78
+ /**
79
+ * Implementation of the ChatModel interface for Anthropic's Claude API
80
+ *
81
+ * This model provides access to Claude's capabilities including:
82
+ * - Text generation
83
+ * - Tool use
84
+ * - JSON structured output
85
+ *
86
+ * Default model: 'claude-3-7-sonnet-latest'
87
+ *
88
+ * @example
89
+ * Here's how to create and use a Claude chat model:
90
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model}
91
+ *
92
+ * @example
93
+ * Here's an example with streaming response:
94
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model-streaming-async-generator}
95
+ */
59
96
  export declare class ClaudeChatModel extends ChatModel {
60
97
  options?: ClaudeChatModelOptions | undefined;
61
98
  constructor(options?: ClaudeChatModelOptions | undefined);
99
+ /**
100
+ * @hidden
101
+ */
62
102
  protected _client?: Anthropic;
63
103
  get client(): Anthropic;
64
104
  get modelOptions(): ChatModelOptions | undefined;
65
- process(input: ChatModelInput, _context: Context, options?: AgentInvokeOptions): Promise<AgentResponse<ChatModelOutput>>;
105
+ /**
106
+ * Process the input using Claude's chat model
107
+ * @param input - The input to process
108
+ * @returns The processed output from the model
109
+ */
110
+ process(input: ChatModelInput): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
111
+ private _process;
66
112
  private extractResultFromClaudeStream;
67
113
  private requestStructuredOutput;
68
114
  }
@@ -3,9 +3,12 @@ import { z } from "zod";
3
3
  import { parseJSON } from "../utils/json-schema.js";
4
4
  import { mergeUsage } from "../utils/model-utils.js";
5
5
  import { agentResponseStreamToObject } from "../utils/stream-utils.js";
6
- import { checkArguments, isEmpty, isNonNullable } from "../utils/type-utils.js";
6
+ import { checkArguments, isEmpty, isNonNullable, } from "../utils/type-utils.js";
7
7
  import { ChatModel, } from "./chat-model.js";
8
8
  const CHAT_MODEL_CLAUDE_DEFAULT_MODEL = "claude-3-7-sonnet-latest";
9
+ /**
10
+ * @hidden
11
+ */
9
12
  export const claudeChatModelOptionsSchema = z.object({
10
13
  apiKey: z.string().optional(),
11
14
  model: z.string().optional(),
@@ -20,6 +23,24 @@ export const claudeChatModelOptionsSchema = z.object({
20
23
  })
21
24
  .optional(),
22
25
  });
26
+ /**
27
+ * Implementation of the ChatModel interface for Anthropic's Claude API
28
+ *
29
+ * This model provides access to Claude's capabilities including:
30
+ * - Text generation
31
+ * - Tool use
32
+ * - JSON structured output
33
+ *
34
+ * Default model: 'claude-3-7-sonnet-latest'
35
+ *
36
+ * @example
37
+ * Here's how to create and use a Claude chat model:
38
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model}
39
+ *
40
+ * @example
41
+ * Here's an example with streaming response:
42
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model-streaming-async-generator}
43
+ */
23
44
  export class ClaudeChatModel extends ChatModel {
24
45
  options;
25
46
  constructor(options) {
@@ -28,6 +49,9 @@ export class ClaudeChatModel extends ChatModel {
28
49
  super();
29
50
  this.options = options;
30
51
  }
52
+ /**
53
+ * @hidden
54
+ */
31
55
  _client;
32
56
  get client() {
33
57
  const apiKey = this.options?.apiKey || process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_API_KEY;
@@ -39,7 +63,15 @@ export class ClaudeChatModel extends ChatModel {
39
63
  get modelOptions() {
40
64
  return this.options?.modelOptions;
41
65
  }
42
- async process(input, _context, options) {
66
+ /**
67
+ * Process the input using Claude's chat model
68
+ * @param input - The input to process
69
+ * @returns The processed output from the model
70
+ */
71
+ process(input) {
72
+ return this._process(input);
73
+ }
74
+ async _process(input) {
43
75
  const model = this.options?.model || CHAT_MODEL_CLAUDE_DEFAULT_MODEL;
44
76
  const disableParallelToolUse = input.modelOptions?.parallelToolCalls === false ||
45
77
  this.modelOptions?.parallelToolCalls === false;
@@ -56,7 +88,7 @@ export class ClaudeChatModel extends ChatModel {
56
88
  ...body,
57
89
  stream: true,
58
90
  });
59
- if (options?.streaming && input.responseFormat?.type !== "json_schema") {
91
+ if (input.responseFormat?.type !== "json_schema") {
60
92
  return this.extractResultFromClaudeStream(stream, true);
61
93
  }
62
94
  const result = await this.extractResultFromClaudeStream(stream);
@@ -1,4 +1,20 @@
1
1
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ /**
3
+ * Implementation of the ChatModel interface for DeepSeek's API
4
+ *
5
+ * This model uses OpenAI-compatible API format to interact with DeepSeek's models,
6
+ * but with specific configuration and capabilities for DeepSeek.
7
+ *
8
+ * Default model: 'deepseek-chat'
9
+ *
10
+ * @example
11
+ * Here's how to create and use a DeepSeek chat model:
12
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model}
13
+ *
14
+ * @example
15
+ * Here's an example with streaming response:
16
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model-streaming}
17
+ */
2
18
  export declare class DeepSeekChatModel extends OpenAIChatModel {
3
19
  constructor(options?: OpenAIChatModelOptions);
4
20
  protected apiKeyEnvName: string;
@@ -1,6 +1,22 @@
1
1
  import { OpenAIChatModel } from "./openai-chat-model.js";
2
2
  const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
3
3
  const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
4
+ /**
5
+ * Implementation of the ChatModel interface for DeepSeek's API
6
+ *
7
+ * This model uses OpenAI-compatible API format to interact with DeepSeek's models,
8
+ * but with specific configuration and capabilities for DeepSeek.
9
+ *
10
+ * Default model: 'deepseek-chat'
11
+ *
12
+ * @example
13
+ * Here's how to create and use a DeepSeek chat model:
14
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model}
15
+ *
16
+ * @example
17
+ * Here's an example with streaming response:
18
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model-streaming}
19
+ */
4
20
  export class DeepSeekChatModel extends OpenAIChatModel {
5
21
  constructor(options) {
6
22
  super({
@@ -1,8 +1,23 @@
1
1
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ /**
3
+ * Implementation of the ChatModel interface for Google's Gemini API
4
+ *
5
+ * This model uses OpenAI-compatible API format to interact with Google's Gemini models,
6
+ * providing access to models like Gemini 1.5 and Gemini 2.0.
7
+ *
8
+ * @example
9
+ * Here's how to create and use a Gemini chat model:
10
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model}
11
+ *
12
+ * @example
13
+ * Here's an example with streaming response:
14
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model-streaming}
15
+ */
2
16
  export declare class GeminiChatModel extends OpenAIChatModel {
3
17
  constructor(options?: OpenAIChatModelOptions);
4
18
  protected apiKeyEnvName: string;
5
19
  protected supportsEndWithSystemMessage: boolean;
6
20
  protected supportsToolsUseWithJsonSchema: boolean;
7
21
  protected supportsParallelToolCalls: boolean;
22
+ protected supportsToolStreaming: boolean;
8
23
  }
@@ -1,6 +1,20 @@
1
1
  import { OpenAIChatModel } from "./openai-chat-model.js";
2
2
  const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
3
3
  const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
4
+ /**
5
+ * Implementation of the ChatModel interface for Google's Gemini API
6
+ *
7
+ * This model uses OpenAI-compatible API format to interact with Google's Gemini models,
8
+ * providing access to models like Gemini 1.5 and Gemini 2.0.
9
+ *
10
+ * @example
11
+ * Here's how to create and use a Gemini chat model:
12
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model}
13
+ *
14
+ * @example
15
+ * Here's an example with streaming response:
16
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model-streaming}
17
+ */
4
18
  export class GeminiChatModel extends OpenAIChatModel {
5
19
  constructor(options) {
6
20
  super({
@@ -13,4 +27,5 @@ export class GeminiChatModel extends OpenAIChatModel {
13
27
  supportsEndWithSystemMessage = false;
14
28
  supportsToolsUseWithJsonSchema = false;
15
29
  supportsParallelToolCalls = false;
30
+ supportsToolStreaming = false;
16
31
  }
@@ -1,4 +1,20 @@
1
1
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ /**
3
+ * Implementation of the ChatModel interface for Ollama
4
+ *
5
+ * This model allows you to run open-source LLMs locally using Ollama,
6
+ * with an OpenAI-compatible API interface.
7
+ *
8
+ * Default model: 'llama3.2'
9
+ *
10
+ * @example
11
+ * Here's how to create and use an Ollama chat model:
12
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model}
13
+ *
14
+ * @example
15
+ * Here's an example with streaming response:
16
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model-streaming}
17
+ */
2
18
  export declare class OllamaChatModel extends OpenAIChatModel {
3
19
  constructor(options?: OpenAIChatModelOptions);
4
20
  protected apiKeyEnvName: string;
@@ -1,6 +1,22 @@
1
1
  import { OpenAIChatModel } from "./openai-chat-model.js";
2
2
  const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
3
3
  const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
4
+ /**
5
+ * Implementation of the ChatModel interface for Ollama
6
+ *
7
+ * This model allows you to run open-source LLMs locally using Ollama,
8
+ * with an OpenAI-compatible API interface.
9
+ *
10
+ * Default model: 'llama3.2'
11
+ *
12
+ * @example
13
+ * Here's how to create and use an Ollama chat model:
14
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model}
15
+ *
16
+ * @example
17
+ * Here's an example with streaming response:
18
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model-streaming}
19
+ */
4
20
  export class OllamaChatModel extends OpenAIChatModel {
5
21
  constructor(options) {
6
22
  super({
@@ -1,4 +1,20 @@
1
1
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
+ /**
3
+ * Implementation of the ChatModel interface for OpenRouter service
4
+ *
5
+ * OpenRouter provides access to a variety of large language models through a unified API.
6
+ * This implementation uses the OpenAI-compatible interface to connect to OpenRouter's service.
7
+ *
8
+ * Default model: 'openai/gpt-4o'
9
+ *
10
+ * @example
11
+ * Here's how to create and use an OpenRouter chat model:
12
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model}
13
+ *
14
+ * @example
15
+ * Here's an example with streaming response:
16
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model-streaming}
17
+ */
2
18
  export declare class OpenRouterChatModel extends OpenAIChatModel {
3
19
  constructor(options?: OpenAIChatModelOptions);
4
20
  protected apiKeyEnvName: string;
@@ -1,6 +1,22 @@
1
1
  import { OpenAIChatModel } from "./openai-chat-model.js";
2
2
  const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
3
3
  const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
4
+ /**
5
+ * Implementation of the ChatModel interface for OpenRouter service
6
+ *
7
+ * OpenRouter provides access to a variety of large language models through a unified API.
8
+ * This implementation uses the OpenAI-compatible interface to connect to OpenRouter's service.
9
+ *
10
+ * Default model: 'openai/gpt-4o'
11
+ *
12
+ * @example
13
+ * Here's how to create and use an OpenRouter chat model:
14
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model}
15
+ *
16
+ * @example
17
+ * Here's an example with streaming response:
18
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model-streaming}
19
+ */
4
20
  export class OpenRouterChatModel extends OpenAIChatModel {
5
21
  constructor(options) {
6
22
  super({
@@ -1,8 +1,8 @@
1
1
  import OpenAI from "openai";
2
2
  import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
3
3
  import { z } from "zod";
4
- import type { AgentInvokeOptions, AgentResponse } from "../agents/agent.js";
5
- import type { Context } from "../aigne/context.js";
4
+ import type { AgentProcessResult } from "../agents/agent.js";
5
+ import { type PromiseOrValue } from "../utils/type-utils.js";
6
6
  import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
7
7
  export interface OpenAIChatModelCapabilities {
8
8
  supportsNativeStructuredOutputs: boolean;
@@ -10,14 +10,39 @@ export interface OpenAIChatModelCapabilities {
10
10
  supportsToolsUseWithJsonSchema: boolean;
11
11
  supportsParallelToolCalls: boolean;
12
12
  supportsToolsEmptyParameters: boolean;
13
+ supportsToolStreaming: boolean;
13
14
  supportsTemperature: boolean;
14
15
  }
16
+ /**
17
+ * Configuration options for OpenAI Chat Model
18
+ */
15
19
  export interface OpenAIChatModelOptions {
20
+ /**
21
+ * API key for OpenAI API
22
+ *
23
+ * If not provided, will look for OPENAI_API_KEY in environment variables
24
+ */
16
25
  apiKey?: string;
26
+ /**
27
+ * Base URL for OpenAI API
28
+ *
29
+ * Useful for proxies or alternate endpoints
30
+ */
17
31
  baseURL?: string;
32
+ /**
33
+ * OpenAI model to use
34
+ *
35
+ * Defaults to 'gpt-4o-mini'
36
+ */
18
37
  model?: string;
38
+ /**
39
+ * Additional model options to control behavior
40
+ */
19
41
  modelOptions?: ChatModelOptions;
20
42
  }
43
+ /**
44
+ * @hidden
45
+ */
21
46
  export declare const openAIChatModelOptionsSchema: z.ZodObject<{
22
47
  apiKey: z.ZodOptional<z.ZodString>;
23
48
  baseURL: z.ZodOptional<z.ZodString>;
@@ -69,9 +94,31 @@ export declare const openAIChatModelOptionsSchema: z.ZodObject<{
69
94
  apiKey?: string | undefined;
70
95
  baseURL?: string | undefined;
71
96
  }>;
97
+ /**
98
+ * Implementation of the ChatModel interface for OpenAI's API
99
+ *
100
+ * This model provides access to OpenAI's capabilities including:
101
+ * - Text generation
102
+ * - Tool use with parallel tool calls
103
+ * - JSON structured output
104
+ * - Image understanding
105
+ *
106
+ * Default model: 'gpt-4o-mini'
107
+ *
108
+ * @example
109
+ * Here's how to create and use an OpenAI chat model:
110
+ * {@includeCode ../../test/models/openai-chat-model.test.ts#example-openai-chat-model}
111
+ *
112
+ * @example
113
+ * Here's an example with streaming response:
114
+ * {@includeCode ../../test/models/openai-chat-model.test.ts#example-openai-chat-model-streaming}
115
+ */
72
116
  export declare class OpenAIChatModel extends ChatModel {
73
117
  options?: OpenAIChatModelOptions | undefined;
74
118
  constructor(options?: OpenAIChatModelOptions | undefined);
119
+ /**
120
+ * @hidden
121
+ */
75
122
  protected _client?: OpenAI;
76
123
  protected apiKeyEnvName: string;
77
124
  protected apiKeyDefault: string | undefined;
@@ -80,20 +127,40 @@ export declare class OpenAIChatModel extends ChatModel {
80
127
  protected supportsToolsUseWithJsonSchema: boolean;
81
128
  protected supportsParallelToolCalls: boolean;
82
129
  protected supportsToolsEmptyParameters: boolean;
130
+ protected supportsToolStreaming: boolean;
83
131
  protected supportsTemperature: boolean;
84
132
  get client(): OpenAI;
85
133
  get modelOptions(): ChatModelOptions | undefined;
86
- process(input: ChatModelInput, _context: Context, options?: AgentInvokeOptions): Promise<AgentResponse<ChatModelOutput>>;
134
+ /**
135
+ * Process the input and generate a response
136
+ * @param input The input to process
137
+ * @returns The generated response
138
+ */
139
+ process(input: ChatModelInput): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
140
+ private _process;
87
141
  private getParallelToolCalls;
88
142
  private getRunMessages;
89
143
  private getRunResponseFormat;
90
144
  private requestStructuredOutput;
145
+ private extractResultFromStream;
91
146
  }
147
+ /**
148
+ * @hidden
149
+ */
92
150
  export declare const ROLE_MAP: {
93
151
  [key in Role]: ChatCompletionMessageParam["role"];
94
152
  };
153
+ /**
154
+ * @hidden
155
+ */
95
156
  export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
157
+ /**
158
+ * @hidden
159
+ */
96
160
  export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
97
161
  addTypeToEmptyParameters?: boolean;
98
162
  }): ChatCompletionTool[] | undefined;
163
+ /**
164
+ * @hidden
165
+ */
99
166
  export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;