@aigne/core 1.12.0 → 1.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (197)
  1. package/CHANGELOG.md +28 -0
  2. package/README.md +13 -26
  3. package/README.zh.md +24 -37
  4. package/lib/cjs/agents/agent.d.ts +522 -15
  5. package/lib/cjs/agents/agent.js +357 -36
  6. package/lib/cjs/agents/ai-agent.d.ts +210 -52
  7. package/lib/cjs/agents/ai-agent.js +182 -24
  8. package/lib/cjs/agents/mcp-agent.d.ts +112 -0
  9. package/lib/cjs/agents/mcp-agent.js +79 -1
  10. package/lib/cjs/agents/team-agent.d.ts +99 -0
  11. package/lib/cjs/agents/team-agent.js +94 -0
  12. package/lib/cjs/agents/user-agent.d.ts +6 -4
  13. package/lib/cjs/agents/user-agent.js +16 -5
  14. package/lib/cjs/aigne/aigne.d.ts +263 -16
  15. package/lib/cjs/aigne/aigne.js +130 -20
  16. package/lib/cjs/aigne/context.d.ts +24 -8
  17. package/lib/cjs/aigne/context.js +8 -22
  18. package/lib/cjs/aigne/message-queue.d.ts +26 -4
  19. package/lib/cjs/aigne/message-queue.js +42 -7
  20. package/lib/cjs/aigne/usage.d.ts +9 -0
  21. package/lib/cjs/aigne/usage.js +3 -0
  22. package/lib/cjs/client/client.d.ts +81 -3
  23. package/lib/cjs/client/client.js +38 -0
  24. package/lib/cjs/client/index.d.ts +1 -0
  25. package/lib/cjs/client/index.js +17 -0
  26. package/lib/cjs/index.d.ts +0 -1
  27. package/lib/cjs/index.js +0 -1
  28. package/lib/cjs/loader/agent-js.d.ts +1 -1
  29. package/lib/cjs/loader/agent-js.js +2 -2
  30. package/lib/cjs/loader/agent-yaml.d.ts +3 -2
  31. package/lib/cjs/loader/agent-yaml.js +2 -1
  32. package/lib/cjs/loader/index.d.ts +4 -4
  33. package/lib/cjs/loader/index.js +2 -0
  34. package/lib/cjs/memory/default-memory.d.ts +16 -0
  35. package/lib/cjs/memory/default-memory.js +70 -0
  36. package/lib/cjs/memory/index.d.ts +3 -0
  37. package/lib/cjs/memory/index.js +19 -0
  38. package/lib/cjs/memory/memory.d.ts +89 -0
  39. package/lib/cjs/memory/memory.js +132 -0
  40. package/lib/cjs/memory/recorder.d.ts +86 -0
  41. package/lib/cjs/memory/recorder.js +50 -0
  42. package/lib/cjs/memory/retriever.d.ts +99 -0
  43. package/lib/cjs/memory/retriever.js +51 -0
  44. package/lib/cjs/models/bedrock-chat-model.d.ts +79 -0
  45. package/lib/cjs/models/bedrock-chat-model.js +303 -0
  46. package/lib/cjs/models/chat-model.d.ts +279 -1
  47. package/lib/cjs/models/chat-model.js +62 -0
  48. package/lib/cjs/models/claude-chat-model.d.ts +49 -3
  49. package/lib/cjs/models/claude-chat-model.js +34 -2
  50. package/lib/cjs/models/deepseek-chat-model.d.ts +16 -0
  51. package/lib/cjs/models/deepseek-chat-model.js +16 -0
  52. package/lib/cjs/models/gemini-chat-model.d.ts +15 -0
  53. package/lib/cjs/models/gemini-chat-model.js +15 -0
  54. package/lib/cjs/models/ollama-chat-model.d.ts +16 -0
  55. package/lib/cjs/models/ollama-chat-model.js +16 -0
  56. package/lib/cjs/models/open-router-chat-model.d.ts +16 -0
  57. package/lib/cjs/models/open-router-chat-model.js +16 -0
  58. package/lib/cjs/models/openai-chat-model.d.ts +70 -3
  59. package/lib/cjs/models/openai-chat-model.js +147 -102
  60. package/lib/cjs/models/xai-chat-model.d.ts +16 -0
  61. package/lib/cjs/models/xai-chat-model.js +16 -0
  62. package/lib/cjs/prompt/prompt-builder.d.ts +4 -4
  63. package/lib/cjs/prompt/prompt-builder.js +21 -20
  64. package/lib/cjs/prompt/prompts/memory-message-template.d.ts +1 -0
  65. package/lib/cjs/prompt/prompts/memory-message-template.js +10 -0
  66. package/lib/cjs/prompt/template.js +5 -1
  67. package/lib/cjs/server/error.d.ts +11 -0
  68. package/lib/cjs/server/error.js +11 -0
  69. package/lib/cjs/server/index.d.ts +2 -0
  70. package/lib/cjs/server/index.js +18 -0
  71. package/lib/cjs/server/server.d.ts +89 -8
  72. package/lib/cjs/server/server.js +58 -0
  73. package/lib/cjs/utils/fs.d.ts +2 -0
  74. package/lib/cjs/utils/fs.js +25 -0
  75. package/lib/cjs/utils/prompts.d.ts +1 -0
  76. package/lib/cjs/utils/prompts.js +11 -2
  77. package/lib/cjs/utils/type-utils.d.ts +2 -0
  78. package/lib/cjs/utils/type-utils.js +26 -0
  79. package/lib/dts/agents/agent.d.ts +522 -15
  80. package/lib/dts/agents/ai-agent.d.ts +210 -52
  81. package/lib/dts/agents/mcp-agent.d.ts +112 -0
  82. package/lib/dts/agents/team-agent.d.ts +99 -0
  83. package/lib/dts/agents/user-agent.d.ts +6 -4
  84. package/lib/dts/aigne/aigne.d.ts +263 -16
  85. package/lib/dts/aigne/context.d.ts +24 -8
  86. package/lib/dts/aigne/message-queue.d.ts +26 -4
  87. package/lib/dts/aigne/usage.d.ts +9 -0
  88. package/lib/dts/client/client.d.ts +81 -3
  89. package/lib/dts/client/index.d.ts +1 -0
  90. package/lib/dts/index.d.ts +0 -1
  91. package/lib/dts/loader/agent-js.d.ts +1 -1
  92. package/lib/dts/loader/agent-yaml.d.ts +3 -2
  93. package/lib/dts/loader/index.d.ts +4 -4
  94. package/lib/dts/memory/default-memory.d.ts +16 -0
  95. package/lib/dts/memory/index.d.ts +3 -0
  96. package/lib/dts/memory/memory.d.ts +89 -0
  97. package/lib/dts/memory/recorder.d.ts +86 -0
  98. package/lib/dts/memory/retriever.d.ts +99 -0
  99. package/lib/dts/models/bedrock-chat-model.d.ts +79 -0
  100. package/lib/dts/models/chat-model.d.ts +279 -1
  101. package/lib/dts/models/claude-chat-model.d.ts +49 -3
  102. package/lib/dts/models/deepseek-chat-model.d.ts +16 -0
  103. package/lib/dts/models/gemini-chat-model.d.ts +15 -0
  104. package/lib/dts/models/ollama-chat-model.d.ts +16 -0
  105. package/lib/dts/models/open-router-chat-model.d.ts +16 -0
  106. package/lib/dts/models/openai-chat-model.d.ts +70 -3
  107. package/lib/dts/models/xai-chat-model.d.ts +16 -0
  108. package/lib/dts/prompt/prompt-builder.d.ts +4 -4
  109. package/lib/dts/prompt/prompts/memory-message-template.d.ts +1 -0
  110. package/lib/dts/server/error.d.ts +11 -0
  111. package/lib/dts/server/index.d.ts +2 -0
  112. package/lib/dts/server/server.d.ts +89 -8
  113. package/lib/dts/utils/fs.d.ts +2 -0
  114. package/lib/dts/utils/prompts.d.ts +1 -0
  115. package/lib/dts/utils/type-utils.d.ts +2 -0
  116. package/lib/esm/agents/agent.d.ts +522 -15
  117. package/lib/esm/agents/agent.js +351 -35
  118. package/lib/esm/agents/ai-agent.d.ts +210 -52
  119. package/lib/esm/agents/ai-agent.js +183 -25
  120. package/lib/esm/agents/mcp-agent.d.ts +112 -0
  121. package/lib/esm/agents/mcp-agent.js +79 -1
  122. package/lib/esm/agents/team-agent.d.ts +99 -0
  123. package/lib/esm/agents/team-agent.js +94 -0
  124. package/lib/esm/agents/user-agent.d.ts +6 -4
  125. package/lib/esm/agents/user-agent.js +17 -6
  126. package/lib/esm/aigne/aigne.d.ts +263 -16
  127. package/lib/esm/aigne/aigne.js +132 -22
  128. package/lib/esm/aigne/context.d.ts +24 -8
  129. package/lib/esm/aigne/context.js +9 -22
  130. package/lib/esm/aigne/message-queue.d.ts +26 -4
  131. package/lib/esm/aigne/message-queue.js +42 -8
  132. package/lib/esm/aigne/usage.d.ts +9 -0
  133. package/lib/esm/aigne/usage.js +3 -0
  134. package/lib/esm/client/client.d.ts +81 -3
  135. package/lib/esm/client/client.js +38 -0
  136. package/lib/esm/client/index.d.ts +1 -0
  137. package/lib/esm/client/index.js +1 -0
  138. package/lib/esm/index.d.ts +0 -1
  139. package/lib/esm/index.js +0 -1
  140. package/lib/esm/loader/agent-js.d.ts +1 -1
  141. package/lib/esm/loader/agent-js.js +2 -2
  142. package/lib/esm/loader/agent-yaml.d.ts +3 -2
  143. package/lib/esm/loader/agent-yaml.js +2 -1
  144. package/lib/esm/loader/index.d.ts +4 -4
  145. package/lib/esm/loader/index.js +2 -0
  146. package/lib/esm/memory/default-memory.d.ts +16 -0
  147. package/lib/esm/memory/default-memory.js +63 -0
  148. package/lib/esm/memory/index.d.ts +3 -0
  149. package/lib/esm/memory/index.js +3 -0
  150. package/lib/esm/memory/memory.d.ts +89 -0
  151. package/lib/esm/memory/memory.js +127 -0
  152. package/lib/esm/memory/recorder.d.ts +86 -0
  153. package/lib/esm/memory/recorder.js +46 -0
  154. package/lib/esm/memory/retriever.d.ts +99 -0
  155. package/lib/esm/memory/retriever.js +47 -0
  156. package/lib/esm/models/bedrock-chat-model.d.ts +79 -0
  157. package/lib/esm/models/bedrock-chat-model.js +298 -0
  158. package/lib/esm/models/chat-model.d.ts +279 -1
  159. package/lib/esm/models/chat-model.js +62 -0
  160. package/lib/esm/models/claude-chat-model.d.ts +49 -3
  161. package/lib/esm/models/claude-chat-model.js +35 -3
  162. package/lib/esm/models/deepseek-chat-model.d.ts +16 -0
  163. package/lib/esm/models/deepseek-chat-model.js +16 -0
  164. package/lib/esm/models/gemini-chat-model.d.ts +15 -0
  165. package/lib/esm/models/gemini-chat-model.js +15 -0
  166. package/lib/esm/models/ollama-chat-model.d.ts +16 -0
  167. package/lib/esm/models/ollama-chat-model.js +16 -0
  168. package/lib/esm/models/open-router-chat-model.d.ts +16 -0
  169. package/lib/esm/models/open-router-chat-model.js +16 -0
  170. package/lib/esm/models/openai-chat-model.d.ts +70 -3
  171. package/lib/esm/models/openai-chat-model.js +147 -102
  172. package/lib/esm/models/xai-chat-model.d.ts +16 -0
  173. package/lib/esm/models/xai-chat-model.js +16 -0
  174. package/lib/esm/prompt/prompt-builder.d.ts +4 -4
  175. package/lib/esm/prompt/prompt-builder.js +22 -21
  176. package/lib/esm/prompt/prompts/memory-message-template.d.ts +1 -0
  177. package/lib/esm/prompt/prompts/memory-message-template.js +7 -0
  178. package/lib/esm/prompt/template.js +5 -1
  179. package/lib/esm/server/error.d.ts +11 -0
  180. package/lib/esm/server/error.js +11 -0
  181. package/lib/esm/server/index.d.ts +2 -0
  182. package/lib/esm/server/index.js +2 -0
  183. package/lib/esm/server/server.d.ts +89 -8
  184. package/lib/esm/server/server.js +58 -0
  185. package/lib/esm/utils/fs.d.ts +2 -0
  186. package/lib/esm/utils/fs.js +21 -0
  187. package/lib/esm/utils/prompts.d.ts +1 -0
  188. package/lib/esm/utils/prompts.js +10 -2
  189. package/lib/esm/utils/type-utils.d.ts +2 -0
  190. package/lib/esm/utils/type-utils.js +24 -0
  191. package/package.json +21 -11
  192. package/lib/cjs/agents/memory.d.ts +0 -26
  193. package/lib/cjs/agents/memory.js +0 -45
  194. package/lib/dts/agents/memory.d.ts +0 -26
  195. package/lib/esm/agents/memory.d.ts +0 -26
  196. package/lib/esm/agents/memory.js +0 -41
  197. /package/{LICENSE → LICENSE.md} +0 -0
@@ -3,6 +3,29 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.ChatModel = void 0;
  const zod_1 = require("zod");
  const agent_js_1 = require("../agents/agent.js");
+ /**
+ * ChatModel is an abstract base class for interacting with Large Language Models (LLMs).
+ *
+ * This class extends the Agent class and provides a common interface for handling model inputs,
+ * outputs, and capabilities. Specific model implementations (like OpenAI, Anthropic, etc.)
+ * should inherit from this class and implement their specific functionalities.
+ *
+ * @example
+ * Here's how to implement a custom ChatModel:
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model}
+ *
+ * @example
+ * Here's an example showing streaming response with readable stream:
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model-streaming}
+ *
+ * @example
+ * Here's an example showing streaming response with async generator:
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model-streaming-async-generator}
+ *
+ * @example
+ * Here's an example with tool calls:
+ * {@includeCode ../../test/models/chat-model.test.ts#example-chat-model-tools}
+ */
  class ChatModel extends agent_js_1.Agent {
  constructor() {
  super({
@@ -10,12 +33,41 @@ class ChatModel extends agent_js_1.Agent {
  outputSchema: chatModelOutputSchema,
  });
  }
+ /**
+ * Indicates whether the model supports parallel tool calls
+ *
+ * Defaults to true, subclasses can override this property based on
+ * specific model capabilities
+ */
  supportsParallelToolCalls = true;
+ /**
+ * Gets the model's supported capabilities
+ *
+ * Currently returns capabilities including: whether parallel tool calls are supported
+ *
+ * @returns An object containing model capabilities
+ */
  getModelCapabilities() {
  return {
  supportsParallelToolCalls: this.supportsParallelToolCalls,
  };
  }
+ validateToolNames(tools) {
+ for (const tool of tools ?? []) {
+ if (!/^[a-zA-Z0-9_]+$/.test(tool.function.name)) {
+ throw new Error(`Tool name "${tool.function.name}" can only contain letters, numbers, and underscores`);
+ }
+ }
+ }
+ /**
+ * Performs preprocessing operations before handling input
+ *
+ * Primarily checks if token usage exceeds limits, throwing an exception if limits are exceeded
+ *
+ * @param input Input message
+ * @param context Execution context
+ * @throws Error if token usage exceeds maximum limit
+ */
  preprocess(input, context) {
  super.preprocess(input, context);
  const { limits, usage } = context;
@@ -23,7 +75,17 @@
  if (limits?.maxTokens && usedTokens >= limits.maxTokens) {
  throw new Error(`Exceeded max tokens ${usedTokens}/${limits.maxTokens}`);
  }
+ this.validateToolNames(input.tools);
  }
+ /**
+ * Performs postprocessing operations after handling output
+ *
+ * Primarily updates token usage statistics in the context
+ *
+ * @param input Input message
+ * @param output Output message
+ * @param context Execution context
+ */
  postprocess(input, output, context) {
  super.postprocess(input, output, context);
  const { usage } = output;
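
The @example tags above pull their code from test fixtures via {@includeCode}, so the samples themselves are not part of this diff. As a rough sketch of the documented contract, a custom model could look like the following (hypothetical EchoChatModel; the root-level exports and the exact ChatModelInput/ChatModelOutput field names are assumptions inferred from these hunks):

    import { ChatModel, type ChatModelInput, type ChatModelOutput } from "@aigne/core";

    // Hypothetical minimal model: echoes the conversation back as text.
    class EchoChatModel extends ChatModel {
      override process(input: ChatModelInput): ChatModelOutput {
        // preprocess() has already rejected tool names that fail
        // /^[a-zA-Z0-9_]+$/ and enforced context.limits.maxTokens.
        return {
          text: input.messages.map((m) => String(m.content ?? "")).join("\n"),
        };
      }
    }

Because preprocess and postprocess live on the base class, a subclass only has to supply process; token accounting and the new tool-name validation come for free.
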
@@ -1,13 +1,32 @@
  import Anthropic from "@anthropic-ai/sdk";
  import { z } from "zod";
- import type { AgentInvokeOptions, AgentResponse } from "../agents/agent.js";
- import type { Context } from "../aigne/context.js";
+ import type { AgentProcessResult } from "../agents/agent.js";
+ import { type PromiseOrValue } from "../utils/type-utils.js";
  import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
+ /**
+ * Configuration options for Claude Chat Model
+ */
  export interface ClaudeChatModelOptions {
+ /**
+ * API key for Anthropic's Claude API
+ *
+ * If not provided, will look for ANTHROPIC_API_KEY or CLAUDE_API_KEY in environment variables
+ */
  apiKey?: string;
+ /**
+ * Claude model to use
+ *
+ * Defaults to 'claude-3-7-sonnet-latest'
+ */
  model?: string;
+ /**
+ * Additional model options to control behavior
+ */
  modelOptions?: ChatModelOptions;
  }
+ /**
+ * @hidden
+ */
  export declare const claudeChatModelOptionsSchema: z.ZodObject<{
  apiKey: z.ZodOptional<z.ZodString>;
  model: z.ZodOptional<z.ZodString>;
@@ -56,13 +75,40 @@ export declare const claudeChatModelOptionsSchema: z.ZodObject<{
  model?: string | undefined;
  apiKey?: string | undefined;
  }>;
+ /**
+ * Implementation of the ChatModel interface for Anthropic's Claude API
+ *
+ * This model provides access to Claude's capabilities including:
+ * - Text generation
+ * - Tool use
+ * - JSON structured output
+ *
+ * Default model: 'claude-3-7-sonnet-latest'
+ *
+ * @example
+ * Here's how to create and use a Claude chat model:
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model-streaming-async-generator}
+ */
  export declare class ClaudeChatModel extends ChatModel {
  options?: ClaudeChatModelOptions | undefined;
  constructor(options?: ClaudeChatModelOptions | undefined);
+ /**
+ * @hidden
+ */
  protected _client?: Anthropic;
  get client(): Anthropic;
  get modelOptions(): ChatModelOptions | undefined;
- process(input: ChatModelInput, _context: Context, options?: AgentInvokeOptions): Promise<AgentResponse<ChatModelOutput>>;
+ /**
+ * Process the input using Claude's chat model
+ * @param input - The input to process
+ * @returns The processed output from the model
+ */
+ process(input: ChatModelInput): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
+ private _process;
  private extractResultFromClaudeStream;
  private requestStructuredOutput;
  }
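
The replaced process declaration is the breaking piece of this release for model implementers: process no longer receives a Context or per-call AgentInvokeOptions, and streaming is no longer toggled by the caller at this layer. A declaration-level sketch of the migration, using local stand-ins for the package types so it stays self-contained (the exact AgentProcessResult union is an assumption):

    // Local stand-ins mirroring (not importing) the package's types.
    type ChatModelInput = Record<string, unknown>;
    type ChatModelOutput = { text?: string; json?: unknown };
    type PromiseOrValue<T> = T | Promise<T>;
    type AgentProcessResult<O> = O | AsyncGenerator<unknown, O | void>;

    // 1.12.x (removed):
    //   process(input, context, options): Promise<AgentResponse<ChatModelOutput>>
    // 1.14.x (added): the model sees only the input; the framework decides
    // whether a returned stream is consumed incrementally or collected.
    declare function process(
      input: ChatModelInput,
    ): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
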
@@ -12,6 +12,9 @@ const stream_utils_js_1 = require("../utils/stream-utils.js");
  const type_utils_js_1 = require("../utils/type-utils.js");
  const chat_model_js_1 = require("./chat-model.js");
  const CHAT_MODEL_CLAUDE_DEFAULT_MODEL = "claude-3-7-sonnet-latest";
+ /**
+ * @hidden
+ */
  exports.claudeChatModelOptionsSchema = zod_1.z.object({
  apiKey: zod_1.z.string().optional(),
  model: zod_1.z.string().optional(),
@@ -26,6 +29,24 @@ exports.claudeChatModelOptionsSchema = zod_1.z.object({
  })
  .optional(),
  });
+ /**
+ * Implementation of the ChatModel interface for Anthropic's Claude API
+ *
+ * This model provides access to Claude's capabilities including:
+ * - Text generation
+ * - Tool use
+ * - JSON structured output
+ *
+ * Default model: 'claude-3-7-sonnet-latest'
+ *
+ * @example
+ * Here's how to create and use a Claude chat model:
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model-streaming-async-generator}
+ */
  class ClaudeChatModel extends chat_model_js_1.ChatModel {
  options;
  constructor(options) {
@@ -34,6 +55,9 @@ class ClaudeChatModel extends chat_model_js_1.ChatModel {
  super();
  this.options = options;
  }
+ /**
+ * @hidden
+ */
  _client;
  get client() {
  const apiKey = this.options?.apiKey || process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_API_KEY;
@@ -45,7 +69,15 @@ class ClaudeChatModel extends chat_model_js_1.ChatModel {
  get modelOptions() {
  return this.options?.modelOptions;
  }
- async process(input, _context, options) {
+ /**
+ * Process the input using Claude's chat model
+ * @param input - The input to process
+ * @returns The processed output from the model
+ */
+ process(input) {
+ return this._process(input);
+ }
+ async _process(input) {
  const model = this.options?.model || CHAT_MODEL_CLAUDE_DEFAULT_MODEL;
  const disableParallelToolUse = input.modelOptions?.parallelToolCalls === false ||
  this.modelOptions?.parallelToolCalls === false;
@@ -62,7 +94,7 @@ class ClaudeChatModel extends chat_model_js_1.ChatModel {
  ...body,
  stream: true,
  });
- if (options?.streaming && input.responseFormat?.type !== "json_schema") {
+ if (input.responseFormat?.type !== "json_schema") {
  return this.extractResultFromClaudeStream(stream, true);
  }
  const result = await this.extractResultFromClaudeStream(stream);
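
Reading the new options JSDoc together with the implementation, construction and invocation might look like this sketch (invoke() is assumed from the Agent base class, and result.text from ChatModelOutput; neither is shown in this diff):

    import { ClaudeChatModel } from "@aigne/core";

    const model = new ClaudeChatModel({
      // Falls back to ANTHROPIC_API_KEY or CLAUDE_API_KEY when omitted.
      apiKey: process.env.ANTHROPIC_API_KEY,
      model: "claude-3-7-sonnet-latest", // the default shown above
      // parallelToolCalls: false maps to Claude's disable_parallel_tool_use.
      modelOptions: { parallelToolCalls: false },
    });

    const result = await model.invoke({
      messages: [{ role: "user", content: "Hello, Claude!" }],
    });
    console.log(result.text);

Note the behavioral change in the last hunk: the stream result is now returned whenever the response format is not json_schema, instead of only when the caller passed options.streaming.
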
@@ -1,4 +1,20 @@
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ /**
+ * Implementation of the ChatModel interface for DeepSeek's API
+ *
+ * This model uses OpenAI-compatible API format to interact with DeepSeek's models,
+ * but with specific configuration and capabilities for DeepSeek.
+ *
+ * Default model: 'deepseek-chat'
+ *
+ * @example
+ * Here's how to create and use a DeepSeek chat model:
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model-streaming}
+ */
  export declare class DeepSeekChatModel extends OpenAIChatModel {
  constructor(options?: OpenAIChatModelOptions);
  protected apiKeyEnvName: string;
@@ -4,6 +4,22 @@ exports.DeepSeekChatModel = void 0;
  const openai_chat_model_js_1 = require("./openai-chat-model.js");
  const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
  const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
+ /**
+ * Implementation of the ChatModel interface for DeepSeek's API
+ *
+ * This model uses OpenAI-compatible API format to interact with DeepSeek's models,
+ * but with specific configuration and capabilities for DeepSeek.
+ *
+ * Default model: 'deepseek-chat'
+ *
+ * @example
+ * Here's how to create and use a DeepSeek chat model:
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model-streaming}
+ */
  class DeepSeekChatModel extends openai_chat_model_js_1.OpenAIChatModel {
  constructor(options) {
  super({
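
A construction sketch based on the constants above (DEEPSEEK_API_KEY is a guess; the diff shows a protected apiKeyEnvName but not its value):

    import { DeepSeekChatModel } from "@aigne/core";

    // Preconfigured for https://api.deepseek.com with model "deepseek-chat".
    const model = new DeepSeekChatModel({
      apiKey: process.env.DEEPSEEK_API_KEY,
    });
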
@@ -1,8 +1,23 @@
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ /**
+ * Implementation of the ChatModel interface for Google's Gemini API
+ *
+ * This model uses OpenAI-compatible API format to interact with Google's Gemini models,
+ * providing access to models like Gemini 1.5 and Gemini 2.0.
+ *
+ * @example
+ * Here's how to create and use a Gemini chat model:
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model-streaming}
+ */
  export declare class GeminiChatModel extends OpenAIChatModel {
  constructor(options?: OpenAIChatModelOptions);
  protected apiKeyEnvName: string;
  protected supportsEndWithSystemMessage: boolean;
  protected supportsToolsUseWithJsonSchema: boolean;
  protected supportsParallelToolCalls: boolean;
+ protected supportsToolStreaming: boolean;
  }
@@ -4,6 +4,20 @@ exports.GeminiChatModel = void 0;
  const openai_chat_model_js_1 = require("./openai-chat-model.js");
  const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
  const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
+ /**
+ * Implementation of the ChatModel interface for Google's Gemini API
+ *
+ * This model uses OpenAI-compatible API format to interact with Google's Gemini models,
+ * providing access to models like Gemini 1.5 and Gemini 2.0.
+ *
+ * @example
+ * Here's how to create and use a Gemini chat model:
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model-streaming}
+ */
  class GeminiChatModel extends openai_chat_model_js_1.OpenAIChatModel {
  constructor(options) {
  super({
@@ -16,5 +30,6 @@ class GeminiChatModel extends openai_chat_model_js_1.OpenAIChatModel {
  supportsEndWithSystemMessage = false;
  supportsToolsUseWithJsonSchema = false;
  supportsParallelToolCalls = false;
+ supportsToolStreaming = false;
  }
  exports.GeminiChatModel = GeminiChatModel;
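
The second hunk shows how capability downgrades work: they are plain class fields, so Gemini opts out of parallel tool calls and the new tool streaming in one line each. Construction follows the same pattern as the other OpenAI-compatible subclasses (GEMINI_API_KEY is an assumed variable name):

    import { GeminiChatModel } from "@aigne/core";

    // Uses Google's OpenAI-compatible endpoint; defaults to "gemini-2.0-flash".
    const model = new GeminiChatModel({
      apiKey: process.env.GEMINI_API_KEY,
    });
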
@@ -1,4 +1,20 @@
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ /**
+ * Implementation of the ChatModel interface for Ollama
+ *
+ * This model allows you to run open-source LLMs locally using Ollama,
+ * with an OpenAI-compatible API interface.
+ *
+ * Default model: 'llama3.2'
+ *
+ * @example
+ * Here's how to create and use an Ollama chat model:
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model-streaming}
+ */
  export declare class OllamaChatModel extends OpenAIChatModel {
  constructor(options?: OpenAIChatModelOptions);
  protected apiKeyEnvName: string;
@@ -4,6 +4,22 @@ exports.OllamaChatModel = void 0;
  const openai_chat_model_js_1 = require("./openai-chat-model.js");
  const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
  const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
+ /**
+ * Implementation of the ChatModel interface for Ollama
+ *
+ * This model allows you to run open-source LLMs locally using Ollama,
+ * with an OpenAI-compatible API interface.
+ *
+ * Default model: 'llama3.2'
+ *
+ * @example
+ * Here's how to create and use an Ollama chat model:
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model-streaming}
+ */
  class OllamaChatModel extends openai_chat_model_js_1.OpenAIChatModel {
  constructor(options) {
  super({
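
Because Ollama serves an OpenAI-compatible API locally, a sketch needs a running server rather than a real key (defaults taken from the constants above; root export assumed as in the other sketches):

    import { OllamaChatModel } from "@aigne/core";

    // Talks to a local Ollama server at http://localhost:11434/v1 by default.
    const model = new OllamaChatModel({
      model: "llama3.2", // the default model shown above
    });
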
@@ -1,4 +1,20 @@
  import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+ /**
+ * Implementation of the ChatModel interface for OpenRouter service
+ *
+ * OpenRouter provides access to a variety of large language models through a unified API.
+ * This implementation uses the OpenAI-compatible interface to connect to OpenRouter's service.
+ *
+ * Default model: 'openai/gpt-4o'
+ *
+ * @example
+ * Here's how to create and use an OpenRouter chat model:
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model-streaming}
+ */
  export declare class OpenRouterChatModel extends OpenAIChatModel {
  constructor(options?: OpenAIChatModelOptions);
  protected apiKeyEnvName: string;
@@ -4,6 +4,22 @@ exports.OpenRouterChatModel = void 0;
  const openai_chat_model_js_1 = require("./openai-chat-model.js");
  const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
  const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
+ /**
+ * Implementation of the ChatModel interface for OpenRouter service
+ *
+ * OpenRouter provides access to a variety of large language models through a unified API.
+ * This implementation uses the OpenAI-compatible interface to connect to OpenRouter's service.
+ *
+ * Default model: 'openai/gpt-4o'
+ *
+ * @example
+ * Here's how to create and use an OpenRouter chat model:
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model-streaming}
+ */
  class OpenRouterChatModel extends openai_chat_model_js_1.OpenAIChatModel {
  constructor(options) {
  super({
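
Construction mirrors the other OpenAI-compatible subclasses; model strings use OpenRouter's provider-prefixed naming (OPEN_ROUTER_API_KEY is an assumed variable name):

    import { OpenRouterChatModel } from "@aigne/core";

    // Routes through https://openrouter.ai/api/v1; defaults to "openai/gpt-4o".
    const model = new OpenRouterChatModel({
      apiKey: process.env.OPEN_ROUTER_API_KEY,
      model: "anthropic/claude-3.5-sonnet",
    });
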
@@ -1,8 +1,8 @@
  import OpenAI from "openai";
  import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
  import { z } from "zod";
- import type { AgentInvokeOptions, AgentResponse } from "../agents/agent.js";
- import type { Context } from "../aigne/context.js";
+ import type { AgentProcessResult } from "../agents/agent.js";
+ import { type PromiseOrValue } from "../utils/type-utils.js";
  import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
  export interface OpenAIChatModelCapabilities {
  supportsNativeStructuredOutputs: boolean;
@@ -10,14 +10,39 @@ export interface OpenAIChatModelCapabilities {
  supportsToolsUseWithJsonSchema: boolean;
  supportsParallelToolCalls: boolean;
  supportsToolsEmptyParameters: boolean;
+ supportsToolStreaming: boolean;
  supportsTemperature: boolean;
  }
+ /**
+ * Configuration options for OpenAI Chat Model
+ */
  export interface OpenAIChatModelOptions {
+ /**
+ * API key for OpenAI API
+ *
+ * If not provided, will look for OPENAI_API_KEY in environment variables
+ */
  apiKey?: string;
+ /**
+ * Base URL for OpenAI API
+ *
+ * Useful for proxies or alternate endpoints
+ */
  baseURL?: string;
+ /**
+ * OpenAI model to use
+ *
+ * Defaults to 'gpt-4o-mini'
+ */
  model?: string;
+ /**
+ * Additional model options to control behavior
+ */
  modelOptions?: ChatModelOptions;
  }
+ /**
+ * @hidden
+ */
  export declare const openAIChatModelOptionsSchema: z.ZodObject<{
  apiKey: z.ZodOptional<z.ZodString>;
  baseURL: z.ZodOptional<z.ZodString>;
@@ -69,9 +94,31 @@ export declare const openAIChatModelOptionsSchema: z.ZodObject<{
  apiKey?: string | undefined;
  baseURL?: string | undefined;
  }>;
+ /**
+ * Implementation of the ChatModel interface for OpenAI's API
+ *
+ * This model provides access to OpenAI's capabilities including:
+ * - Text generation
+ * - Tool use with parallel tool calls
+ * - JSON structured output
+ * - Image understanding
+ *
+ * Default model: 'gpt-4o-mini'
+ *
+ * @example
+ * Here's how to create and use an OpenAI chat model:
+ * {@includeCode ../../test/models/openai-chat-model.test.ts#example-openai-chat-model}
+ *
+ * @example
+ * Here's an example with streaming response:
+ * {@includeCode ../../test/models/openai-chat-model.test.ts#example-openai-chat-model-streaming}
+ */
  export declare class OpenAIChatModel extends ChatModel {
  options?: OpenAIChatModelOptions | undefined;
  constructor(options?: OpenAIChatModelOptions | undefined);
+ /**
+ * @hidden
+ */
  protected _client?: OpenAI;
  protected apiKeyEnvName: string;
  protected apiKeyDefault: string | undefined;
@@ -80,20 +127,40 @@ export declare class OpenAIChatModel extends ChatModel {
  protected supportsToolsUseWithJsonSchema: boolean;
  protected supportsParallelToolCalls: boolean;
  protected supportsToolsEmptyParameters: boolean;
+ protected supportsToolStreaming: boolean;
  protected supportsTemperature: boolean;
  get client(): OpenAI;
  get modelOptions(): ChatModelOptions | undefined;
- process(input: ChatModelInput, _context: Context, options?: AgentInvokeOptions): Promise<AgentResponse<ChatModelOutput>>;
+ /**
+ * Process the input and generate a response
+ * @param input The input to process
+ * @returns The generated response
+ */
+ process(input: ChatModelInput): PromiseOrValue<AgentProcessResult<ChatModelOutput>>;
+ private _process;
  private getParallelToolCalls;
  private getRunMessages;
  private getRunResponseFormat;
  private requestStructuredOutput;
+ private extractResultFromStream;
  }
+ /**
+ * @hidden
+ */
  export declare const ROLE_MAP: {
  [key in Role]: ChatCompletionMessageParam["role"];
  };
+ /**
+ * @hidden
+ */
  export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
+ /**
+ * @hidden
+ */
  export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
  addTypeToEmptyParameters?: boolean;
  }): ChatCompletionTool[] | undefined;
+ /**
+ * @hidden
+ */
  export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;
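
Putting the declaration changes together, typical use after this release might look like the sketch below (invoke() and result.text are assumptions carried over from the base Agent and ChatModelOutput types; only the OPENAI_API_KEY fallback is documented in this diff):

    import { OpenAIChatModel } from "@aigne/core";

    const model = new OpenAIChatModel({
      apiKey: process.env.OPENAI_API_KEY,
      model: "gpt-4o-mini", // the default noted above
      baseURL: "https://api.openai.com/v1", // optional; useful for proxies
    });

    // process() now takes only ChatModelInput; streaming is produced by the
    // model and consumed by the framework, not requested per call.
    const result = await model.invoke({
      messages: [{ role: "user", content: "Say hi" }],
    });
    console.log(result.text);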