@aigne/core 1.14.0 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145)
  1. package/CHANGELOG.md +27 -0
  2. package/README.md +9 -7
  3. package/README.zh.md +9 -7
  4. package/lib/cjs/agents/agent.d.ts +129 -6
  5. package/lib/cjs/agents/agent.js +112 -20
  6. package/lib/cjs/agents/ai-agent.d.ts +3 -2
  7. package/lib/cjs/agents/ai-agent.js +12 -9
  8. package/lib/{esm/models → cjs/agents}/chat-model.d.ts +24 -13
  9. package/lib/cjs/{models → agents}/chat-model.js +48 -7
  10. package/lib/cjs/agents/guide-rail-agent.d.ts +62 -0
  11. package/lib/cjs/agents/guide-rail-agent.js +14 -0
  12. package/lib/cjs/agents/mcp-agent.js +9 -9
  13. package/lib/cjs/agents/team-agent.js +1 -1
  14. package/lib/cjs/aigne/aigne.d.ts +3 -2
  15. package/lib/cjs/aigne/aigne.js +2 -2
  16. package/lib/cjs/aigne/context.d.ts +2 -1
  17. package/lib/cjs/aigne/context.js +8 -1
  18. package/lib/cjs/index.d.ts +1 -1
  19. package/lib/cjs/index.js +1 -1
  20. package/lib/cjs/loader/agent-yaml.d.ts +1 -1
  21. package/lib/cjs/loader/index.d.ts +18 -11
  22. package/lib/cjs/loader/index.js +8 -27
  23. package/lib/cjs/memory/retriever.d.ts +2 -2
  24. package/lib/cjs/prompt/prompt-builder.d.ts +3 -3
  25. package/lib/cjs/prompt/template.d.ts +3 -3
  26. package/lib/cjs/prompt/template.js +1 -1
  27. package/lib/cjs/utils/json-schema.js +1 -1
  28. package/lib/cjs/utils/logger.d.ts +33 -8
  29. package/lib/cjs/utils/logger.js +63 -5
  30. package/lib/cjs/utils/model-utils.d.ts +1 -1
  31. package/lib/cjs/utils/stream-utils.d.ts +3 -2
  32. package/lib/cjs/utils/stream-utils.js +50 -26
  33. package/lib/cjs/utils/type-utils.d.ts +5 -0
  34. package/lib/dts/agents/agent.d.ts +129 -6
  35. package/lib/dts/agents/ai-agent.d.ts +3 -2
  36. package/lib/{cjs/models → dts/agents}/chat-model.d.ts +24 -13
  37. package/lib/dts/agents/guide-rail-agent.d.ts +62 -0
  38. package/lib/dts/aigne/aigne.d.ts +3 -2
  39. package/lib/dts/aigne/context.d.ts +2 -1
  40. package/lib/dts/index.d.ts +1 -1
  41. package/lib/dts/loader/agent-yaml.d.ts +1 -1
  42. package/lib/dts/loader/index.d.ts +18 -11
  43. package/lib/dts/memory/retriever.d.ts +2 -2
  44. package/lib/dts/prompt/prompt-builder.d.ts +3 -3
  45. package/lib/dts/prompt/template.d.ts +3 -3
  46. package/lib/dts/utils/logger.d.ts +33 -8
  47. package/lib/dts/utils/model-utils.d.ts +1 -1
  48. package/lib/dts/utils/stream-utils.d.ts +3 -2
  49. package/lib/dts/utils/type-utils.d.ts +5 -0
  50. package/lib/esm/agents/agent.d.ts +129 -6
  51. package/lib/esm/agents/agent.js +112 -20
  52. package/lib/esm/agents/ai-agent.d.ts +3 -2
  53. package/lib/esm/agents/ai-agent.js +12 -9
  54. package/lib/{dts/models → esm/agents}/chat-model.d.ts +24 -13
  55. package/lib/esm/{models → agents}/chat-model.js +48 -7
  56. package/lib/esm/agents/guide-rail-agent.d.ts +62 -0
  57. package/lib/esm/agents/guide-rail-agent.js +11 -0
  58. package/lib/esm/agents/mcp-agent.js +9 -9
  59. package/lib/esm/agents/team-agent.js +2 -2
  60. package/lib/esm/aigne/aigne.d.ts +3 -2
  61. package/lib/esm/aigne/aigne.js +2 -2
  62. package/lib/esm/aigne/context.d.ts +2 -1
  63. package/lib/esm/aigne/context.js +9 -2
  64. package/lib/esm/index.d.ts +1 -1
  65. package/lib/esm/index.js +1 -1
  66. package/lib/esm/loader/agent-yaml.d.ts +1 -1
  67. package/lib/esm/loader/index.d.ts +18 -11
  68. package/lib/esm/loader/index.js +8 -27
  69. package/lib/esm/memory/retriever.d.ts +2 -2
  70. package/lib/esm/prompt/prompt-builder.d.ts +3 -3
  71. package/lib/esm/prompt/template.d.ts +3 -3
  72. package/lib/esm/prompt/template.js +1 -1
  73. package/lib/esm/utils/json-schema.js +1 -1
  74. package/lib/esm/utils/logger.d.ts +33 -8
  75. package/lib/esm/utils/logger.js +61 -4
  76. package/lib/esm/utils/model-utils.d.ts +1 -1
  77. package/lib/esm/utils/stream-utils.d.ts +3 -2
  78. package/lib/esm/utils/stream-utils.js +48 -25
  79. package/lib/esm/utils/type-utils.d.ts +5 -0
  80. package/package.json +1 -20
  81. package/lib/cjs/client/client.d.ts +0 -97
  82. package/lib/cjs/client/client.js +0 -87
  83. package/lib/cjs/client/index.d.ts +0 -1
  84. package/lib/cjs/client/index.js +0 -17
  85. package/lib/cjs/models/bedrock-chat-model.d.ts +0 -79
  86. package/lib/cjs/models/bedrock-chat-model.js +0 -303
  87. package/lib/cjs/models/claude-chat-model.d.ts +0 -114
  88. package/lib/cjs/models/claude-chat-model.js +0 -317
  89. package/lib/cjs/models/deepseek-chat-model.d.ts +0 -23
  90. package/lib/cjs/models/deepseek-chat-model.js +0 -35
  91. package/lib/cjs/models/gemini-chat-model.d.ts +0 -23
  92. package/lib/cjs/models/gemini-chat-model.js +0 -35
  93. package/lib/cjs/models/ollama-chat-model.d.ts +0 -22
  94. package/lib/cjs/models/ollama-chat-model.js +0 -34
  95. package/lib/cjs/models/open-router-chat-model.d.ts +0 -22
  96. package/lib/cjs/models/open-router-chat-model.js +0 -34
  97. package/lib/cjs/models/openai-chat-model.d.ts +0 -166
  98. package/lib/cjs/models/openai-chat-model.js +0 -415
  99. package/lib/cjs/models/xai-chat-model.d.ts +0 -21
  100. package/lib/cjs/models/xai-chat-model.js +0 -33
  101. package/lib/cjs/server/error.d.ts +0 -15
  102. package/lib/cjs/server/error.js +0 -22
  103. package/lib/cjs/server/index.d.ts +0 -2
  104. package/lib/cjs/server/index.js +0 -18
  105. package/lib/cjs/server/server.d.ts +0 -135
  106. package/lib/cjs/server/server.js +0 -188
  107. package/lib/dts/client/client.d.ts +0 -97
  108. package/lib/dts/client/index.d.ts +0 -1
  109. package/lib/dts/models/bedrock-chat-model.d.ts +0 -79
  110. package/lib/dts/models/claude-chat-model.d.ts +0 -114
  111. package/lib/dts/models/deepseek-chat-model.d.ts +0 -23
  112. package/lib/dts/models/gemini-chat-model.d.ts +0 -23
  113. package/lib/dts/models/ollama-chat-model.d.ts +0 -22
  114. package/lib/dts/models/open-router-chat-model.d.ts +0 -22
  115. package/lib/dts/models/openai-chat-model.d.ts +0 -166
  116. package/lib/dts/models/xai-chat-model.d.ts +0 -21
  117. package/lib/dts/server/error.d.ts +0 -15
  118. package/lib/dts/server/index.d.ts +0 -2
  119. package/lib/dts/server/server.d.ts +0 -135
  120. package/lib/esm/client/client.d.ts +0 -97
  121. package/lib/esm/client/client.js +0 -83
  122. package/lib/esm/client/index.d.ts +0 -1
  123. package/lib/esm/client/index.js +0 -1
  124. package/lib/esm/models/bedrock-chat-model.d.ts +0 -79
  125. package/lib/esm/models/bedrock-chat-model.js +0 -298
  126. package/lib/esm/models/claude-chat-model.d.ts +0 -114
  127. package/lib/esm/models/claude-chat-model.js +0 -310
  128. package/lib/esm/models/deepseek-chat-model.d.ts +0 -23
  129. package/lib/esm/models/deepseek-chat-model.js +0 -31
  130. package/lib/esm/models/gemini-chat-model.d.ts +0 -23
  131. package/lib/esm/models/gemini-chat-model.js +0 -31
  132. package/lib/esm/models/ollama-chat-model.d.ts +0 -22
  133. package/lib/esm/models/ollama-chat-model.js +0 -30
  134. package/lib/esm/models/open-router-chat-model.d.ts +0 -22
  135. package/lib/esm/models/open-router-chat-model.js +0 -30
  136. package/lib/esm/models/openai-chat-model.d.ts +0 -166
  137. package/lib/esm/models/openai-chat-model.js +0 -405
  138. package/lib/esm/models/xai-chat-model.d.ts +0 -21
  139. package/lib/esm/models/xai-chat-model.js +0 -29
  140. package/lib/esm/server/error.d.ts +0 -15
  141. package/lib/esm/server/error.js +0 -18
  142. package/lib/esm/server/index.d.ts +0 -2
  143. package/lib/esm/server/index.js +0 -2
  144. package/lib/esm/server/server.d.ts +0 -135
  145. package/lib/esm/server/server.js +0 -181
@@ -1,317 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- exports.ClaudeChatModel = exports.claudeChatModelOptionsSchema = void 0;
7
- const sdk_1 = __importDefault(require("@anthropic-ai/sdk"));
8
- const zod_1 = require("zod");
9
- const json_schema_js_1 = require("../utils/json-schema.js");
10
- const model_utils_js_1 = require("../utils/model-utils.js");
11
- const stream_utils_js_1 = require("../utils/stream-utils.js");
12
- const type_utils_js_1 = require("../utils/type-utils.js");
13
- const chat_model_js_1 = require("./chat-model.js");
14
- const CHAT_MODEL_CLAUDE_DEFAULT_MODEL = "claude-3-7-sonnet-latest";
15
- /**
16
- * @hidden
17
- */
18
- exports.claudeChatModelOptionsSchema = zod_1.z.object({
19
- apiKey: zod_1.z.string().optional(),
20
- model: zod_1.z.string().optional(),
21
- modelOptions: zod_1.z
22
- .object({
23
- model: zod_1.z.string().optional(),
24
- temperature: zod_1.z.number().optional(),
25
- topP: zod_1.z.number().optional(),
26
- frequencyPenalty: zod_1.z.number().optional(),
27
- presencePenalty: zod_1.z.number().optional(),
28
- parallelToolCalls: zod_1.z.boolean().optional().default(true),
29
- })
30
- .optional(),
31
- });
32
- /**
33
- * Implementation of the ChatModel interface for Anthropic's Claude API
34
- *
35
- * This model provides access to Claude's capabilities including:
36
- * - Text generation
37
- * - Tool use
38
- * - JSON structured output
39
- *
40
- * Default model: 'claude-3-7-sonnet-latest'
41
- *
42
- * @example
43
- * Here's how to create and use a Claude chat model:
44
- * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model}
45
- *
46
- * @example
47
- * Here's an example with streaming response:
48
- * {@includeCode ../../test/models/claude-chat-model.test.ts#example-claude-chat-model-streaming-async-generator}
49
- */
50
- class ClaudeChatModel extends chat_model_js_1.ChatModel {
51
- options;
52
- constructor(options) {
53
- if (options)
54
- (0, type_utils_js_1.checkArguments)("ClaudeChatModel", exports.claudeChatModelOptionsSchema, options);
55
- super();
56
- this.options = options;
57
- }
58
- /**
59
- * @hidden
60
- */
61
- _client;
62
- get client() {
63
- const apiKey = this.options?.apiKey || process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_API_KEY;
64
- if (!apiKey)
65
- throw new Error("Api Key is required for ClaudeChatModel");
66
- this._client ??= new sdk_1.default({ apiKey });
67
- return this._client;
68
- }
69
- get modelOptions() {
70
- return this.options?.modelOptions;
71
- }
72
- /**
73
- * Process the input using Claude's chat model
74
- * @param input - The input to process
75
- * @returns The processed output from the model
76
- */
77
- process(input) {
78
- return this._process(input);
79
- }
80
- async _process(input) {
81
- const model = this.options?.model || CHAT_MODEL_CLAUDE_DEFAULT_MODEL;
82
- const disableParallelToolUse = input.modelOptions?.parallelToolCalls === false ||
83
- this.modelOptions?.parallelToolCalls === false;
84
- const body = {
85
- model,
86
- temperature: input.modelOptions?.temperature ?? this.modelOptions?.temperature,
87
- top_p: input.modelOptions?.topP ?? this.modelOptions?.topP,
88
- // TODO: make dynamic based on model https://docs.anthropic.com/en/docs/about-claude/models/all-models
89
- max_tokens: /claude-3-[5|7]/.test(model) ? 8192 : 4096,
90
- ...convertMessages(input),
91
- ...convertTools({ ...input, disableParallelToolUse }),
92
- };
93
- const stream = this.client.messages.stream({
94
- ...body,
95
- stream: true,
96
- });
97
- if (input.responseFormat?.type !== "json_schema") {
98
- return this.extractResultFromClaudeStream(stream, true);
99
- }
100
- const result = await this.extractResultFromClaudeStream(stream);
101
- // Claude doesn't support json_schema response and tool calls in the same request,
102
- // so we need to make a separate request for json_schema response when the tool calls is empty
103
- if (!result.toolCalls?.length && input.responseFormat?.type === "json_schema") {
104
- const output = await this.requestStructuredOutput(body, input.responseFormat);
105
- return {
106
- ...output,
107
- // merge usage from both requests
108
- usage: (0, model_utils_js_1.mergeUsage)(result.usage, output.usage),
109
- };
110
- }
111
- return result;
112
- }
113
- async extractResultFromClaudeStream(stream, streaming) {
114
- const logs = [];
115
- const result = new ReadableStream({
116
- async start(controller) {
117
- try {
118
- const toolCalls = [];
119
- let usage;
120
- let model;
121
- for await (const chunk of stream) {
122
- if (chunk.type === "message_start") {
123
- if (!model) {
124
- model = chunk.message.model;
125
- controller.enqueue({ delta: { json: { model } } });
126
- }
127
- const { input_tokens, output_tokens } = chunk.message.usage;
128
- usage = {
129
- inputTokens: input_tokens,
130
- outputTokens: output_tokens,
131
- };
132
- }
133
- if (chunk.type === "message_delta" && usage) {
134
- usage.outputTokens = chunk.usage.output_tokens;
135
- }
136
- logs.push(JSON.stringify(chunk));
137
- // handle streaming text
138
- if (chunk.type === "content_block_delta" && chunk.delta.type === "text_delta") {
139
- controller.enqueue({ delta: { text: { text: chunk.delta.text } } });
140
- }
141
- if (chunk.type === "content_block_start" && chunk.content_block.type === "tool_use") {
142
- toolCalls[chunk.index] = {
143
- type: "function",
144
- id: chunk.content_block.id,
145
- function: {
146
- name: chunk.content_block.name,
147
- arguments: {},
148
- },
149
- args: "",
150
- };
151
- }
152
- if (chunk.type === "content_block_delta" && chunk.delta.type === "input_json_delta") {
153
- const call = toolCalls[chunk.index];
154
- if (!call)
155
- throw new Error("Tool call not found");
156
- call.args += chunk.delta.partial_json;
157
- }
158
- }
159
- controller.enqueue({ delta: { json: { usage } } });
160
- if (toolCalls.length) {
161
- controller.enqueue({
162
- delta: {
163
- json: {
164
- toolCalls: toolCalls
165
- .map(({ args, ...c }) => ({
166
- ...c,
167
- function: {
168
- ...c.function,
169
- // NOTE: claude may return a blank string for empty object (the tool's input schema is a empty object)
170
- arguments: args.trim() ? (0, json_schema_js_1.parseJSON)(args) : {},
171
- },
172
- }))
173
- .filter(type_utils_js_1.isNonNullable),
174
- },
175
- },
176
- });
177
- }
178
- controller.close();
179
- }
180
- catch (error) {
181
- controller.error(error);
182
- }
183
- },
184
- });
185
- return streaming ? result : await (0, stream_utils_js_1.agentResponseStreamToObject)(result);
186
- }
187
- async requestStructuredOutput(body, responseFormat) {
188
- if (responseFormat?.type !== "json_schema") {
189
- throw new Error("Expected json_schema response format");
190
- }
191
- const result = await this.client.messages.create({
192
- ...body,
193
- tools: [
194
- {
195
- name: "generate_json",
196
- description: "Generate a json result by given context",
197
- input_schema: responseFormat.jsonSchema.schema,
198
- },
199
- ],
200
- tool_choice: {
201
- type: "tool",
202
- name: "generate_json",
203
- disable_parallel_tool_use: true,
204
- },
205
- stream: false,
206
- });
207
- const jsonTool = result.content.find((i) => i.type === "tool_use" && i.name === "generate_json");
208
- if (!jsonTool)
209
- throw new Error("Json tool not found");
210
- return {
211
- json: jsonTool.input,
212
- model: result.model,
213
- usage: {
214
- inputTokens: result.usage.input_tokens,
215
- outputTokens: result.usage.output_tokens,
216
- },
217
- };
218
- }
219
- }
220
- exports.ClaudeChatModel = ClaudeChatModel;
221
- function convertMessages({ messages, responseFormat }) {
222
- const systemMessages = [];
223
- const msgs = [];
224
- for (const msg of messages) {
225
- if (msg.role === "system") {
226
- if (typeof msg.content !== "string")
227
- throw new Error("System message must have content");
228
- systemMessages.push(msg.content);
229
- }
230
- else if (msg.role === "tool") {
231
- if (!msg.toolCallId)
232
- throw new Error("Tool message must have toolCallId");
233
- if (typeof msg.content !== "string")
234
- throw new Error("Tool message must have string content");
235
- msgs.push({
236
- role: "user",
237
- content: [{ type: "tool_result", tool_use_id: msg.toolCallId, content: msg.content }],
238
- });
239
- }
240
- else if (msg.role === "user") {
241
- if (!msg.content)
242
- throw new Error("User message must have content");
243
- msgs.push({ role: "user", content: convertContent(msg.content) });
244
- }
245
- else if (msg.role === "agent") {
246
- if (msg.toolCalls?.length) {
247
- msgs.push({
248
- role: "assistant",
249
- content: msg.toolCalls.map((i) => ({
250
- type: "tool_use",
251
- id: i.id,
252
- name: i.function.name,
253
- input: i.function.arguments,
254
- })),
255
- });
256
- }
257
- else if (msg.content) {
258
- msgs.push({ role: "assistant", content: convertContent(msg.content) });
259
- }
260
- else {
261
- throw new Error("Agent message must have content or toolCalls");
262
- }
263
- }
264
- }
265
- if (responseFormat?.type === "json_schema") {
266
- systemMessages.push(`You should provide a json response with schema: ${JSON.stringify(responseFormat.jsonSchema.schema)}`);
267
- }
268
- const system = systemMessages.join("\n").trim() || undefined;
269
- // Claude requires at least one message, so we add a system message if there are no messages
270
- if (msgs.length === 0) {
271
- if (!system)
272
- throw new Error("No messages provided");
273
- return { messages: [{ role: "user", content: system }] };
274
- }
275
- return { messages: msgs, system };
276
- }
277
- function convertContent(content) {
278
- if (typeof content === "string")
279
- return content;
280
- if (Array.isArray(content)) {
281
- return content.map((item) => item.type === "image_url"
282
- ? { type: "image", source: { type: "url", url: item.url } }
283
- : { type: "text", text: item.text });
284
- }
285
- throw new Error("Invalid chat message content");
286
- }
287
- function convertTools({ tools, toolChoice, disableParallelToolUse, }) {
288
- let choice;
289
- if (typeof toolChoice === "object" && "type" in toolChoice && toolChoice.type === "function") {
290
- choice = {
291
- type: "tool",
292
- name: toolChoice.function.name,
293
- disable_parallel_tool_use: disableParallelToolUse,
294
- };
295
- }
296
- else if (toolChoice === "required") {
297
- choice = { type: "any", disable_parallel_tool_use: disableParallelToolUse };
298
- }
299
- else if (toolChoice === "auto") {
300
- choice = { type: "auto", disable_parallel_tool_use: disableParallelToolUse };
301
- }
302
- else if (toolChoice === "none") {
303
- choice = { type: "none" };
304
- }
305
- return {
306
- tools: tools?.length
307
- ? tools.map((i) => ({
308
- name: i.function.name,
309
- description: i.function.description,
310
- input_schema: (0, type_utils_js_1.isEmpty)(i.function.parameters)
311
- ? { type: "object" }
312
- : i.function.parameters,
313
- }))
314
- : undefined,
315
- tool_choice: choice,
316
- };
317
- }
@@ -1,23 +0,0 @@
1
- import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
- /**
3
- * Implementation of the ChatModel interface for DeepSeek's API
4
- *
5
- * This model uses OpenAI-compatible API format to interact with DeepSeek's models,
6
- * but with specific configuration and capabilities for DeepSeek.
7
- *
8
- * Default model: 'deepseek-chat'
9
- *
10
- * @example
11
- * Here's how to create and use a DeepSeek chat model:
12
- * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model}
13
- *
14
- * @example
15
- * Here's an example with streaming response:
16
- * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model-streaming}
17
- */
18
- export declare class DeepSeekChatModel extends OpenAIChatModel {
19
- constructor(options?: OpenAIChatModelOptions);
20
- protected apiKeyEnvName: string;
21
- protected supportsNativeStructuredOutputs: boolean;
22
- protected supportsToolsEmptyParameters: boolean;
23
- }
@@ -1,35 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.DeepSeekChatModel = void 0;
4
- const openai_chat_model_js_1 = require("./openai-chat-model.js");
5
- const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
6
- const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
7
- /**
8
- * Implementation of the ChatModel interface for DeepSeek's API
9
- *
10
- * This model uses OpenAI-compatible API format to interact with DeepSeek's models,
11
- * but with specific configuration and capabilities for DeepSeek.
12
- *
13
- * Default model: 'deepseek-chat'
14
- *
15
- * @example
16
- * Here's how to create and use a DeepSeek chat model:
17
- * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model}
18
- *
19
- * @example
20
- * Here's an example with streaming response:
21
- * {@includeCode ../../test/models/deepseek-chat-model.test.ts#example-deepseek-chat-model-streaming}
22
- */
23
- class DeepSeekChatModel extends openai_chat_model_js_1.OpenAIChatModel {
24
- constructor(options) {
25
- super({
26
- ...options,
27
- model: options?.model || DEEPSEEK_DEFAULT_CHAT_MODEL,
28
- baseURL: options?.baseURL || DEEPSEEK_BASE_URL,
29
- });
30
- }
31
- apiKeyEnvName = "DEEPSEEK_API_KEY";
32
- supportsNativeStructuredOutputs = false;
33
- supportsToolsEmptyParameters = false;
34
- }
35
- exports.DeepSeekChatModel = DeepSeekChatModel;
@@ -1,23 +0,0 @@
1
- import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
- /**
3
- * Implementation of the ChatModel interface for Google's Gemini API
4
- *
5
- * This model uses OpenAI-compatible API format to interact with Google's Gemini models,
6
- * providing access to models like Gemini 1.5 and Gemini 2.0.
7
- *
8
- * @example
9
- * Here's how to create and use a Gemini chat model:
10
- * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model}
11
- *
12
- * @example
13
- * Here's an example with streaming response:
14
- * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model-streaming}
15
- */
16
- export declare class GeminiChatModel extends OpenAIChatModel {
17
- constructor(options?: OpenAIChatModelOptions);
18
- protected apiKeyEnvName: string;
19
- protected supportsEndWithSystemMessage: boolean;
20
- protected supportsToolsUseWithJsonSchema: boolean;
21
- protected supportsParallelToolCalls: boolean;
22
- protected supportsToolStreaming: boolean;
23
- }
@@ -1,35 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.GeminiChatModel = void 0;
4
- const openai_chat_model_js_1 = require("./openai-chat-model.js");
5
- const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
6
- const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
7
- /**
8
- * Implementation of the ChatModel interface for Google's Gemini API
9
- *
10
- * This model uses OpenAI-compatible API format to interact with Google's Gemini models,
11
- * providing access to models like Gemini 1.5 and Gemini 2.0.
12
- *
13
- * @example
14
- * Here's how to create and use a Gemini chat model:
15
- * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model}
16
- *
17
- * @example
18
- * Here's an example with streaming response:
19
- * {@includeCode ../../test/models/gemini-chat-model.test.ts#example-gemini-chat-model-streaming}
20
- */
21
- class GeminiChatModel extends openai_chat_model_js_1.OpenAIChatModel {
22
- constructor(options) {
23
- super({
24
- ...options,
25
- model: options?.model || GEMINI_DEFAULT_CHAT_MODEL,
26
- baseURL: options?.baseURL || GEMINI_BASE_URL,
27
- });
28
- }
29
- apiKeyEnvName = "GEMINI_API_KEY";
30
- supportsEndWithSystemMessage = false;
31
- supportsToolsUseWithJsonSchema = false;
32
- supportsParallelToolCalls = false;
33
- supportsToolStreaming = false;
34
- }
35
- exports.GeminiChatModel = GeminiChatModel;
@@ -1,22 +0,0 @@
1
- import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
- /**
3
- * Implementation of the ChatModel interface for Ollama
4
- *
5
- * This model allows you to run open-source LLMs locally using Ollama,
6
- * with an OpenAI-compatible API interface.
7
- *
8
- * Default model: 'llama3.2'
9
- *
10
- * @example
11
- * Here's how to create and use an Ollama chat model:
12
- * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model}
13
- *
14
- * @example
15
- * Here's an example with streaming response:
16
- * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model-streaming}
17
- */
18
- export declare class OllamaChatModel extends OpenAIChatModel {
19
- constructor(options?: OpenAIChatModelOptions);
20
- protected apiKeyEnvName: string;
21
- protected apiKeyDefault: string;
22
- }
@@ -1,34 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.OllamaChatModel = void 0;
4
- const openai_chat_model_js_1 = require("./openai-chat-model.js");
5
- const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
6
- const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
7
- /**
8
- * Implementation of the ChatModel interface for Ollama
9
- *
10
- * This model allows you to run open-source LLMs locally using Ollama,
11
- * with an OpenAI-compatible API interface.
12
- *
13
- * Default model: 'llama3.2'
14
- *
15
- * @example
16
- * Here's how to create and use an Ollama chat model:
17
- * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model}
18
- *
19
- * @example
20
- * Here's an example with streaming response:
21
- * {@includeCode ../../test/models/ollama-chat-model.test.ts#example-ollama-chat-model-streaming}
22
- */
23
- class OllamaChatModel extends openai_chat_model_js_1.OpenAIChatModel {
24
- constructor(options) {
25
- super({
26
- ...options,
27
- model: options?.model || OLLAMA_DEFAULT_CHAT_MODEL,
28
- baseURL: options?.baseURL || process.env.OLLAMA_BASE_URL || OLLAMA_DEFAULT_BASE_URL,
29
- });
30
- }
31
- apiKeyEnvName = "OLLAMA_API_KEY";
32
- apiKeyDefault = "ollama";
33
- }
34
- exports.OllamaChatModel = OllamaChatModel;
@@ -1,22 +0,0 @@
1
- import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
2
- /**
3
- * Implementation of the ChatModel interface for OpenRouter service
4
- *
5
- * OpenRouter provides access to a variety of large language models through a unified API.
6
- * This implementation uses the OpenAI-compatible interface to connect to OpenRouter's service.
7
- *
8
- * Default model: 'openai/gpt-4o'
9
- *
10
- * @example
11
- * Here's how to create and use an OpenRouter chat model:
12
- * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model}
13
- *
14
- * @example
15
- * Here's an example with streaming response:
16
- * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model-streaming}
17
- */
18
- export declare class OpenRouterChatModel extends OpenAIChatModel {
19
- constructor(options?: OpenAIChatModelOptions);
20
- protected apiKeyEnvName: string;
21
- protected supportsParallelToolCalls: boolean;
22
- }
@@ -1,34 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.OpenRouterChatModel = void 0;
4
- const openai_chat_model_js_1 = require("./openai-chat-model.js");
5
- const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
6
- const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
7
- /**
8
- * Implementation of the ChatModel interface for OpenRouter service
9
- *
10
- * OpenRouter provides access to a variety of large language models through a unified API.
11
- * This implementation uses the OpenAI-compatible interface to connect to OpenRouter's service.
12
- *
13
- * Default model: 'openai/gpt-4o'
14
- *
15
- * @example
16
- * Here's how to create and use an OpenRouter chat model:
17
- * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model}
18
- *
19
- * @example
20
- * Here's an example with streaming response:
21
- * {@includeCode ../../test/models/open-router-chat-model.test.ts#example-openrouter-chat-model-streaming}
22
- */
23
- class OpenRouterChatModel extends openai_chat_model_js_1.OpenAIChatModel {
24
- constructor(options) {
25
- super({
26
- ...options,
27
- model: options?.model || OPEN_ROUTER_DEFAULT_CHAT_MODEL,
28
- baseURL: options?.baseURL || OPEN_ROUTER_BASE_URL,
29
- });
30
- }
31
- apiKeyEnvName = "OPEN_ROUTER_API_KEY";
32
- supportsParallelToolCalls = false;
33
- }
34
- exports.OpenRouterChatModel = OpenRouterChatModel;