@aigne/core 1.7.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/lib/cjs/agents/agent.js +2 -2
- package/lib/cjs/agents/mcp-agent.d.ts +2 -3
- package/lib/cjs/agents/mcp-agent.js +10 -6
- package/lib/cjs/execution-engine/execution-engine.js +1 -1
- package/lib/cjs/loader/agent-js.js +1 -1
- package/lib/cjs/loader/agent-yaml.d.ts +1 -0
- package/lib/cjs/loader/agent-yaml.js +4 -0
- package/lib/cjs/loader/index.d.ts +72 -1
- package/lib/cjs/loader/index.js +29 -13
- package/lib/cjs/models/claude-chat-model.js +1 -1
- package/lib/cjs/models/deepseek-chat-model.d.ts +7 -0
- package/lib/cjs/models/deepseek-chat-model.js +19 -0
- package/lib/cjs/models/gemini-chat-model.d.ts +8 -0
- package/lib/cjs/models/gemini-chat-model.js +20 -0
- package/lib/cjs/models/ollama-chat-model.d.ts +6 -0
- package/lib/cjs/models/ollama-chat-model.js +18 -0
- package/lib/cjs/models/open-router-chat-model.d.ts +5 -0
- package/lib/cjs/models/open-router-chat-model.js +17 -0
- package/lib/cjs/models/openai-chat-model.d.ts +23 -1
- package/lib/cjs/models/openai-chat-model.js +182 -78
- package/lib/cjs/models/xai-chat-model.d.ts +3 -11
- package/lib/cjs/models/xai-chat-model.js +1 -14
- package/lib/cjs/prompt/prompt-builder.js +3 -0
- package/lib/cjs/utils/prompts.d.ts +1 -0
- package/lib/cjs/utils/prompts.js +13 -0
- package/lib/cjs/utils/type-utils.d.ts +1 -1
- package/lib/cjs/utils/type-utils.js +1 -1
- package/lib/dts/agents/mcp-agent.d.ts +2 -3
- package/lib/dts/loader/agent-yaml.d.ts +1 -0
- package/lib/dts/loader/index.d.ts +72 -1
- package/lib/dts/models/deepseek-chat-model.d.ts +7 -0
- package/lib/dts/models/gemini-chat-model.d.ts +8 -0
- package/lib/dts/models/ollama-chat-model.d.ts +6 -0
- package/lib/dts/models/open-router-chat-model.d.ts +5 -0
- package/lib/dts/models/openai-chat-model.d.ts +23 -1
- package/lib/dts/models/xai-chat-model.d.ts +3 -11
- package/lib/dts/utils/prompts.d.ts +1 -0
- package/lib/dts/utils/type-utils.d.ts +1 -1
- package/lib/esm/agents/agent.js +3 -3
- package/lib/esm/agents/mcp-agent.d.ts +2 -3
- package/lib/esm/agents/mcp-agent.js +10 -6
- package/lib/esm/execution-engine/execution-engine.js +1 -1
- package/lib/esm/loader/agent-js.js +1 -1
- package/lib/esm/loader/agent-yaml.d.ts +1 -0
- package/lib/esm/loader/agent-yaml.js +4 -0
- package/lib/esm/loader/index.d.ts +72 -1
- package/lib/esm/loader/index.js +28 -13
- package/lib/esm/models/claude-chat-model.js +1 -1
- package/lib/esm/models/deepseek-chat-model.d.ts +7 -0
- package/lib/esm/models/deepseek-chat-model.js +15 -0
- package/lib/esm/models/gemini-chat-model.d.ts +8 -0
- package/lib/esm/models/gemini-chat-model.js +16 -0
- package/lib/esm/models/ollama-chat-model.d.ts +6 -0
- package/lib/esm/models/ollama-chat-model.js +14 -0
- package/lib/esm/models/open-router-chat-model.d.ts +5 -0
- package/lib/esm/models/open-router-chat-model.js +13 -0
- package/lib/esm/models/openai-chat-model.d.ts +23 -1
- package/lib/esm/models/openai-chat-model.js +178 -78
- package/lib/esm/models/xai-chat-model.d.ts +3 -11
- package/lib/esm/models/xai-chat-model.js +1 -11
- package/lib/esm/prompt/prompt-builder.js +3 -0
- package/lib/esm/utils/prompts.d.ts +1 -0
- package/lib/esm/utils/prompts.js +10 -0
- package/lib/esm/utils/type-utils.d.ts +1 -1
- package/lib/esm/utils/type-utils.js +1 -1
- package/package.json +6 -3
package/CHANGELOG.md
CHANGED
@@ -22,6 +22,31 @@
 * rename @aigne/core-next to @aigne/core ([3a81009](https://github.com/AIGNE-io/aigne-framework/commit/3a8100962c81813217b687ae28e8de604419c622))
 * use text resource from MCP correctly ([8b9eba8](https://github.com/AIGNE-io/aigne-framework/commit/8b9eba83352ec096a2a5d4f410d4c4bde7420bce))
 
+## [1.9.0](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.8.0...core-v1.9.0) (2025-04-20)
+
+
+### Features
+
+* **core:** add model adapters for DeepSeek, Gemini, OpenRouter, and Ollama ([#53](https://github.com/AIGNE-io/aigne-framework/issues/53)) ([5d40546](https://github.com/AIGNE-io/aigne-framework/commit/5d40546bd5ddb70233d27ea3b20e5711b2af320a))
+
+
+### Bug Fixes
+
+* **dx:** custom error message for agent input/output validation ([#71](https://github.com/AIGNE-io/aigne-framework/issues/71)) ([5145673](https://github.com/AIGNE-io/aigne-framework/commit/5145673aaae2cd6665912e80b1c644e974c42b2f))
+
+## [1.8.0](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.7.0...core-v1.8.0) (2025-04-17)
+
+
+### Features
+
+* **ci:** support coverage examples with model matrix ([#59](https://github.com/AIGNE-io/aigne-framework/issues/59)) ([1edd704](https://github.com/AIGNE-io/aigne-framework/commit/1edd70426b80a69e3751b2d5fe818297711d0777))
+* **cli:** support model and download customization for aigne run ([#61](https://github.com/AIGNE-io/aigne-framework/issues/61)) ([51f6619](https://github.com/AIGNE-io/aigne-framework/commit/51f6619e6c591a84f1f2339b26ef66d89fa9486e))
+
+
+### Bug Fixes
+
+* **mcp:** set default timeout to 60s ([#67](https://github.com/AIGNE-io/aigne-framework/issues/67)) ([40dc029](https://github.com/AIGNE-io/aigne-framework/commit/40dc029b7795650283a505fd71b9566e5f0a4471))
+
 ## [1.7.0](https://github.com/AIGNE-io/aigne-framework/compare/core-v1.6.0...core-v1.7.0) (2025-04-15)
package/lib/cjs/agents/agent.js
CHANGED
@@ -137,12 +137,12 @@ class Agent {
         if (!this.disableEvents)
             ctx.emit("agentStarted", { agent: this, input: message });
         try {
-            const parsedInput = this.inputSchema
+            const parsedInput = (0, type_utils_js_1.checkArguments)(`Agent ${this.name} input`, this.inputSchema, message);
             this.preprocess(parsedInput, ctx);
             this.checkContextStatus(ctx);
             const output = await this.process(parsedInput, ctx)
                 .then((output) => {
-                const parsedOutput = this.outputSchema
+                const parsedOutput = (0, type_utils_js_1.checkArguments)(`Agent ${this.name} output`, this.outputSchema, output);
                 return this.includeInputInOutput ? { ...parsedInput, ...parsedOutput } : parsedOutput;
             })
                 .then((output) => {
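Editor's note: the custom-error behavior comes from `checkArguments` in `utils/type-utils.js` (changed +1 -1 in this release, body not shown in this diff). A minimal sketch of what such a helper could look like, assuming it wraps zod's `safeParse`; the message format is hypothetical:

```ts
import { z } from "zod";

// Hypothetical reconstruction — the real helper lives in utils/type-utils.js
// and its implementation is not part of this diff.
function checkArguments<T extends z.ZodType>(name: string, schema: T, args: unknown): z.infer<T> {
  const result = schema.safeParse(args);
  if (!result.success) {
    const issues = result.error.issues
      .map((i) => `${i.path.join(".") || "(root)"}: ${i.message}`)
      .join("; ");
    // Prefixing with the caller-supplied label yields messages like
    // "Agent chatbot input: message: Required" instead of a bare ZodError.
    throw new TypeError(`${name} check failed: ${issues}`);
  }
  return result.data;
}
```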
package/lib/cjs/agents/mcp-agent.d.ts
CHANGED

@@ -22,7 +22,7 @@ export type SSEServerParameters = {
     opts?: SSEClientTransportOptions;
     /**
      * The timeout for requests to the server, in milliseconds.
-     * @default
+     * @default 60000
      */
     timeout?: number;
     /**
@@ -41,7 +41,7 @@ export declare class MCPAgent extends Agent {
     static from(options: MCPAgentOptions): MCPAgent;
     private static fromTransport;
     constructor(options: MCPAgentOptions);
-
+    client: Client;
     readonly prompts: MCPPrompt[] & {
         [key: string]: MCPPrompt;
     };
@@ -71,7 +71,6 @@ export interface MCPBaseOptions<I extends Message = Message, O extends Message =
 export declare abstract class MCPBase<I extends Message, O extends Message> extends Agent<I, O> {
     constructor(options: MCPBaseOptions<I, O>);
     protected client: ClientWithReconnect;
-    protected get mcpServer(): string | undefined;
 }
 export declare class MCPTool extends MCPBase<Message, CallToolResult> {
     process(input: Message): Promise<CallToolResult>;
package/lib/cjs/agents/mcp-agent.js
CHANGED

@@ -17,6 +17,11 @@ const agent_js_1 = require("./agent.js");
 const MCP_AGENT_CLIENT_NAME = "MCPAgent";
 const MCP_AGENT_CLIENT_VERSION = "0.0.1";
 const DEFAULT_MAX_RECONNECTS = 10;
+const DEFAULT_TIMEOUT = () => zod_1.z.coerce
+    .number()
+    .int()
+    .min(0)
+    .safeParse(process.env.MCP_TIMEOUT || process.env.TIMEOUT).data || 60e3;
 function isSSEServerParameters(options) {
     return "url" in options && typeof options.url === "string";
 }
@@ -140,7 +145,9 @@ class ClientWithReconnect extends index_js_1.Client {
             throw new Error("reconnect requires a transportCreator");
         await (0, p_retry_1.default)(async () => {
             await this.close();
-            await this.connect(await transportCreator()
+            await this.connect(await transportCreator(), {
+                timeout: this.reconnectOptions?.timeout ?? DEFAULT_TIMEOUT(),
+            });
         }, {
             retries: this.reconnectOptions?.maxReconnects ?? DEFAULT_MAX_RECONNECTS,
             shouldRetry: this.shouldReconnect,
@@ -149,8 +156,8 @@ class ClientWithReconnect extends index_js_1.Client {
     }
     async request(request, resultSchema, options) {
         const mergedOptions = {
-            ...
-            timeout: options?.timeout ??
+            ...options,
+            timeout: options?.timeout ?? DEFAULT_TIMEOUT(),
         };
         try {
             return await super.request(request, resultSchema, mergedOptions);
@@ -171,9 +178,6 @@ class MCPBase extends agent_js_1.Agent {
         this.client = options.client;
     }
     client;
-    get mcpServer() {
-        return getMCPServerName(this.client);
-    }
 }
 exports.MCPBase = MCPBase;
 class MCPTool extends MCPBase {
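Editor's note: the MCP request timeout now defaults to 60 s and can be overridden per agent or process-wide via the `MCP_TIMEOUT` / `TIMEOUT` environment variables read by `DEFAULT_TIMEOUT` above. A hypothetical configuration sketch, assuming `MCPAgent` is re-exported from the package root; the URL is a placeholder:

```ts
import { MCPAgent } from "@aigne/core";

// Process-wide override, captured by DEFAULT_TIMEOUT() at request time:
//   MCP_TIMEOUT=120000 node app.js

// Per-agent override via SSEServerParameters.timeout (milliseconds):
const agent = MCPAgent.from({
  url: "https://example.com/mcp/sse", // placeholder SSE endpoint
  timeout: 120_000, // falls back to 60_000 when omitted
});
```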
package/lib/cjs/execution-engine/execution-engine.js
CHANGED

@@ -12,8 +12,8 @@ class ExecutionEngine {
     static async load({ path, ...options }) {
         const { model, agents, tools, ...aigne } = await (0, index_js_1.load)({ path });
         return new ExecutionEngine({
-            model,
             ...options,
+            model: options.model || model,
             name: options.name || aigne.name || undefined,
             description: options.description || aigne.description || undefined,
             agents: agents.concat(options.agents ?? []),
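Editor's note: this reordering makes an explicitly passed model win over the `chat_model` declared in `aigne.yaml` (previously the spread of `options` could not override it). A hypothetical usage sketch, assuming both classes are re-exported from the package root:

```ts
import { ExecutionEngine, OpenAIChatModel } from "@aigne/core";

// The model passed here now takes precedence over the chat_model
// entry in aigne.yaml; omit it to use the file's configuration.
const engine = await ExecutionEngine.load({
  path: "./my-project", // hypothetical directory containing aigne.yaml
  model: new OpenAIChatModel({ model: "gpt-4o-mini" }),
});
```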
package/lib/cjs/loader/agent-js.js
CHANGED

@@ -58,7 +58,7 @@ async function loadAgentFromJsFile(path) {
         throw new Error(`Agent file ${path} must export a default function, but got ${typeof agent}`);
     }
     return (0, type_utils_js_1.tryOrThrow)(() => agentJsFileSchema.parse({
-        name: agent.name,
+        name: agent.agent_name || agent.name,
         description: agent.description,
         input_schema: agent.input_schema,
         output_schema: agent.output_schema,
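Editor's note: since the default export is a function, the loader previously used its intrinsic `.name`; `agent_name` now gives authors an explicit override, useful when bundlers mangle or strip function names. A hypothetical agent module (file name and fields other than `agent_name` are illustrative):

```ts
// chat.ts — hypothetical agent module consumed by loadAgentFromJsFile
async function chat(input: { message: string }) {
  return { text: `echo: ${input.message}` };
}

// agent_name is checked before the intrinsic function .name:
export default Object.assign(chat, { agent_name: "chat-bot" });
```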
package/lib/cjs/loader/agent-yaml.d.ts
CHANGED

@@ -10,6 +10,7 @@ export declare function loadAgentFromYamlFile(path: string): Promise<{
     }, {
         [x: string]: any;
     }> | undefined;
+    tool_choice?: "auto" | "none" | "required" | "router" | undefined;
     output_schema?: ZodObject<Record<string, ZodType<any, z.ZodTypeDef, any>>, z.UnknownKeysParam, z.ZodTypeAny, {
         [x: string]: any;
     }, {
package/lib/cjs/loader/agent-yaml.js
CHANGED

@@ -33,6 +33,10 @@ const agentFileSchema = zod_1.z.discriminatedUnion("type", [
             .array(zod_1.z.string())
             .nullish()
             .transform((v) => v ?? undefined),
+        tool_choice: zod_1.z
+            .union([zod_1.z.literal("auto"), zod_1.z.literal("none"), zod_1.z.literal("required"), zod_1.z.literal("router")])
+            .nullish()
+            .transform((v) => v ?? undefined),
     }),
     zod_1.z.object({
         type: zod_1.z.literal("mcp"),
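Editor's note: agent YAML files can now declare a `tool_choice` of `auto`, `none`, `required`, or `router`, which the loader forwards as `toolChoice` (see the `loadAgent` hunk in `loader/index.js` below). A hypothetical agent file; fields other than `tool_choice` follow the schema above but their exact names are illustrative:

```yaml
# chatbot.yaml — hypothetical agent definition
name: chatbot
description: Answers questions using its tools
tools:
  - ./weather.yaml
tool_choice: router # one of: auto | none | required | router
```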
package/lib/cjs/loader/index.d.ts
CHANGED

@@ -1,5 +1,6 @@
+import { z } from "zod";
 import { type Agent } from "../agents/agent.js";
-import type { ChatModel } from "../models/chat-model.js";
+import type { ChatModel, ChatModelOptions } from "../models/chat-model.js";
 export interface LoadOptions {
     path: string;
 }
@@ -19,6 +20,75 @@ export declare function load(options: LoadOptions): Promise<{
     } | null | undefined;
 }>;
 export declare function loadAgent(path: string): Promise<Agent>;
+export declare function loadModel(model?: z.infer<typeof aigneFileSchema>["chat_model"], modelOptions?: ChatModelOptions): Promise<ChatModel | undefined>;
+declare const aigneFileSchema: z.ZodObject<{
+    name: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+    description: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+    chat_model: z.ZodEffects<z.ZodOptional<z.ZodNullable<z.ZodUnion<[z.ZodString, z.ZodObject<{
+        provider: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+        name: z.ZodOptional<z.ZodNullable<z.ZodString>>;
+        temperature: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
+        top_p: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
+        frequent_penalty: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
+        presence_penalty: z.ZodOptional<z.ZodNullable<z.ZodNumber>>;
+    }, "strip", z.ZodTypeAny, {
+        name?: string | null | undefined;
+        temperature?: number | null | undefined;
+        provider?: string | null | undefined;
+        top_p?: number | null | undefined;
+        frequent_penalty?: number | null | undefined;
+        presence_penalty?: number | null | undefined;
+    }, {
+        name?: string | null | undefined;
+        temperature?: number | null | undefined;
+        provider?: string | null | undefined;
+        top_p?: number | null | undefined;
+        frequent_penalty?: number | null | undefined;
+        presence_penalty?: number | null | undefined;
+    }>]>>>, {
+        name?: string | null | undefined;
+        temperature?: number | null | undefined;
+        provider?: string | null | undefined;
+        top_p?: number | null | undefined;
+        frequent_penalty?: number | null | undefined;
+        presence_penalty?: number | null | undefined;
+    } | null | undefined, string | {
+        name?: string | null | undefined;
+        temperature?: number | null | undefined;
+        provider?: string | null | undefined;
+        top_p?: number | null | undefined;
+        frequent_penalty?: number | null | undefined;
+        presence_penalty?: number | null | undefined;
+    } | null | undefined>;
+    agents: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
+    tools: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
+}, "strip", z.ZodTypeAny, {
+    description?: string | null | undefined;
+    tools?: string[] | null | undefined;
+    name?: string | null | undefined;
+    chat_model?: {
+        name?: string | null | undefined;
+        temperature?: number | null | undefined;
+        provider?: string | null | undefined;
+        top_p?: number | null | undefined;
+        frequent_penalty?: number | null | undefined;
+        presence_penalty?: number | null | undefined;
+    } | null | undefined;
+    agents?: string[] | null | undefined;
+}, {
+    description?: string | null | undefined;
+    tools?: string[] | null | undefined;
+    name?: string | null | undefined;
+    chat_model?: string | {
+        name?: string | null | undefined;
+        temperature?: number | null | undefined;
+        provider?: string | null | undefined;
+        top_p?: number | null | undefined;
+        frequent_penalty?: number | null | undefined;
+        presence_penalty?: number | null | undefined;
+    } | null | undefined;
+    agents?: string[] | null | undefined;
+}>;
 export declare function loadAIGNEFile(path: string): Promise<{
     description?: string | null | undefined;
     tools?: string[] | null | undefined;
@@ -33,3 +103,4 @@ export declare function loadAIGNEFile(path: string): Promise<{
     } | null | undefined;
     agents?: string[] | null | undefined;
 }>;
+export {};
package/lib/cjs/loader/index.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.load = load;
 exports.loadAgent = loadAgent;
+exports.loadModel = loadModel;
 exports.loadAIGNEFile = loadAIGNEFile;
 const promises_1 = require("node:fs/promises");
 const node_path_1 = require("node:path");
@@ -11,12 +12,15 @@ const agent_js_1 = require("../agents/agent.js");
 const ai_agent_js_1 = require("../agents/ai-agent.js");
 const mcp_agent_js_1 = require("../agents/mcp-agent.js");
 const claude_chat_model_js_1 = require("../models/claude-chat-model.js");
+const deepseek_chat_model_js_1 = require("../models/deepseek-chat-model.js");
+const gemini_chat_model_js_1 = require("../models/gemini-chat-model.js");
+const ollama_chat_model_js_1 = require("../models/ollama-chat-model.js");
+const open_router_chat_model_js_1 = require("../models/open-router-chat-model.js");
 const openai_chat_model_js_1 = require("../models/openai-chat-model.js");
 const xai_chat_model_js_1 = require("../models/xai-chat-model.js");
 const type_utils_js_1 = require("../utils/type-utils.js");
 const agent_js_js_1 = require("./agent-js.js");
 const agent_yaml_js_1 = require("./agent-yaml.js");
-const DEFAULT_MODEL_PROVIDER = "openai";
 const AIGNE_FILE_NAME = ["aigne.yaml", "aigne.yml"];
 async function load(options) {
     const { path } = options;
@@ -54,6 +58,7 @@ async function loadAgent(path) {
         outputSchema: agent.output_schema,
         outputKey: agent.output_key,
         tools: await Promise.all((agent.tools ?? []).map((filename) => loadAgent((0, node_path_1.join)((0, node_path_1.dirname)(path), filename)))),
+        toolChoice: agent.tool_choice,
     });
 }
@@ -73,21 +78,32 @@ async function loadAgent(path) {
     }
     throw new Error(`Unsupported agent file type: ${path}`);
 }
-
-
-
+const { MODEL_PROVIDER, MODEL_NAME } = process.env;
+const DEFAULT_MODEL_PROVIDER = "openai";
+const DEFAULT_MODEL_NAME = "gpt-4o-mini";
+async function loadModel(model, modelOptions) {
     const params = {
-        model: model
-        temperature: model
-        topP: model
-        frequencyPenalty: model
-        presencePenalty: model
+        model: MODEL_NAME ?? model?.name ?? DEFAULT_MODEL_NAME,
+        temperature: model?.temperature ?? undefined,
+        topP: model?.top_p ?? undefined,
+        frequencyPenalty: model?.frequent_penalty ?? undefined,
+        presencePenalty: model?.presence_penalty ?? undefined,
     };
-    const availableModels = [
-
+    const availableModels = [
+        openai_chat_model_js_1.OpenAIChatModel,
+        claude_chat_model_js_1.ClaudeChatModel,
+        xai_chat_model_js_1.XAIChatModel,
+        gemini_chat_model_js_1.GeminiChatModel,
+        deepseek_chat_model_js_1.DeepSeekChatModel,
+        open_router_chat_model_js_1.OpenRouterChatModel,
+        ollama_chat_model_js_1.OllamaChatModel,
+    ];
+    const M = availableModels.find((m) => m.name
+        .toLowerCase()
+        .includes((MODEL_PROVIDER ?? model?.provider ?? DEFAULT_MODEL_PROVIDER).toLowerCase()));
     if (!M)
-        throw new Error(`Unsupported model: ${model
-    return new M(params);
+        throw new Error(`Unsupported model: ${model?.provider} ${model?.name}`);
+    return new M({ model: params.model, modelOptions: { ...params, ...modelOptions } });
 }
 const aigneFileSchema = zod_1.z.object({
     name: zod_1.z.string().nullish(),
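Editor's note: `loadModel` picks an adapter by matching the provider string against the class names in `availableModels` (e.g. "gemini" matches `GeminiChatModel`). A hypothetical usage sketch, assuming `loadModel` is re-exported from the package root; the `chat_model` values are illustrative:

```ts
import { loadModel } from "@aigne/core";

// Provider resolution: MODEL_PROVIDER env var ?? chat_model.provider ?? "openai".
// Model-name resolution: MODEL_NAME env var ?? chat_model.name ?? "gpt-4o-mini".
// Note: both env vars are captured when the loader module is first imported, so
// set them before launch, e.g.: MODEL_PROVIDER=deepseek MODEL_NAME=deepseek-chat node app.js
const model = await loadModel({
  provider: "gemini",
  name: "gemini-2.0-flash",
  temperature: 0.2,
});
```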
package/lib/cjs/models/claude-chat-model.js
CHANGED

@@ -36,7 +36,7 @@ class ClaudeChatModel extends chat_model_js_1.ChatModel {
     }
     _client;
     get client() {
-        const apiKey = this.options?.apiKey || process.env.CLAUDE_API_KEY;
+        const apiKey = this.options?.apiKey || process.env.ANTHROPIC_API_KEY || process.env.CLAUDE_API_KEY;
         if (!apiKey)
             throw new Error("Api Key is required for ClaudeChatModel");
         this._client ??= new sdk_1.default({ apiKey });
package/lib/cjs/models/deepseek-chat-model.d.ts
ADDED

@@ -0,0 +1,7 @@
+import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+export declare class DeepSeekChatModel extends OpenAIChatModel {
+    constructor(options?: OpenAIChatModelOptions);
+    protected apiKeyEnvName: string;
+    protected supportsNativeStructuredOutputs: boolean;
+    protected supportsToolsEmptyParameters: boolean;
+}
package/lib/cjs/models/deepseek-chat-model.js
ADDED

@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DeepSeekChatModel = void 0;
+const openai_chat_model_js_1 = require("./openai-chat-model.js");
+const DEEPSEEK_DEFAULT_CHAT_MODEL = "deepseek-chat";
+const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
+class DeepSeekChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+    constructor(options) {
+        super({
+            ...options,
+            model: options?.model || DEEPSEEK_DEFAULT_CHAT_MODEL,
+            baseURL: options?.baseURL || DEEPSEEK_BASE_URL,
+        });
+    }
+    apiKeyEnvName = "DEEPSEEK_API_KEY";
+    supportsNativeStructuredOutputs = false;
+    supportsToolsEmptyParameters = false;
+}
+exports.DeepSeekChatModel = DeepSeekChatModel;
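Editor's note: each new adapter is a thin subclass of `OpenAIChatModel` that swaps the base URL, default model, API-key env var, and capability flags. A usage sketch, assuming the class is re-exported from the package root; the explicit key is a placeholder:

```ts
import { DeepSeekChatModel } from "@aigne/core";

// Reads DEEPSEEK_API_KEY when apiKey is not passed; defaults to
// model "deepseek-chat" served from https://api.deepseek.com.
const model = new DeepSeekChatModel();

// Or override either default explicitly:
const custom = new DeepSeekChatModel({ model: "deepseek-chat", apiKey: "sk-..." });
```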
package/lib/cjs/models/gemini-chat-model.d.ts
ADDED

@@ -0,0 +1,8 @@
+import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+export declare class GeminiChatModel extends OpenAIChatModel {
+    constructor(options?: OpenAIChatModelOptions);
+    protected apiKeyEnvName: string;
+    protected supportsEndWithSystemMessage: boolean;
+    protected supportsToolsUseWithJsonSchema: boolean;
+    protected supportsParallelToolCalls: boolean;
+}
package/lib/cjs/models/gemini-chat-model.js
ADDED

@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeminiChatModel = void 0;
+const openai_chat_model_js_1 = require("./openai-chat-model.js");
+const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
+const GEMINI_DEFAULT_CHAT_MODEL = "gemini-2.0-flash";
+class GeminiChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+    constructor(options) {
+        super({
+            ...options,
+            model: options?.model || GEMINI_DEFAULT_CHAT_MODEL,
+            baseURL: options?.baseURL || GEMINI_BASE_URL,
+        });
+    }
+    apiKeyEnvName = "GEMINI_API_KEY";
+    supportsEndWithSystemMessage = false;
+    supportsToolsUseWithJsonSchema = false;
+    supportsParallelToolCalls = false;
+}
+exports.GeminiChatModel = GeminiChatModel;
package/lib/cjs/models/ollama-chat-model.d.ts
ADDED

@@ -0,0 +1,6 @@
+import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
+export declare class OllamaChatModel extends OpenAIChatModel {
+    constructor(options?: OpenAIChatModelOptions);
+    protected apiKeyEnvName: string;
+    protected apiKeyDefault: string;
+}
package/lib/cjs/models/ollama-chat-model.js
ADDED

@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OllamaChatModel = void 0;
+const openai_chat_model_js_1 = require("./openai-chat-model.js");
+const OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434/v1";
+const OLLAMA_DEFAULT_CHAT_MODEL = "llama3.2";
+class OllamaChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+    constructor(options) {
+        super({
+            ...options,
+            model: options?.model || OLLAMA_DEFAULT_CHAT_MODEL,
+            baseURL: options?.baseURL || process.env.OLLAMA_BASE_URL || OLLAMA_DEFAULT_BASE_URL,
+        });
+    }
+    apiKeyEnvName = "OLLAMA_API_KEY";
+    apiKeyDefault = "ollama";
+}
+exports.OllamaChatModel = OllamaChatModel;
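Editor's note: Ollama needs no real key — the adapter supplies the placeholder key "ollama" via `apiKeyDefault` and points at the local server, overridable through `OLLAMA_BASE_URL`. A hypothetical sketch, assuming the class is re-exported from the package root; the model tag is illustrative:

```ts
import { OllamaChatModel } from "@aigne/core";

// Defaults: model "llama3.2" served at http://localhost:11434/v1.
// Set OLLAMA_BASE_URL to target a remote Ollama instance instead.
const model = new OllamaChatModel({ model: "qwen2.5" });
```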
package/lib/cjs/models/open-router-chat-model.js
ADDED

@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OpenRouterChatModel = void 0;
+const openai_chat_model_js_1 = require("./openai-chat-model.js");
+const OPEN_ROUTER_DEFAULT_CHAT_MODEL = "openai/gpt-4o";
+const OPEN_ROUTER_BASE_URL = "https://openrouter.ai/api/v1";
+class OpenRouterChatModel extends openai_chat_model_js_1.OpenAIChatModel {
+    constructor(options) {
+        super({
+            ...options,
+            model: options?.model || OPEN_ROUTER_DEFAULT_CHAT_MODEL,
+            baseURL: options?.baseURL || OPEN_ROUTER_BASE_URL,
+        });
+    }
+    apiKeyEnvName = "OPEN_ROUTER_API_KEY";
+}
+exports.OpenRouterChatModel = OpenRouterChatModel;
package/lib/cjs/models/openai-chat-model.d.ts
CHANGED

@@ -1,6 +1,8 @@
 import OpenAI from "openai";
+import type { ChatCompletionMessageParam, ChatCompletionTool } from "openai/resources";
+import type { Stream } from "openai/streaming.js";
 import { z } from "zod";
-import { ChatModel, type ChatModelInput, type ChatModelOptions, type ChatModelOutput } from "./chat-model.js";
+import { ChatModel, type ChatModelInput, type ChatModelInputMessage, type ChatModelInputTool, type ChatModelOptions, type ChatModelOutput, type Role } from "./chat-model.js";
 export interface OpenAIChatModelOptions {
     apiKey?: string;
     baseURL?: string;
@@ -62,7 +64,27 @@ export declare class OpenAIChatModel extends ChatModel {
     options?: OpenAIChatModelOptions | undefined;
     constructor(options?: OpenAIChatModelOptions | undefined);
     protected _client?: OpenAI;
+    protected apiKeyEnvName: string;
+    protected apiKeyDefault: string | undefined;
+    protected supportsNativeStructuredOutputs: boolean;
+    protected supportsEndWithSystemMessage: boolean;
+    protected supportsToolsUseWithJsonSchema: boolean;
+    protected supportsParallelToolCalls: boolean;
+    protected supportsToolsEmptyParameters: boolean;
     get client(): OpenAI;
     get modelOptions(): ChatModelOptions | undefined;
     process(input: ChatModelInput): Promise<ChatModelOutput>;
+    private getParallelToolCalls;
+    private getRunMessages;
+    private getRunResponseFormat;
+    private requestStructuredOutput;
 }
+export declare const ROLE_MAP: {
+    [key in Role]: ChatCompletionMessageParam["role"];
+};
+export declare function contentsFromInputMessages(messages: ChatModelInputMessage[]): Promise<ChatCompletionMessageParam[]>;
+export declare function toolsFromInputTools(tools?: ChatModelInputTool[], options?: {
+    addTypeToEmptyParameters?: boolean;
+}): ChatCompletionTool[] | undefined;
+export declare function jsonSchemaToOpenAIJsonSchema(schema: Record<string, unknown>): Record<string, unknown>;
+export declare function extractResultFromStream(stream: Stream<OpenAI.Chat.Completions.ChatCompletionChunk>, jsonMode?: boolean): Promise<ChatModelOutput>;