@aigne/core 1.7.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/lib/cjs/agents/agent.js +2 -2
- package/lib/cjs/agents/mcp-agent.d.ts +2 -3
- package/lib/cjs/agents/mcp-agent.js +10 -6
- package/lib/cjs/execution-engine/execution-engine.js +1 -1
- package/lib/cjs/loader/agent-js.js +1 -1
- package/lib/cjs/loader/agent-yaml.d.ts +1 -0
- package/lib/cjs/loader/agent-yaml.js +4 -0
- package/lib/cjs/loader/index.d.ts +72 -1
- package/lib/cjs/loader/index.js +29 -13
- package/lib/cjs/models/claude-chat-model.js +1 -1
- package/lib/cjs/models/deepseek-chat-model.d.ts +7 -0
- package/lib/cjs/models/deepseek-chat-model.js +19 -0
- package/lib/cjs/models/gemini-chat-model.d.ts +8 -0
- package/lib/cjs/models/gemini-chat-model.js +20 -0
- package/lib/cjs/models/ollama-chat-model.d.ts +6 -0
- package/lib/cjs/models/ollama-chat-model.js +18 -0
- package/lib/cjs/models/open-router-chat-model.d.ts +5 -0
- package/lib/cjs/models/open-router-chat-model.js +17 -0
- package/lib/cjs/models/openai-chat-model.d.ts +23 -1
- package/lib/cjs/models/openai-chat-model.js +182 -78
- package/lib/cjs/models/xai-chat-model.d.ts +3 -11
- package/lib/cjs/models/xai-chat-model.js +1 -14
- package/lib/cjs/prompt/prompt-builder.js +3 -0
- package/lib/cjs/utils/prompts.d.ts +1 -0
- package/lib/cjs/utils/prompts.js +13 -0
- package/lib/cjs/utils/type-utils.d.ts +1 -1
- package/lib/cjs/utils/type-utils.js +1 -1
- package/lib/dts/agents/mcp-agent.d.ts +2 -3
- package/lib/dts/loader/agent-yaml.d.ts +1 -0
- package/lib/dts/loader/index.d.ts +72 -1
- package/lib/dts/models/deepseek-chat-model.d.ts +7 -0
- package/lib/dts/models/gemini-chat-model.d.ts +8 -0
- package/lib/dts/models/ollama-chat-model.d.ts +6 -0
- package/lib/dts/models/open-router-chat-model.d.ts +5 -0
- package/lib/dts/models/openai-chat-model.d.ts +23 -1
- package/lib/dts/models/xai-chat-model.d.ts +3 -11
- package/lib/dts/utils/prompts.d.ts +1 -0
- package/lib/dts/utils/type-utils.d.ts +1 -1
- package/lib/esm/agents/agent.js +3 -3
- package/lib/esm/agents/mcp-agent.d.ts +2 -3
- package/lib/esm/agents/mcp-agent.js +10 -6
- package/lib/esm/execution-engine/execution-engine.js +1 -1
- package/lib/esm/loader/agent-js.js +1 -1
- package/lib/esm/loader/agent-yaml.d.ts +1 -0
- package/lib/esm/loader/agent-yaml.js +4 -0
- package/lib/esm/loader/index.d.ts +72 -1
- package/lib/esm/loader/index.js +28 -13
- package/lib/esm/models/claude-chat-model.js +1 -1
- package/lib/esm/models/deepseek-chat-model.d.ts +7 -0
- package/lib/esm/models/deepseek-chat-model.js +15 -0
- package/lib/esm/models/gemini-chat-model.d.ts +8 -0
- package/lib/esm/models/gemini-chat-model.js +16 -0
- package/lib/esm/models/ollama-chat-model.d.ts +6 -0
- package/lib/esm/models/ollama-chat-model.js +14 -0
- package/lib/esm/models/open-router-chat-model.d.ts +5 -0
- package/lib/esm/models/open-router-chat-model.js +13 -0
- package/lib/esm/models/openai-chat-model.d.ts +23 -1
- package/lib/esm/models/openai-chat-model.js +178 -78
- package/lib/esm/models/xai-chat-model.d.ts +3 -11
- package/lib/esm/models/xai-chat-model.js +1 -11
- package/lib/esm/prompt/prompt-builder.js +3 -0
- package/lib/esm/utils/prompts.d.ts +1 -0
- package/lib/esm/utils/prompts.js +10 -0
- package/lib/esm/utils/type-utils.d.ts +1 -1
- package/lib/esm/utils/type-utils.js +1 -1
- package/package.json +6 -3

package/lib/esm/models/openai-chat-model.js CHANGED
@@ -2,6 +2,8 @@ import { nanoid } from "nanoid";
 import OpenAI from "openai";
 import { z } from "zod";
 import { parseJSON } from "../utils/json-schema.js";
+import { mergeUsage } from "../utils/model-utils.js";
+import { getJsonOutputPrompt } from "../utils/prompts.js";
 import { checkArguments, isNonNullable } from "../utils/type-utils.js";
 import { ChatModel, } from "./chat-model.js";
 const CHAT_MODEL_OPENAI_DEFAULT_MODEL = "gpt-4o-mini";
@@ -23,16 +25,23 @@ export const openAIChatModelOptionsSchema = z.object({
 export class OpenAIChatModel extends ChatModel {
     options;
     constructor(options) {
-        if (options)
-            checkArguments("OpenAIChatModel", openAIChatModelOptionsSchema, options);
         super();
         this.options = options;
+        if (options)
+            checkArguments(this.name, openAIChatModelOptionsSchema, options);
     }
     _client;
+    apiKeyEnvName = "OPENAI_API_KEY";
+    apiKeyDefault;
+    supportsNativeStructuredOutputs = true;
+    supportsEndWithSystemMessage = true;
+    supportsToolsUseWithJsonSchema = true;
+    supportsParallelToolCalls = true;
+    supportsToolsEmptyParameters = true;
     get client() {
-        const apiKey = this.options?.apiKey || process.env.
+        const apiKey = this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault;
         if (!apiKey)
-            throw new Error(
+            throw new Error(`Api Key is required for ${this.name}`);
         this._client ??= new OpenAI({
             baseURL: this.options?.baseURL,
             apiKey,
@@ -43,91 +52,104 @@ export class OpenAIChatModel extends ChatModel {
         return this.options?.modelOptions;
     }
     async process(input) {
-        const
+        const body = {
             model: this.options?.model || CHAT_MODEL_OPENAI_DEFAULT_MODEL,
             temperature: input.modelOptions?.temperature ?? this.modelOptions?.temperature,
             top_p: input.modelOptions?.topP ?? this.modelOptions?.topP,
             frequency_penalty: input.modelOptions?.frequencyPenalty ?? this.modelOptions?.frequencyPenalty,
             presence_penalty: input.modelOptions?.presencePenalty ?? this.modelOptions?.presencePenalty,
-            messages: await
-            tools: toolsFromInputTools(input.tools),
-            tool_choice: input.toolChoice,
-            parallel_tool_calls: !input.tools?.length
-                ? undefined
-                : (input.modelOptions?.parallelToolCalls ?? this.modelOptions?.parallelToolCalls),
-            response_format: input.responseFormat?.type === "json_schema"
-                ? {
-                    type: "json_schema",
-                    json_schema: {
-                        ...input.responseFormat.jsonSchema,
-                        schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
-                    },
-                }
-                : undefined,
+            messages: await this.getRunMessages(input),
             stream_options: {
                 include_usage: true,
             },
             stream: true,
+        };
+        const { jsonMode, responseFormat } = await this.getRunResponseFormat(input);
+        const stream = await this.client.chat.completions.create({
+            ...body,
+            tools: toolsFromInputTools(input.tools, {
+                addTypeToEmptyParameters: !this.supportsToolsEmptyParameters,
+            }),
+            tool_choice: input.toolChoice,
+            parallel_tool_calls: this.getParallelToolCalls(input),
+            response_format: responseFormat,
         });
-
-
-
-
-
-        const
-
-        if (choice?.delta.tool_calls?.length) {
-            for (const call of choice.delta.tool_calls) {
-                toolCalls[call.index] ??= {
-                    id: call.id || nanoid(),
-                    type: "function",
-                    function: { name: "", arguments: {} },
-                    args: "",
-                };
-                const c = toolCalls[call.index];
-                if (!c)
-                    throw new Error("Tool call not found");
-                if (call.type)
-                    c.type = call.type;
-                c.function.name = c.function.name + (call.function?.name || "");
-                c.args = c.args.concat(call.function?.arguments || "");
-            }
-        }
-        if (choice?.delta.content)
-            text += choice.delta.content;
-        if (chunk.usage) {
-            usage = {
-                inputTokens: chunk.usage.prompt_tokens,
-                outputTokens: chunk.usage.completion_tokens,
-            };
-        }
+        const result = await extractResultFromStream(stream, jsonMode);
+        if (!this.supportsToolsUseWithJsonSchema &&
+            !result.toolCalls?.length &&
+            input.responseFormat?.type === "json_schema" &&
+            result.text) {
+            const output = await this.requestStructuredOutput(body, input.responseFormat);
+            return { ...output, usage: mergeUsage(result.usage, output.usage) };
         }
-
-
-
-
-
-
+        return result;
+    }
+    getParallelToolCalls(input) {
+        if (!this.supportsParallelToolCalls)
+            return undefined;
+        if (!input.tools?.length)
+            return undefined;
+        return input.modelOptions?.parallelToolCalls ?? this.modelOptions?.parallelToolCalls;
+    }
+    async getRunMessages(input) {
+        const messages = await contentsFromInputMessages(input.messages);
+        if (!this.supportsEndWithSystemMessage && messages.at(-1)?.role !== "user") {
+            messages.push({ role: "user", content: "" });
        }
-
-
+        if (!this.supportsToolsUseWithJsonSchema && input.tools?.length)
+            return messages;
+        if (this.supportsNativeStructuredOutputs)
+            return messages;
+        if (input.responseFormat?.type === "json_schema") {
+            messages.unshift({
+                role: "system",
+                content: getJsonOutputPrompt(input.responseFormat.jsonSchema.schema),
+            });
         }
-
-
-
-
-        }
+        return messages;
+    }
+    async getRunResponseFormat(input) {
+        if (!this.supportsToolsUseWithJsonSchema && input.tools?.length)
+            return { jsonMode: false, responseFormat: undefined };
+        if (!this.supportsNativeStructuredOutputs) {
+            const jsonMode = input.responseFormat?.type === "json_schema";
+            return { jsonMode, responseFormat: jsonMode ? { type: "json_object" } : undefined };
         }
-
+        if (input.responseFormat?.type === "json_schema") {
+            return {
+                jsonMode: true,
+                responseFormat: {
+                    type: "json_schema",
+                    json_schema: {
+                        ...input.responseFormat.jsonSchema,
+                        schema: jsonSchemaToOpenAIJsonSchema(input.responseFormat.jsonSchema.schema),
+                    },
+                },
+            };
+        }
+        return { jsonMode: false, responseFormat: undefined };
+    }
+    async requestStructuredOutput(body, responseFormat) {
+        if (responseFormat?.type !== "json_schema") {
+            throw new Error("Expected json_schema response format");
+        }
+        const { jsonMode, responseFormat: resolvedResponseFormat } = await this.getRunResponseFormat({
+            responseFormat,
+        });
+        const res = await this.client.chat.completions.create({
+            ...body,
+            response_format: resolvedResponseFormat,
+        });
+        return extractResultFromStream(res, jsonMode);
     }
 }
-const ROLE_MAP = {
+export const ROLE_MAP = {
     system: "system",
     user: "user",
     agent: "assistant",
     tool: "tool",
 };
-async function contentsFromInputMessages(messages) {
+export async function contentsFromInputMessages(messages) {
     return messages.map((i) => ({
         role: ROLE_MAP[i.role],
         content: typeof i.content === "string"
@@ -156,19 +178,25 @@ async function contentsFromInputMessages(messages) {
         name: i.name,
     }));
 }
-function toolsFromInputTools(tools) {
+export function toolsFromInputTools(tools, options) {
     return tools?.length
-        ? tools.map((i) =>
-
-
-
-
-
-
-
+        ? tools.map((i) => {
+            const parameters = i.function.parameters;
+            if (options?.addTypeToEmptyParameters && Object.keys(parameters).length === 0) {
+                parameters.type = "object";
+            }
+            return {
+                type: "function",
+                function: {
+                    name: i.function.name,
+                    description: i.function.description,
+                    parameters,
+                },
+            };
+        })
         : undefined;
 }
-function jsonSchemaToOpenAIJsonSchema(schema) {
+export function jsonSchemaToOpenAIJsonSchema(schema) {
     if (schema?.type === "object") {
         const { required, properties } = schema;
         return {
@@ -193,3 +221,75 @@ function jsonSchemaToOpenAIJsonSchema(schema) {
     }
     return schema;
 }
+export async function extractResultFromStream(stream, jsonMode = false) {
+    let text = "";
+    const toolCalls = [];
+    let usage;
+    let model;
+    for await (const chunk of stream) {
+        const choice = chunk.choices?.[0];
+        model ??= chunk.model;
+        if (choice?.delta.tool_calls?.length) {
+            for (const call of choice.delta.tool_calls) {
+                // Gemini not support tool call delta
+                if (call.index !== undefined) {
+                    handleToolCallDelta(toolCalls, call);
+                }
+                else {
+                    handleCompleteToolCall(toolCalls, call);
+                }
+            }
+        }
+        if (choice?.delta.content)
+            text += choice.delta.content;
+        if (chunk.usage) {
+            usage = {
+                inputTokens: chunk.usage.prompt_tokens,
+                outputTokens: chunk.usage.completion_tokens,
+            };
+        }
+    }
+    const result = {
+        usage,
+        model,
+    };
+    if (jsonMode && text) {
+        result.json = parseJSON(text);
+    }
+    else {
+        result.text = text;
+    }
+    if (toolCalls.length) {
+        result.toolCalls = toolCalls.map(({ args, ...c }) => ({
+            ...c,
+            function: { ...c.function, arguments: parseJSON(args) },
+        }));
+    }
+    return result;
+}
+function handleToolCallDelta(toolCalls, call) {
+    toolCalls[call.index] ??= {
+        id: call.id || nanoid(),
+        type: "function",
+        function: { name: "", arguments: {} },
+        args: "",
+    };
+    const c = toolCalls[call.index];
+    if (!c)
+        throw new Error("Tool call not found");
+    if (call.type)
+        c.type = call.type;
+    c.function.name = c.function.name + (call.function?.name || "");
+    c.args = c.args.concat(call.function?.arguments || "");
+}
+function handleCompleteToolCall(toolCalls, call) {
+    toolCalls.push({
+        id: call.id || nanoid(),
+        type: "function",
+        function: {
+            name: call.function?.name || "",
+            arguments: parseJSON(call.function?.arguments || "{}"),
+        },
+        args: call.function?.arguments || "",
+    });
+}
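The `supports*` flags and `apiKeyEnvName`/`apiKeyDefault` fields introduced above are the extension surface for the provider models added in this release (DeepSeek, Gemini, Ollama, OpenRouter in the file list). Their bodies are not included in this dump, so the subclass below is only an illustrative sketch of the pattern: the field names and the relative import come from the diffs in this changeset, while the class name, endpoint, and flag values are assumptions.

```ts
// Hypothetical provider subclass; field names are confirmed by the diff above,
// but the class name, base URL, and flag values here are illustrative only.
import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";

export class ExampleChatModel extends OpenAIChatModel {
  constructor(options?: OpenAIChatModelOptions) {
    super({ ...options, baseURL: options?.baseURL || "https://api.example.com/v1" });
  }
  // get client() reads this env var when options.apiKey is absent
  protected apiKeyEnvName = "EXAMPLE_API_KEY";
  // getRunMessages() prepends a getJsonOutputPrompt() system message and
  // getRunResponseFormat() downgrades json_schema to { type: "json_object" }
  supportsNativeStructuredOutputs = false;
  // process() retries via requestStructuredOutput() when tools and a JSON
  // schema are requested together and the first pass returned plain text
  supportsToolsUseWithJsonSchema = false;
  // toolsFromInputTools() injects { type: "object" } into empty tool parameters
  supportsToolsEmptyParameters = false;
}
```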

package/lib/esm/models/xai-chat-model.d.ts CHANGED
@@ -1,13 +1,5 @@
-import
-import type { ChatModelOptions } from "./chat-model.js";
-import { OpenAIChatModel } from "./openai-chat-model.js";
-export interface XAIChatModelOptions {
-    apiKey?: string;
-    model?: string;
-    modelOptions?: ChatModelOptions;
-    baseURL?: string;
-}
+import { OpenAIChatModel, type OpenAIChatModelOptions } from "./openai-chat-model.js";
 export declare class XAIChatModel extends OpenAIChatModel {
-    constructor(options?:
-
+    constructor(options?: OpenAIChatModelOptions);
+    protected apiKeyEnvName: string;
 }

package/lib/esm/models/xai-chat-model.js CHANGED
@@ -1,4 +1,3 @@
-import OpenAI from "openai";
 import { OpenAIChatModel } from "./openai-chat-model.js";
 const XAI_DEFAULT_CHAT_MODEL = "grok-2-latest";
 const XAI_BASE_URL = "https://api.x.ai/v1";
@@ -10,14 +9,5 @@ export class XAIChatModel extends OpenAIChatModel {
             baseURL: options?.baseURL || XAI_BASE_URL,
         });
     }
-
-        const apiKey = this.options?.apiKey || process.env.XAI_API_KEY;
-        if (!apiKey)
-            throw new Error("Api Key is required for XAIChatModel");
-        this._client ??= new OpenAI({
-            baseURL: this.options?.baseURL,
-            apiKey,
-        });
-        return this._client;
-    }
+    apiKeyEnvName = "XAI_API_KEY";
 }
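With `apiKeyEnvName` overridden, the hand-rolled client getter above becomes dead code and is deleted; key lookup and client construction now live entirely in the base class. A minimal usage sketch (the deep-import specifier is an assumption about the package's export layout, not confirmed by this diff):

```ts
// Sketch: the XAI model now inherits key lookup and client construction.
import { XAIChatModel } from "@aigne/core/models/xai-chat-model.js"; // path assumed

// options.apiKey wins if set; otherwise the base class's get client() reads
// process.env.XAI_API_KEY via the overridden apiKeyEnvName.
const model = new XAIChatModel({ model: "grok-2-latest" });
```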

package/lib/esm/prompt/prompt-builder.js CHANGED
@@ -127,6 +127,7 @@ export class PromptBuilder {
             },
         }));
         let toolChoice;
+        const modelOptions = {};
         // use manual choice if configured in the agent
         const manualChoice = options.agent?.toolChoice;
         if (manualChoice) {
@@ -141,6 +142,7 @@ export class PromptBuilder {
         }
         else if (manualChoice === "router") {
             toolChoice = "required";
+            modelOptions.parallelToolCalls = false;
         }
         else {
             toolChoice = manualChoice;
@@ -154,6 +156,7 @@ export class PromptBuilder {
             toolAgents: toolAgents.length ? toolAgents : undefined,
             tools: tools.length ? tools : undefined,
             toolChoice,
+            modelOptions: Object.keys(modelOptions).length ? modelOptions : undefined,
         };
     }
 }
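Threading `modelOptions` through the built prompt means a `"router"` tool choice now reaches the model layer as both a forced tool call and a ban on parallel calls; in `OpenAIChatModel.getParallelToolCalls` this per-input value takes precedence over the constructor-level `modelOptions`. An illustrative shape of the relevant output (field names from the diffs above, values illustrative):

```ts
// What PromptBuilder emits for an agent configured with toolChoice: "router":
const built = {
  toolChoice: "required",                      // "router" forces a tool call
  modelOptions: { parallelToolCalls: false },  // a router must pick exactly one tool
  // ...messages, tools, toolAgents omitted
};
```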

package/lib/esm/utils/prompts.d.ts ADDED
@@ -0,0 +1 @@
+export declare function getJsonOutputPrompt(schema: Record<string, unknown> | string): string;

package/lib/esm/utils/prompts.js ADDED
@@ -0,0 +1,10 @@
+export function getJsonOutputPrompt(schema) {
+    let prompt = "Provide your output as a JSON containing the following fields:";
+    if (typeof schema === "string") {
+        prompt += `\n<json_fields>\n${schema}\n</json_fields>`;
+    }
+    else {
+        prompt += `\n<json_fields>\n${JSON.stringify(schema)}\n</json_fields>`;
+    }
+    return prompt;
+}
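Since the function body is shown in full above, its output is fully determined; for a small JSON-schema input it produces the prompt shown in the comments (the relative import mirrors the file layout):

```ts
import { getJsonOutputPrompt } from "./prompts.js";

const prompt = getJsonOutputPrompt({ type: "object", properties: { answer: { type: "string" } } });
// Provide your output as a JSON containing the following fields:
// <json_fields>
// {"type":"object","properties":{"answer":{"type":"string"}}}
// </json_fields>
```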

package/lib/esm/utils/type-utils.d.ts CHANGED
@@ -11,5 +11,5 @@ export declare function orArrayToArray<T>(value?: T | T[]): T[];
 export declare function createAccessorArray<T>(array: T[], accessor: (array: T[], name: string) => T | undefined): T[] & {
     [key: string]: T;
 };
-export declare function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T):
+export declare function checkArguments<T>(prefix: string, schema: ZodType<T>, args: T): T;
 export declare function tryOrThrow<P extends PromiseOrValue<unknown>>(fn: () => P, error: string | Error | ((error: Error) => Error)): P;

package/lib/esm/utils/type-utils.js CHANGED
@@ -43,7 +43,7 @@ export function createAccessorArray(array, accessor) {
 }
 export function checkArguments(prefix, schema, args) {
     try {
-        schema.parse(args, {
+        return schema.parse(args, {
             errorMap: (issue, ctx) => {
                 if (issue.code === "invalid_union") {
                     // handle all issues that are not invalid_type
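Returning `schema.parse(...)` instead of discarding it lets callers keep the value Zod produced, so schema defaults and coercions become visible to the caller. A sketch of the difference (the schema here is hypothetical):

```ts
// Parsed output (with schema defaults applied) is now returned
// instead of being thrown away after validation.
import { z } from "zod";
import { checkArguments } from "./type-utils.js";

const schema = z.object({ temperature: z.number().default(0.7) });
const options = checkArguments("ExampleModel", schema, {} as z.infer<typeof schema>);
console.log(options.temperature); // 0.7, the schema default, survives the round trip
```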

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/core",
-  "version": "1.7.0",
+  "version": "1.9.0",
   "description": "AIGNE core library for building AI-powered applications",
   "publishConfig": {
     "access": "public"
@@ -47,6 +47,9 @@
       "execution-engine/*": [
         "./lib/dts/execution-engine/*"
       ],
+      "loader/*": [
+        "./lib/dts/loader/*"
+      ],
       "models/*": [
         "./lib/dts/models/*"
       ],
@@ -83,11 +86,11 @@
     "@types/bun": "^1.2.9",
     "@types/express": "^5.0.1",
     "@types/mustache": "^4.2.5",
-    "@types/node": "^22.14.
+    "@types/node": "^22.14.1",
     "detect-port": "^2.1.0",
     "express": "^5.1.0",
     "npm-run-all": "^4.1.5",
-    "openai": "^4.
+    "openai": "^4.94.0",
     "rimraf": "^6.0.1",
     "typescript": "^5.8.3"
   },
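The new `loader/*` entry mirrors the existing `execution-engine/*` and `models/*` mappings, pointing TypeScript at declaration files for deep imports from the loader directory (whose `index.d.ts` grew by 72 lines in this release). Roughly, and assuming the package permits deep imports at all:

```ts
// With the added typesVersions mapping, a deep import like this resolves its
// declarations from ./lib/dts/loader/*. The loader's named exports are not
// shown in this diff, so a namespace import keeps the sketch neutral.
import * as loader from "@aigne/core/loader/index.js";
```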