@aigne/core 1.12.0 → 1.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/CHANGELOG.md +13 -0
  2. package/lib/cjs/loader/index.js +2 -0
  3. package/lib/cjs/models/bedrock-chat-model.d.ts +70 -0
  4. package/lib/cjs/models/bedrock-chat-model.js +273 -0
  5. package/lib/cjs/models/chat-model.d.ts +1 -0
  6. package/lib/cjs/models/chat-model.js +8 -0
  7. package/lib/cjs/models/gemini-chat-model.d.ts +1 -0
  8. package/lib/cjs/models/gemini-chat-model.js +1 -0
  9. package/lib/cjs/models/openai-chat-model.d.ts +3 -0
  10. package/lib/cjs/models/openai-chat-model.js +100 -100
  11. package/lib/cjs/prompt/prompt-builder.js +2 -2
  12. package/lib/cjs/utils/prompts.js +1 -1
  13. package/lib/cjs/utils/type-utils.d.ts +1 -0
  14. package/lib/cjs/utils/type-utils.js +12 -0
  15. package/lib/dts/models/bedrock-chat-model.d.ts +70 -0
  16. package/lib/dts/models/chat-model.d.ts +1 -0
  17. package/lib/dts/models/gemini-chat-model.d.ts +1 -0
  18. package/lib/dts/models/openai-chat-model.d.ts +3 -0
  19. package/lib/dts/utils/type-utils.d.ts +1 -0
  20. package/lib/esm/loader/index.js +2 -0
  21. package/lib/esm/models/bedrock-chat-model.d.ts +70 -0
  22. package/lib/esm/models/bedrock-chat-model.js +268 -0
  23. package/lib/esm/models/chat-model.d.ts +1 -0
  24. package/lib/esm/models/chat-model.js +8 -0
  25. package/lib/esm/models/gemini-chat-model.d.ts +1 -0
  26. package/lib/esm/models/gemini-chat-model.js +1 -0
  27. package/lib/esm/models/openai-chat-model.d.ts +3 -0
  28. package/lib/esm/models/openai-chat-model.js +100 -100
  29. package/lib/esm/prompt/prompt-builder.js +3 -3
  30. package/lib/esm/utils/prompts.js +1 -1
  31. package/lib/esm/utils/type-utils.d.ts +1 -0
  32. package/lib/esm/utils/type-utils.js +11 -0
  33. package/package.json +8 -1
@@ -45,6 +45,7 @@ export class OpenAIChatModel extends ChatModel {
45
45
  supportsToolsUseWithJsonSchema = true;
46
46
  supportsParallelToolCalls = true;
47
47
  supportsToolsEmptyParameters = true;
48
+ supportsToolStreaming = true;
48
49
  supportsTemperature = true;
49
50
  get client() {
50
51
  const apiKey = this.options?.apiKey || process.env[this.apiKeyEnvName] || this.apiKeyDefault;
@@ -86,9 +87,9 @@ export class OpenAIChatModel extends ChatModel {
86
87
  response_format: responseFormat,
87
88
  });
88
89
  if (options?.streaming && input.responseFormat?.type !== "json_schema") {
89
- return await extractResultFromStream(stream, false, true);
90
+ return await this.extractResultFromStream(stream, false, true);
90
91
  }
91
- const result = await extractResultFromStream(stream, jsonMode);
92
+ const result = await this.extractResultFromStream(stream, jsonMode);
92
93
  if (!this.supportsToolsUseWithJsonSchema &&
93
94
  !result.toolCalls?.length &&
94
95
  input.responseFormat?.type === "json_schema" &&
@@ -154,7 +155,103 @@ export class OpenAIChatModel extends ChatModel {
154
155
  ...body,
155
156
  response_format: resolvedResponseFormat,
156
157
  });
157
- return extractResultFromStream(res, jsonMode);
158
+ return this.extractResultFromStream(res, jsonMode);
159
+ }
160
+ async extractResultFromStream(stream, jsonMode, streaming) {
161
+ const result = new ReadableStream({
162
+ start: async (controller) => {
163
+ try {
164
+ let text = "";
165
+ let refusal = "";
166
+ const toolCalls = [];
167
+ let model;
168
+ for await (const chunk of stream) {
169
+ const choice = chunk.choices?.[0];
170
+ if (!model) {
171
+ model = chunk.model;
172
+ controller.enqueue({
173
+ delta: {
174
+ json: {
175
+ model,
176
+ },
177
+ },
178
+ });
179
+ }
180
+ if (choice?.delta.tool_calls?.length) {
181
+ for (const call of choice.delta.tool_calls) {
182
+ if (this.supportsToolStreaming && call.index !== undefined) {
183
+ handleToolCallDelta(toolCalls, call);
184
+ }
185
+ else {
186
+ handleCompleteToolCall(toolCalls, call);
187
+ }
188
+ }
189
+ }
190
+ if (choice?.delta.content) {
191
+ text += choice.delta.content;
192
+ if (!jsonMode) {
193
+ controller.enqueue({
194
+ delta: {
195
+ text: {
196
+ text: choice.delta.content,
197
+ },
198
+ },
199
+ });
200
+ }
201
+ }
202
+ if (choice?.delta.refusal) {
203
+ refusal += choice.delta.refusal;
204
+ if (!jsonMode) {
205
+ controller.enqueue({
206
+ delta: {
207
+ text: { text: choice.delta.refusal },
208
+ },
209
+ });
210
+ }
211
+ }
212
+ if (chunk.usage) {
213
+ controller.enqueue({
214
+ delta: {
215
+ json: {
216
+ usage: {
217
+ inputTokens: chunk.usage.prompt_tokens,
218
+ outputTokens: chunk.usage.completion_tokens,
219
+ },
220
+ },
221
+ },
222
+ });
223
+ }
224
+ }
225
+ text = text || refusal;
226
+ if (jsonMode && text) {
227
+ controller.enqueue({
228
+ delta: {
229
+ json: {
230
+ json: parseJSON(text),
231
+ },
232
+ },
233
+ });
234
+ }
235
+ if (toolCalls.length) {
236
+ controller.enqueue({
237
+ delta: {
238
+ json: {
239
+ toolCalls: toolCalls.map(({ args, ...c }) => ({
240
+ ...c,
241
+ function: { ...c.function, arguments: parseJSON(args) },
242
+ })),
243
+ },
244
+ },
245
+ });
246
+ }
247
+ controller.close();
248
+ }
249
+ catch (error) {
250
+ controller.error(error);
251
+ }
252
+ },
253
+ });
254
+ return streaming ? result : await agentResponseStreamToObject(result);
158
255
  }
159
256
  }
160
257
  export const ROLE_MAP = {
@@ -235,103 +332,6 @@ export function jsonSchemaToOpenAIJsonSchema(schema) {
235
332
  }
236
333
  return schema;
237
334
  }
238
- async function extractResultFromStream(stream, jsonMode, streaming) {
239
- const result = new ReadableStream({
240
- async start(controller) {
241
- try {
242
- let text = "";
243
- let refusal = "";
244
- const toolCalls = [];
245
- let model;
246
- for await (const chunk of stream) {
247
- const choice = chunk.choices?.[0];
248
- if (!model) {
249
- model = chunk.model;
250
- controller.enqueue({
251
- delta: {
252
- json: {
253
- model,
254
- },
255
- },
256
- });
257
- }
258
- if (choice?.delta.tool_calls?.length) {
259
- for (const call of choice.delta.tool_calls) {
260
- // Gemini not support tool call delta
261
- if (call.index !== undefined) {
262
- handleToolCallDelta(toolCalls, call);
263
- }
264
- else {
265
- handleCompleteToolCall(toolCalls, call);
266
- }
267
- }
268
- }
269
- if (choice?.delta.content) {
270
- text += choice.delta.content;
271
- if (!jsonMode) {
272
- controller.enqueue({
273
- delta: {
274
- text: {
275
- text: choice.delta.content,
276
- },
277
- },
278
- });
279
- }
280
- }
281
- if (choice?.delta.refusal) {
282
- refusal += choice.delta.refusal;
283
- if (!jsonMode) {
284
- controller.enqueue({
285
- delta: {
286
- text: { text: choice.delta.refusal },
287
- },
288
- });
289
- }
290
- }
291
- if (chunk.usage) {
292
- controller.enqueue({
293
- delta: {
294
- json: {
295
- usage: {
296
- inputTokens: chunk.usage.prompt_tokens,
297
- outputTokens: chunk.usage.completion_tokens,
298
- },
299
- },
300
- },
301
- });
302
- }
303
- }
304
- text = text || refusal;
305
- if (jsonMode && text) {
306
- controller.enqueue({
307
- delta: {
308
- json: {
309
- json: parseJSON(text),
310
- },
311
- },
312
- });
313
- }
314
- if (toolCalls.length) {
315
- controller.enqueue({
316
- delta: {
317
- json: {
318
- toolCalls: toolCalls.map(({ args, ...c }) => ({
319
- ...c,
320
- function: { ...c.function, arguments: parseJSON(args) },
321
- })),
322
- },
323
- },
324
- });
325
- }
326
- controller.close();
327
- }
328
- catch (error) {
329
- controller.error(error);
330
- }
331
- },
332
- });
333
- return streaming ? result : await agentResponseStreamToObject(result);
334
- }
335
335
  function handleToolCallDelta(toolCalls, call) {
336
336
  toolCalls[call.index] ??= {
337
337
  id: call.id || nanoid(),
@@ -2,7 +2,7 @@ import { readFile } from "node:fs/promises";
2
2
  import { ZodObject } from "zod";
3
3
  import { Agent } from "../agents/agent.js";
4
4
  import { outputSchemaToResponseFormatSchema } from "../utils/json-schema.js";
5
- import { isNil } from "../utils/type-utils.js";
5
+ import { isNil, unique } from "../utils/type-utils.js";
6
6
  import { AgentMessageTemplate, ChatMessagesTemplate, SystemMessageTemplate, UserMessageTemplate, } from "./template.js";
7
7
  export const MESSAGE_KEY = "$message";
8
8
  export const DEFAULT_MAX_HISTORY_MESSAGES = 10;
@@ -112,10 +112,10 @@ export class PromptBuilder {
112
112
  : undefined;
113
113
  }
114
114
  buildTools(options) {
115
- const toolAgents = (options.context?.skills ?? [])
115
+ const toolAgents = unique((options.context?.skills ?? [])
116
116
  .concat(options.agent?.skills ?? [])
117
117
  // TODO: support nested tools?
118
- .flatMap((i) => (i.isInvokable ? i.skills.concat(i) : i.skills));
118
+ .flatMap((i) => (i.isInvokable ? i.skills.concat(i) : i.skills)), (i) => i.name);
119
119
  const tools = toolAgents.map((i) => ({
120
120
  type: "function",
121
121
  function: {
@@ -1,5 +1,5 @@
1
1
  export function getJsonOutputPrompt(schema) {
2
- let prompt = "Provide your output as a JSON containing the following fields:";
2
+ let prompt = "Output must be a JSON object containing the following fields only.";
3
3
  if (typeof schema === "string") {
4
4
  prompt += `\n<json_fields>\n${schema}\n</json_fields>`;
5
5
  }
@@ -8,6 +8,7 @@ export declare function isEmpty(obj: unknown): boolean;
8
8
  export declare function isNonNullable<T>(value: T): value is NonNullable<T>;
9
9
  export declare function isNotEmpty<T>(arr: T[]): arr is [T, ...T[]];
10
10
  export declare function duplicates<T>(arr: T[], key?: (item: T) => unknown): T[];
11
+ export declare function unique<T>(arr: T[], key?: (item: T) => unknown): T[];
11
12
  export declare function omitBy<T extends Record<string, unknown>, K extends keyof T>(obj: T, predicate: (value: T[K], key: K) => boolean): Partial<T>;
12
13
  export declare function orArrayToArray<T>(value?: T | T[]): T[];
13
14
  export declare function createAccessorArray<T>(array: T[], accessor: (array: T[], name: string) => T | undefined): T[] & {
@@ -34,6 +34,17 @@ export function duplicates(arr, key = (item) => item) {
34
34
  }
35
35
  return Array.from(duplicates);
36
36
  }
37
+ export function unique(arr, key = (item) => item) {
38
+ const seen = new Set();
39
+ return arr.filter((item) => {
40
+ const k = key(item);
41
+ if (seen.has(k)) {
42
+ return false;
43
+ }
44
+ seen.add(k);
45
+ return true;
46
+ });
47
+ }
37
48
  export function omitBy(obj, predicate) {
38
49
  return Object.fromEntries(Object.entries(obj).filter(([key, value]) => {
39
50
  const k = key;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aigne/core",
3
- "version": "1.12.0",
3
+ "version": "1.13.0",
4
4
  "description": "AIGNE core library for building AI-powered applications",
5
5
  "publishConfig": {
6
6
  "access": "public"
@@ -58,6 +58,12 @@
58
58
  ],
59
59
  "utils/*": [
60
60
  "./lib/dts/utils/*"
61
+ ],
62
+ "client/*": [
63
+ "./lib/dts/client/*"
64
+ ],
65
+ "server/*": [
66
+ "./lib/dts/server/*"
61
67
  ]
62
68
  }
63
69
  },
@@ -84,6 +90,7 @@
84
90
  },
85
91
  "peerDependencies": {
86
92
  "@anthropic-ai/sdk": "^0.39.0",
93
+ "@aws-sdk/client-bedrock-runtime": "^3.796.0",
87
94
  "@google/generative-ai": "^0.24.0",
88
95
  "openai": "^4.87.3"
89
96
  },