@agentica/core 0.23.0 → 0.24.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/lib/Agentica.d.ts +6 -1
- package/lib/Agentica.js +8 -3
- package/lib/Agentica.js.map +1 -1
- package/lib/context/AgenticaContext.d.ts +4 -0
- package/lib/histories/AgenticaUserInputHistory.d.ts +13 -7
- package/lib/index.mjs +7 -2
- package/lib/index.mjs.map +1 -1
- package/lib/utils/ChatGptCompletionMessageUtil.spec.d.ts +1 -0
- package/lib/utils/ChatGptCompletionMessageUtil.spec.js +288 -0
- package/lib/utils/ChatGptCompletionMessageUtil.spec.js.map +1 -0
- package/lib/utils/ChatGptTokenUsageAggregator.spec.d.ts +1 -0
- package/lib/utils/ChatGptTokenUsageAggregator.spec.js +199 -0
- package/lib/utils/ChatGptTokenUsageAggregator.spec.js.map +1 -0
- package/lib/utils/Singleton.js +18 -0
- package/lib/utils/Singleton.js.map +1 -1
- package/lib/utils/Singleton.spec.d.ts +1 -0
- package/lib/utils/Singleton.spec.js +106 -0
- package/lib/utils/Singleton.spec.js.map +1 -0
- package/lib/utils/__map_take.spec.d.ts +1 -0
- package/lib/utils/__map_take.spec.js +108 -0
- package/lib/utils/__map_take.spec.js.map +1 -0
- package/package.json +1 -1
- package/src/Agentica.ts +16 -2
- package/src/context/AgenticaContext.ts +5 -0
- package/src/histories/AgenticaUserInputHistory.ts +14 -8
- package/src/utils/ChatGptCompletionMessageUtil.spec.ts +320 -0
- package/src/utils/ChatGptTokenUsageAggregator.spec.ts +226 -0
- package/src/utils/Singleton.spec.ts +138 -0
- package/src/utils/Singleton.ts +18 -0
- package/src/utils/__map_take.spec.ts +140 -0
- package/lib/utils/MathUtil.d.ts +0 -3
- package/lib/utils/MathUtil.js +0 -8
- package/lib/utils/MathUtil.js.map +0 -1
- package/src/utils/MathUtil.ts +0 -3
package/lib/utils/__map_take.spec.d.ts
ADDED
@@ -0,0 +1 @@
+export {};
package/lib/utils/__map_take.spec.js
ADDED
@@ -0,0 +1,108 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const __map_take_1 = require("./__map_take");
+describe("__map_take", () => {
+    describe("basic functionality", () => {
+        it("should generate value for new key", () => {
+            const map = new Map();
+            const generator = () => 42;
+            const result = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result).toBe(42);
+            expect(map.get("test")).toBe(42);
+        });
+        it("should return existing value for existing key", () => {
+            const map = new Map();
+            map.set("test", 100);
+            const generator = () => 42;
+            const result = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result).toBe(100);
+            expect(map.get("test")).toBe(100);
+        });
+    });
+    describe("various type tests", () => {
+        it("should handle object type", () => {
+            const map = new Map();
+            const generator = () => ({ value: 42 });
+            const result = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result).toEqual({ value: 42 });
+            expect(map.get("test")).toEqual({ value: 42 });
+        });
+        it("should handle array type", () => {
+            const map = new Map();
+            const generator = () => [1, 2, 3];
+            const result = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result).toEqual([1, 2, 3]);
+            expect(map.get("test")).toEqual([1, 2, 3]);
+        });
+        it("should handle function type", () => {
+            var _a;
+            const map = new Map();
+            const generator = () => () => 42;
+            const result = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result()).toBe(42);
+            expect((_a = map.get("test")) === null || _a === void 0 ? void 0 : _a()).toBe(42);
+        });
+    });
+    describe("edge cases", () => {
+        it("should handle null key", () => {
+            const map = new Map();
+            const generator = () => "test";
+            const result = (0, __map_take_1.__map_take)(map, null, generator);
+            expect(result).toBe("test");
+            expect(map.get(null)).toBe("test");
+        });
+        it("should handle undefined key", () => {
+            const map = new Map();
+            const generator = () => "test";
+            const result = (0, __map_take_1.__map_take)(map, undefined, generator);
+            expect(result).toBe("test");
+            expect(map.get(undefined)).toBe("test");
+        });
+        it("should handle empty string key", () => {
+            const map = new Map();
+            const generator = () => "test";
+            const result = (0, __map_take_1.__map_take)(map, "", generator);
+            expect(result).toBe("test");
+            expect(map.get("")).toBe("test");
+        });
+    });
+    describe("generator function tests", () => {
+        it("should not call generator multiple times", () => {
+            const map = new Map();
+            let callCount = 0;
+            const generator = () => {
+                callCount++;
+                return 42;
+            };
+            (0, __map_take_1.__map_take)(map, "test", generator);
+            (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(callCount).toBe(1);
+        });
+        it("should handle generator throwing error", () => {
+            const map = new Map();
+            const generator = () => {
+                throw new Error("Generator error");
+            };
+            expect(() => (0, __map_take_1.__map_take)(map, "test", generator)).toThrow("Generator error");
+        });
+        it("should handle generator returning undefined", () => {
+            const map = new Map();
+            const generator = () => undefined;
+            const result = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result).toBeUndefined();
+            expect(map.get("test")).toBeUndefined();
+        });
+    });
+    describe("concurrency tests", () => {
+        it("should handle concurrent access to same key", () => {
+            const map = new Map();
+            const generator = () => 42;
+            const result1 = (0, __map_take_1.__map_take)(map, "test", generator);
+            const result2 = (0, __map_take_1.__map_take)(map, "test", generator);
+            expect(result1).toBe(42);
+            expect(result2).toBe(42);
+            expect(map.get("test")).toBe(42);
+        });
+    });
+});
+//# sourceMappingURL=__map_take.spec.js.map
package/lib/utils/__map_take.spec.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"__map_take.spec.js","sourceRoot":"","sources":["../../src/utils/__map_take.spec.ts"],"names":[],"mappings":";;AAAA,6CAA0C;AAE1C,QAAQ,CAAC,YAAY,EAAE,GAAG,EAAE;IAC1B,QAAQ,CAAC,qBAAqB,EAAE,GAAG,EAAE;QACnC,EAAE,CAAC,mCAAmC,EAAE,GAAG,EAAE;YAC3C,MAAM,GAAG,GAAG,IAAI,GAAG,EAAkB,CAAC;YACtC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,EAAE,CAAC;YAE3B,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAElD,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACxB,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,+CAA+C,EAAE,GAAG,EAAE;YACvD,MAAM,GAAG,GAAG,IAAI,GAAG,EAAkB,CAAC;YACtC,GAAG,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;YAErB,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,EAAE,CAAC;YAC3B,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAElD,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACzB,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACpC,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,oBAAoB,EAAE,GAAG,EAAE;QAClC,EAAE,CAAC,2BAA2B,EAAE,GAAG,EAAE;YACnC,MAAM,GAAG,GAAG,IAAI,GAAG,EAA6B,CAAC;YACjD,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;YAExC,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAElD,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;YACtC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,0BAA0B,EAAE,GAAG,EAAE;YAClC,MAAM,GAAG,GAAG,IAAI,GAAG,EAAoB,CAAC;YACxC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;YAElC,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAElD,MAAM,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YAClC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAC7C,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,6BAA6B,EAAE,GAAG,EAAE;;YACrC,MAAM,GAAG,GAAG,IAAI,GAAG,EAAwB,CAAC;YAC5C,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC;YAEjC,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAElD,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YAC1B,MAAM,CAAC,MAAA,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,2CAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACvC,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,YAAY,EAAE,GAAG,EAAE;QAC1B,EAAE,CAAC,wBAAwB,EAAE,GAAG,EAAE;YAChC,MAAM,GAAG,GAAG,IAAI,GAAG,EAAgB,CAAC;YACpC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC;YAE/B,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC;YAEhD,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5B,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACrC,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,6BAA6B,EAAE,GAAG,EAAE;YACrC,MAAM,GAAG,GAAG,IAAI,GAAG,EAAqB,CAAC;YACzC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC;YAE/B,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;YAErD,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5B,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC1C,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,gCAAgC,EAAE,GAAG,EAAE;YACxC,MAAM,GAAG,GAAG,IAAI,GAAG,EAAkB,CAAC;YACtC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC;YAE/B,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,EAAE,EAAE,SAAS,CAAC,CAAC;YAE9C,MAAM,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5B,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,0BAA0B,EAAE,GAAG,EAAE;QACxC,EAAE,CAAC,0CAA0C,EAAE,GAAG,EAAE;YAClD,MAAM,GAAG,GAAG,IAAI,GAAG,EAAkB,CAAC;YAC
tC,IAAI,SAAS,GAAG,CAAC,CAAC;YAElB,MAAM,SAAS,GAAG,GAAG,EAAE;gBACrB,SAAS,EAAE,CAAC;gBACZ,OAAO,EAAE,CAAC;YACZ,CAAC,CAAC;YAEF,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YACnC,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAEnC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAC5B,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,wCAAwC,EAAE,GAAG,EAAE;YAChD,MAAM,GAAG,GAAG,IAAI,GAAG,EAAkB,CAAC;YACtC,MAAM,SAAS,GAAG,GAAG,EAAE;gBACrB,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;YACrC,CAAC,CAAC;YAEF,MAAM,CAAC,GAAG,EAAE,CAAC,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;QAC9E,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,6CAA6C,EAAE,GAAG,EAAE;YACrD,MAAM,GAAG,GAAG,IAAI,GAAG,EAAqB,CAAC;YACzC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,SAAS,CAAC;YAElC,MAAM,MAAM,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAElD,MAAM,CAAC,MAAM,CAAC,CAAC,aAAa,EAAE,CAAC;YAC/B,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC;QAC1C,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,mBAAmB,EAAE,GAAG,EAAE;QACjC,EAAE,CAAC,6CAA6C,EAAE,GAAG,EAAE;YACrD,MAAM,GAAG,GAAG,IAAI,GAAG,EAAkB,CAAC;YACtC,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,EAAE,CAAC;YAE3B,MAAM,OAAO,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YACnD,MAAM,OAAO,GAAG,IAAA,uBAAU,EAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;YAEnD,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACzB,MAAM,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACzB,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
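The __map_take spec above pins down a simple contract: return the cached value when the key already exists, otherwise call the generator exactly once, store its result, and return it. A minimal sketch consistent with those tests follows; the actual implementation in package/src/utils/__map_take.ts is not shown in this diff, so treat the body as an assumption.

```ts
// Hypothetical sketch of the contract exercised by __map_take.spec.ts above;
// the published implementation may differ in details.
export function __map_take<Key, T>(
  dict: Map<Key, T>,
  key: Key,
  generator: () => T,
): T {
  // Reuse the cached value when the key has been taken before.
  if (dict.has(key)) {
    return dict.get(key) as T;
  }
  // Otherwise generate exactly once, cache, and return.
  const value: T = generator();
  dict.set(key, value);
  return value;
}
```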
package/package.json
CHANGED
package/src/Agentica.ts
CHANGED
@@ -124,9 +124,17 @@ export class Agentica<Model extends ILlmSchema.Model> {
   * function calling information like {@link AgenticaExecuteHistory}.
   *
   * @param content The content to talk
+  * @param options Options
+  * @param options.abortSignal Abort signal
+  * @throws AbortError
   * @returns List of newly created chat prompts
   */
-  public async conversate(
+  public async conversate(
+    content: string | AgenticaUserInputHistory.Contents | Array<AgenticaUserInputHistory.Contents>,
+    options: {
+      abortSignal?: AbortSignal;
+    } = {},
+  ): Promise<AgenticaHistory<Model>[]> {
     const prompt: AgenticaUserInputHistory = createUserInputHistory({
       contents: Array.isArray(content)
         ? content
@@ -147,6 +155,7 @@ export class Agentica<Model extends ILlmSchema.Model> {
     const newbie: AgenticaHistory<Model>[] = await this.executor_(
       this.getContext({
         prompt,
+        abortSignal: options.abortSignal,
         usage: this.token_usage_,
       }),
     );
@@ -219,6 +228,7 @@ export class Agentica<Model extends ILlmSchema.Model> {
  public getContext(props: {
    prompt: AgenticaUserInputHistory;
    usage: AgenticaTokenUsage;
+   abortSignal?: AbortSignal;
  }): AgenticaContext<Model> {
    const dispatch = async (event: AgenticaEvent<Model>) => this.dispatch(event);
    return {
@@ -231,6 +241,7 @@ export class Agentica<Model extends ILlmSchema.Model> {
      stack: this.stack_,
      ready: () => this.ready_,
      prompt: props.prompt,
+     abortSignal: props.abortSignal,

      // HANDLERS
      dispatch: async event => this.dispatch(event),
@@ -246,7 +257,10 @@ export class Agentica<Model extends ILlmSchema.Model> {
            include_usage: true,
          },
        },
-       options:
+       options: {
+         ...this.props.vendor.options,
+         signal: props.abortSignal,
+       },
      });
      await dispatch(event);

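The net effect of the Agentica.ts changes above: conversate() now accepts an optional abortSignal, which getContext() places on the AgenticaContext and forwards to the vendor request options as signal. A usage sketch, assuming an already-constructed and configured Agentica instance named agent (construction omitted):

```ts
// Cancel an in-flight conversation via the new abortSignal option.
// `agent` is assumed to be an already-constructed Agentica instance.
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 30_000); // give up after 30s

try {
  // Per the new doc comment, aborting rejects the call with an AbortError.
  const histories = await agent.conversate("Summarize today's sales", {
    abortSignal: controller.signal,
  });
  console.log(`received ${histories.length} new histories`);
} finally {
  clearTimeout(timeout);
}
```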
package/src/histories/AgenticaUserInputHistory.ts
CHANGED
@@ -1,3 +1,5 @@
+import type typia from "typia";
+
 import type { IAgenticaHistoryJson } from "../json/IAgenticaHistoryJson";

 import type { AgenticaHistoryBase } from "./AgenticaHistoryBase";
@@ -35,7 +37,7 @@ export namespace AgenticaUserInputHistory {
      /**
       * Either a URL of the image or the base64 encoded image data.
       */
-     url: string
+     url: string & typia.tags.Format<"url">;
      /**
       * Specifies the detail level of the image. Learn more in the
       * [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding).
@@ -46,6 +48,8 @@ export namespace AgenticaUserInputHistory {

  /**
   * Learn about [audio inputs](https://platform.openai.com/docs/guides/audio).
+  *
+  * Note: we not recommend it because audio input data only support base64 so it's too big data.
   */
  export interface InputAudio extends ContentsBase<"input_audio"> {
    input_audio: {
@@ -64,24 +68,26 @@ export namespace AgenticaUserInputHistory {
  /**
   * Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text
   * generation.
+  *
+  * Note: we recommend use `file_id` instead of `file_data` because it's too big data.
   */
  export interface File extends ContentsBase<"file"> {
    file: {
      /**
-      * The
-      * string.
+      * The ID of an uploaded file to use as input.
       */
-
-
+     file_id: string;
+   } | {
      /**
-      * The
+      * The base64 encoded file data, used when passing the file to the model as a
+      * string.
       */
-
+     file_data: string;

      /**
       * The name of the file, used when passing the file to the model as a string.
       */
-     filename
+     filename: string;
    };
  }
}
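The repaired File type above is a union: pass either an uploaded file's file_id, or inline file_data plus a filename. A sketch of both shapes follows; it assumes the discriminator declared by ContentsBase<"file"> is a type property, that File is one of the AgenticaUserInputHistory.Contents union members, that the type is re-exported from "@agentica/core", and that agent is the instance from the earlier sketch. The file ID and base64 data are placeholder values.

```ts
import type { AgenticaUserInputHistory } from "@agentica/core";

// File referenced by the ID of a previously uploaded file
// (recommended by the new note in the doc comment).
const byId: AgenticaUserInputHistory.File = {
  type: "file", // assumption: ContentsBase<"file"> contributes this discriminator
  file: { file_id: "file-abc123" }, // placeholder ID
};

// File passed inline as base64 data plus a filename.
const inline: AgenticaUserInputHistory.File = {
  type: "file",
  file: {
    file_data: "JVBERi0xLjQK...", // placeholder, truncated base64
    filename: "report.pdf",
  },
};

// Either shape can be handed to conversate() as user input content.
await agent.conversate([byId, inline]);
```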
package/src/utils/ChatGptCompletionMessageUtil.spec.ts
ADDED
@@ -0,0 +1,320 @@
+import type {
+  ChatCompletion,
+  ChatCompletionChunk,
+  ChatCompletionMessageToolCall,
+} from "openai/resources";
+
+import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
+
+describe("chatGptCompletionMessageUtil", () => {
+  describe("transformCompletionChunk", () => {
+    it("should transform string chunk to ChatCompletionChunk", () => {
+      const chunk = {
+        id: "test-id",
+        choices: [{
+          index: 0,
+          delta: { content: "Hello" },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion.chunk",
+      };
+
+      const result = ChatGptCompletionMessageUtil.transformCompletionChunk(JSON.stringify(chunk));
+      expect(result).toEqual(chunk);
+    });
+
+    it("should transform Uint8Array chunk to ChatCompletionChunk", () => {
+      const chunk = {
+        id: "test-id",
+        choices: [{
+          index: 0,
+          delta: { content: "Hello" },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion.chunk",
+      };
+
+      const uint8Array = new TextEncoder().encode(JSON.stringify(chunk));
+      const result = ChatGptCompletionMessageUtil.transformCompletionChunk(uint8Array);
+      expect(result).toEqual(chunk);
+    });
+
+    it("should handle invalid JSON", () => {
+      expect(() => {
+        ChatGptCompletionMessageUtil.transformCompletionChunk("invalid json");
+      }).toThrow();
+    });
+  });
+
+  describe("accumulate", () => {
+    it("should accumulate content from chunks", () => {
+      const origin: ChatCompletion = {
+        id: "test-id",
+        choices: [{
+          index: 0,
+          // @ts-expect-error - refusal is not required
+          message: { role: "assistant", content: "Hello" },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion",
+      };
+
+      const chunk: ChatCompletionChunk = {
+        id: "test-id",
+        // @ts-expect-error - finish_reason is not required
+        choices: [{
+          index: 0,
+          delta: { content: " World" },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion.chunk",
+      };
+
+      const result = ChatGptCompletionMessageUtil.accumulate(origin, chunk);
+      expect(result.choices[0]?.message.content).toBe("Hello World");
+    });
+
+    it("should accumulate tool calls", () => {
+      const origin: ChatCompletion = {
+        id: "test-id",
+        choices: [{
+          index: 0,
+          // @ts-expect-error - finish_reason is not required
+          message: {
+            role: "assistant",
+            content: null,
+            tool_calls: [{
+              id: "call_1",
+              type: "function",
+              function: {
+                name: "test",
+                arguments: "{\"arg\": \"value\"}",
+              },
+            }],
+          },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion",
+      };
+
+      const chunk: ChatCompletionChunk = {
+        id: "test-id",
+        // @ts-expect-error - finish_reason is not required
+        choices: [{
+          index: 0,
+          delta: {
+            tool_calls: [{
+              index: 0,
+              id: "call_1",
+              function: {
+                name: "_function",
+                arguments: "{\"arg2\": \"value2\"}",
+              },
+            }],
+          },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion.chunk",
+      };
+
+      const result = ChatGptCompletionMessageUtil.accumulate(origin, chunk);
+      expect(result.choices[0]?.message.tool_calls?.[0]?.function.name).toBe("test_function");
+      expect(result.choices[0]?.message.tool_calls?.[0]?.function.arguments).toBe("{\"arg\": \"value\"}{\"arg2\": \"value2\"}");
+    });
+
+    it("should handle usage aggregation", () => {
+      const origin: ChatCompletion = {
+        id: "test-id",
+        choices: [{
+          index: 0,
+          // @ts-expect-error - finish_reason is not required
+          message: { role: "assistant", content: "Hello" },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion",
+        usage: {
+          prompt_tokens: 10,
+          completion_tokens: 5,
+          total_tokens: 15,
+        },
+      };
+
+      const chunk: ChatCompletionChunk = {
+        id: "test-id",
+        // @ts-expect-error - finish_reason is not required
+        choices: [{
+          index: 0,
+          delta: { content: " World" },
+        }],
+        created: 1234567890,
+        model: "gpt-4",
+        object: "chat.completion.chunk",
+        usage: {
+          prompt_tokens: 0,
+          completion_tokens: 6,
+          total_tokens: 6,
+        },
+      };
+
+      const result = ChatGptCompletionMessageUtil.accumulate(origin, chunk);
+      expect(result.usage).toEqual({
+        prompt_tokens: 10,
+        completion_tokens: 11,
+        total_tokens: 21,
+        completion_tokens_details: {
+          accepted_prediction_tokens: 0,
+          reasoning_tokens: 0,
+          rejected_prediction_tokens: 0,
+        },
+        prompt_tokens_details: {
+          audio_tokens: 0,
+          cached_tokens: 0,
+        },
+      });
+    });
+  });
+
+  describe("merge", () => {
+    it("should merge multiple chunks into completion", () => {
+      const chunks: ChatCompletionChunk[] = [
+        {
+          id: "test-id",
+          // @ts-expect-error - finish_reason is not required
+          choices: [{
+            index: 0,
+            delta: { content: "Hello" },
+          }],
+          created: 1234567890,
+          model: "gpt-4",
+          object: "chat.completion.chunk",
+        },
+        {
+          id: "test-id",
+          // @ts-expect-error - finish_reason is not required
+          choices: [{
+            index: 0,
+            delta: { content: " World" },
+          }],
+          created: 1234567890,
+          model: "gpt-4",
+          object: "chat.completion.chunk",
+        },
+      ];
+
+      const result = ChatGptCompletionMessageUtil.merge(chunks);
+      expect(result.choices[0]?.message.content).toBe("Hello World");
+    });
+
+    it("should throw error for empty chunks array", () => {
+      expect(() => {
+        ChatGptCompletionMessageUtil.merge([]);
+      }).toThrow("No chunks received");
+    });
+  });
+
+  describe("mergeChoice", () => {
+    it("should merge finish reason", () => {
+      const acc: ChatCompletion.Choice = {
+        index: 0,
+        // @ts-expect-error - finish_reason is not required
+        message: { role: "assistant", content: "Hello" },
+      };
+
+      const cur: ChatCompletionChunk.Choice = {
+        index: 0,
+        delta: {},
+        finish_reason: "stop",
+      };
+
+      const result = ChatGptCompletionMessageUtil.mergeChoice(acc, cur);
+      expect(result.finish_reason).toBe("stop");
+    });
+
+    it("should merge content", () => {
+      const acc: ChatCompletion.Choice = {
+        index: 0,
+        // @ts-expect-error - refusal is not required
+        message: { role: "assistant", content: "Hello" },
+      };
+
+      // @ts-expect-error - finish_reason is not required
+      const cur: ChatCompletionChunk.Choice = {
+        index: 0,
+        delta: { content: " World" },
+      };
+
+      const result = ChatGptCompletionMessageUtil.mergeChoice(acc, cur);
+      expect(result.message.content).toBe("Hello World");
+    });
+
+    it("should merge refusal", () => {
+      // @ts-expect-error - finish_reason is not required
+      const acc: ChatCompletion.Choice = {
+        index: 0,
+        message: { role: "assistant", content: null, refusal: "I cannot" },
+      };
+
+      // @ts-expect-error - finish_reason is not required
+      const cur: ChatCompletionChunk.Choice = {
+        index: 0,
+        delta: { refusal: " do that" },
+      };
+
+      const result = ChatGptCompletionMessageUtil.mergeChoice(acc, cur);
+      expect(result.message.refusal).toBe("I cannot do that");
+    });
+  });
+
+  describe("mergeToolCalls", () => {
+    it("should merge tool call function arguments", () => {
+      const acc: ChatCompletionMessageToolCall = {
+        id: "call_1",
+        type: "function",
+        function: {
+          name: "test",
+          arguments: "{\"arg\": \"value\"}",
+        },
+      };
+
+      const cur: ChatCompletionChunk.Choice.Delta.ToolCall = {
+        index: 0,
+        id: "call_1",
+        function: {
+          arguments: "{\"arg2\": \"value2\"}",
+        },
+      };
+
+      const result = ChatGptCompletionMessageUtil.mergeToolCalls(acc, cur);
+      expect(result.function.arguments).toBe("{\"arg\": \"value\"}{\"arg2\": \"value2\"}");
+    });
+
+    it("should merge tool call function name", () => {
+      const acc: ChatCompletionMessageToolCall = {
+        id: "call_1",
+        type: "function",
+        function: {
+          name: "test",
+          arguments: "",
+        },
+      };
+
+      const cur: ChatCompletionChunk.Choice.Delta.ToolCall = {
+        index: 0,
+        id: "call_1",
+        function: {
+          name: "_function",
+        },
+      };
+
+      const result = ChatGptCompletionMessageUtil.mergeToolCalls(acc, cur);
+      expect(result.function.name).toBe("test_function");
+    });
+  });
+});
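The spec above documents what ChatGptCompletionMessageUtil does: fold streamed ChatCompletionChunk deltas (content, refusal, tool call names and arguments, token usage) back into a single ChatCompletion. A sketch of the intended flow follows; streaming via the official openai SDK and the relative import path are assumptions, as only merge(), accumulate(), and the "No chunks received" error are shown in the spec.

```ts
import OpenAI from "openai";
import type { ChatCompletionChunk } from "openai/resources";

// Internal utility of @agentica/core; the relative path mirrors the spec's own import.
import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";

async function completeViaStream(client: OpenAI): Promise<void> {
  // Request a streamed completion and collect every chunk.
  const stream = await client.chat.completions.create({
    model: "gpt-4",
    messages: [{ role: "user", content: "Hello" }],
    stream: true,
  });

  const chunks: ChatCompletionChunk[] = [];
  for await (const chunk of stream) {
    chunks.push(chunk);
  }

  // merge() throws "No chunks received" on an empty array; otherwise it
  // accumulates deltas, tool calls, and token usage into one ChatCompletion.
  const completion = ChatGptCompletionMessageUtil.merge(chunks);
  console.log(completion.choices[0]?.message.content);
}
```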