@agentica/core 0.10.3 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/lib/Agentica.d.ts +14 -14
- package/lib/Agentica.js +54 -29
- package/lib/Agentica.js.map +1 -1
- package/lib/chatgpt/ChatGptAgent.d.ts +3 -3
- package/lib/chatgpt/ChatGptAgent.js +1 -1
- package/lib/chatgpt/ChatGptAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptCallFunctionAgent.d.ts +3 -3
- package/lib/chatgpt/ChatGptCallFunctionAgent.js +61 -63
- package/lib/chatgpt/ChatGptCallFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptCancelFunctionAgent.d.ts +6 -6
- package/lib/chatgpt/ChatGptCancelFunctionAgent.js +28 -30
- package/lib/chatgpt/ChatGptCancelFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptCompletionMessageUtil.d.ts +8 -0
- package/lib/chatgpt/ChatGptCompletionMessageUtil.js +536 -0
- package/lib/chatgpt/ChatGptCompletionMessageUtil.js.map +1 -0
- package/lib/chatgpt/ChatGptDescribeFunctionAgent.d.ts +4 -3
- package/lib/chatgpt/ChatGptDescribeFunctionAgent.js +56 -6
- package/lib/chatgpt/ChatGptDescribeFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptHistoryDecoder.d.ts +2 -2
- package/lib/chatgpt/ChatGptHistoryDecoder.js +8 -8
- package/lib/chatgpt/ChatGptHistoryDecoder.js.map +1 -1
- package/lib/chatgpt/ChatGptInitializeFunctionAgent.d.ts +3 -3
- package/lib/chatgpt/ChatGptInitializeFunctionAgent.js +11 -5
- package/lib/chatgpt/ChatGptInitializeFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptSelectFunctionAgent.d.ts +3 -3
- package/lib/chatgpt/ChatGptSelectFunctionAgent.js +39 -42
- package/lib/chatgpt/ChatGptSelectFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptUsageAggregator.d.ts +6 -0
- package/lib/chatgpt/ChatGptUsageAggregator.js +48 -0
- package/lib/chatgpt/ChatGptUsageAggregator.js.map +1 -0
- package/lib/context/AgenticaCancelPrompt.d.ts +16 -0
- package/lib/context/AgenticaCancelPrompt.js +20 -0
- package/lib/context/AgenticaCancelPrompt.js.map +1 -0
- package/lib/context/AgenticaClassOperation.d.ts +8 -0
- package/lib/context/AgenticaClassOperation.js +11 -0
- package/lib/context/AgenticaClassOperation.js.map +1 -0
- package/lib/{structures/IAgenticaContext.d.ts → context/AgenticaContext.d.ts} +15 -14
- package/lib/{structures/IAgenticaPrompt.js → context/AgenticaContext.js} +1 -1
- package/lib/context/AgenticaContext.js.map +1 -0
- package/lib/context/AgenticaHttpOperation.d.ts +8 -0
- package/lib/context/AgenticaHttpOperation.js +11 -0
- package/lib/context/AgenticaHttpOperation.js.map +1 -0
- package/lib/context/AgenticaOperation.d.ts +46 -0
- package/lib/{structures/IAgenticaContext.js → context/AgenticaOperation.js} +1 -1
- package/lib/context/AgenticaOperation.js.map +1 -0
- package/lib/context/AgenticaOperationBase.d.ts +29 -0
- package/lib/context/AgenticaOperationBase.js +21 -0
- package/lib/context/AgenticaOperationBase.js.map +1 -0
- package/lib/{structures/IAgenticaOperationCollection.d.ts → context/AgenticaOperationCollection.d.ts} +6 -6
- package/lib/{structures/IAgenticaOperationSelection.js → context/AgenticaOperationCollection.js} +1 -1
- package/lib/context/AgenticaOperationCollection.js.map +1 -0
- package/lib/context/AgenticaOperationSelection.d.ts +15 -0
- package/lib/context/AgenticaOperationSelection.js +17 -0
- package/lib/context/AgenticaOperationSelection.js.map +1 -0
- package/lib/context/AgenticaTokenUsage.d.ts +82 -0
- package/lib/context/AgenticaTokenUsage.js +97 -0
- package/lib/context/AgenticaTokenUsage.js.map +1 -0
- package/lib/context/internal/AgenticaTokenUsageAggregator.d.ts +10 -0
- package/lib/context/internal/AgenticaTokenUsageAggregator.js +47 -0
- package/lib/context/internal/AgenticaTokenUsageAggregator.js.map +1 -0
- package/lib/context/internal/__IChatCancelFunctionsApplication.js.map +1 -0
- package/lib/context/internal/__IChatFunctionReference.js.map +1 -0
- package/lib/context/internal/__IChatInitialApplication.js.map +1 -0
- package/lib/context/internal/__IChatSelectFunctionsApplication.js.map +1 -0
- package/lib/events/AgenticaCallEvent.d.ts +18 -0
- package/lib/events/AgenticaCallEvent.js +22 -0
- package/lib/events/AgenticaCallEvent.js.map +1 -0
- package/lib/events/AgenticaCancelEvent.d.ts +14 -0
- package/lib/events/AgenticaCancelEvent.js +18 -0
- package/lib/events/AgenticaCancelEvent.js.map +1 -0
- package/lib/events/AgenticaDescribeEvent.d.ts +27 -0
- package/lib/events/AgenticaDescribeEvent.js +40 -0
- package/lib/events/AgenticaDescribeEvent.js.map +1 -0
- package/lib/events/AgenticaEvent.d.ts +25 -0
- package/lib/{structures/IAgenticaEvent.js → events/AgenticaEvent.js} +1 -1
- package/lib/events/AgenticaEvent.js.map +1 -0
- package/lib/events/AgenticaEventBase.d.ts +4 -0
- package/lib/events/AgenticaEventBase.js +10 -0
- package/lib/events/AgenticaEventBase.js.map +1 -0
- package/lib/events/AgenticaEventSource.d.ts +1 -0
- package/lib/events/AgenticaEventSource.js +3 -0
- package/lib/events/AgenticaEventSource.js.map +1 -0
- package/lib/events/AgenticaExecuteEvent.d.ts +22 -0
- package/lib/events/AgenticaExecuteEvent.js +33 -0
- package/lib/events/AgenticaExecuteEvent.js.map +1 -0
- package/lib/events/AgenticaInitializeEvent.d.ts +6 -0
- package/lib/events/AgenticaInitializeEvent.js +16 -0
- package/lib/events/AgenticaInitializeEvent.js.map +1 -0
- package/lib/events/AgenticaRequestEvent.d.ts +27 -0
- package/lib/events/AgenticaRequestEvent.js +22 -0
- package/lib/events/AgenticaRequestEvent.js.map +1 -0
- package/lib/events/AgenticaResponseEvent.d.ts +35 -0
- package/lib/events/AgenticaResponseEvent.js +16 -0
- package/lib/events/AgenticaResponseEvent.js.map +1 -0
- package/lib/events/AgenticaSelectEvent.d.ts +16 -0
- package/lib/events/AgenticaSelectEvent.js +26 -0
- package/lib/events/AgenticaSelectEvent.js.map +1 -0
- package/lib/events/AgenticaTextEvent.d.ts +25 -0
- package/lib/events/AgenticaTextEvent.js +40 -0
- package/lib/events/AgenticaTextEvent.js.map +1 -0
- package/lib/index.d.ts +26 -8
- package/lib/index.js +29 -9
- package/lib/index.js.map +1 -1
- package/lib/index.mjs +1391 -304
- package/lib/index.mjs.map +1 -1
- package/lib/internal/AgenticaOperationComposer.d.ts +2 -2
- package/lib/internal/AgenticaOperationComposer.js +12 -0
- package/lib/internal/AgenticaOperationComposer.js.map +1 -1
- package/lib/internal/ByteArrayUtil.d.ts +3 -0
- package/lib/internal/ByteArrayUtil.js +10 -0
- package/lib/internal/ByteArrayUtil.js.map +1 -0
- package/lib/internal/MPSCUtil.d.ts +21 -0
- package/lib/internal/MPSCUtil.js +84 -0
- package/lib/internal/MPSCUtil.js.map +1 -0
- package/lib/internal/StreamUtil.d.ts +6 -0
- package/lib/internal/StreamUtil.js +65 -0
- package/lib/internal/StreamUtil.js.map +1 -0
- package/lib/json/IAgenticaEventJson.d.ts +149 -0
- package/lib/{structures/IAgenticaOperation.js → json/IAgenticaEventJson.js} +1 -1
- package/lib/json/IAgenticaEventJson.js.map +1 -0
- package/lib/json/IAgenticaOperationJson.d.ts +33 -0
- package/lib/json/IAgenticaOperationJson.js +3 -0
- package/lib/json/IAgenticaOperationJson.js.map +1 -0
- package/lib/json/IAgenticaOperationSelectionJson.d.ts +18 -0
- package/lib/{structures/IAgenticaOperationCollection.js → json/IAgenticaOperationSelectionJson.js} +1 -1
- package/lib/json/IAgenticaOperationSelectionJson.js.map +1 -0
- package/lib/json/IAgenticaPromptJson.d.ts +111 -0
- package/lib/json/IAgenticaPromptJson.js +3 -0
- package/lib/json/IAgenticaPromptJson.js.map +1 -0
- package/lib/{structures/IAgenticaTokenUsage.d.ts → json/IAgenticaTokenUsageJson.d.ts} +10 -10
- package/lib/json/IAgenticaTokenUsageJson.js +3 -0
- package/lib/json/IAgenticaTokenUsageJson.js.map +1 -0
- package/lib/prompts/AgenticaCancelPrompt.d.ts +16 -0
- package/lib/prompts/AgenticaCancelPrompt.js +20 -0
- package/lib/prompts/AgenticaCancelPrompt.js.map +1 -0
- package/lib/prompts/AgenticaDescribePrompt.d.ts +24 -0
- package/lib/prompts/AgenticaDescribePrompt.js +20 -0
- package/lib/prompts/AgenticaDescribePrompt.js.map +1 -0
- package/lib/prompts/AgenticaExecutePrompt.d.ts +20 -0
- package/lib/prompts/AgenticaExecutePrompt.js +24 -0
- package/lib/prompts/AgenticaExecutePrompt.js.map +1 -0
- package/lib/prompts/AgenticaPrompt.d.ts +7 -0
- package/lib/{typings/AgenticaSource.js → prompts/AgenticaPrompt.js} +1 -1
- package/lib/prompts/AgenticaPrompt.js.map +1 -0
- package/lib/prompts/AgenticaPromptBase.d.ts +22 -0
- package/lib/prompts/AgenticaPromptBase.js +22 -0
- package/lib/prompts/AgenticaPromptBase.js.map +1 -0
- package/lib/prompts/AgenticaSelectPrompt.d.ts +16 -0
- package/lib/prompts/AgenticaSelectPrompt.js +20 -0
- package/lib/prompts/AgenticaSelectPrompt.js.map +1 -0
- package/lib/prompts/AgenticaTextPrompt.d.ts +14 -0
- package/lib/prompts/AgenticaTextPrompt.js +20 -0
- package/lib/prompts/AgenticaTextPrompt.js.map +1 -0
- package/lib/structures/IAgenticaConfig.d.ts +3 -3
- package/lib/structures/IAgenticaExecutor.d.ts +12 -11
- package/lib/structures/IAgenticaProps.d.ts +2 -3
- package/lib/structures/IAgenticaSystemPrompt.d.ts +12 -11
- package/lib/transformers/AgenticaEventTransformer.d.ts +45 -0
- package/lib/transformers/AgenticaEventTransformer.js +127 -0
- package/lib/transformers/AgenticaEventTransformer.js.map +1 -0
- package/lib/transformers/AgenticaPromptTransformer.d.ts +34 -0
- package/lib/transformers/AgenticaPromptTransformer.js +90 -0
- package/lib/transformers/AgenticaPromptTransformer.js.map +1 -0
- package/package.json +1 -1
- package/src/Agentica.ts +94 -61
- package/src/chatgpt/ChatGptAgent.ts +8 -7
- package/src/chatgpt/ChatGptCallFunctionAgent.ts +90 -88
- package/src/chatgpt/ChatGptCancelFunctionAgent.ts +51 -58
- package/src/chatgpt/ChatGptCompletionMessageUtil.ts +166 -0
- package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +81 -12
- package/src/chatgpt/ChatGptHistoryDecoder.ts +14 -14
- package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +21 -13
- package/src/chatgpt/ChatGptSelectFunctionAgent.ts +66 -74
- package/src/chatgpt/ChatGptUsageAggregator.ts +62 -0
- package/src/context/AgenticaCancelPrompt.ts +32 -0
- package/src/context/AgenticaClassOperation.ts +23 -0
- package/src/{structures/IAgenticaContext.ts → context/AgenticaContext.ts} +17 -16
- package/src/context/AgenticaHttpOperation.ts +27 -0
- package/src/{structures/IAgenticaOperation.ts → context/AgenticaOperation.ts} +25 -22
- package/src/context/AgenticaOperationBase.ts +57 -0
- package/src/{structures/IAgenticaOperationCollection.ts → context/AgenticaOperationCollection.ts} +6 -6
- package/src/context/AgenticaOperationSelection.ts +27 -0
- package/src/context/AgenticaTokenUsage.ts +170 -0
- package/src/context/internal/AgenticaTokenUsageAggregator.ts +66 -0
- package/src/events/AgenticaCallEvent.ts +36 -0
- package/src/events/AgenticaCancelEvent.ts +28 -0
- package/src/events/AgenticaDescribeEvent.ts +61 -0
- package/src/events/AgenticaEvent.ts +36 -0
- package/src/events/AgenticaEventBase.ts +7 -0
- package/src/{typings/AgenticaSource.ts → events/AgenticaEventSource.ts} +1 -1
- package/src/events/AgenticaExecuteEvent.ts +50 -0
- package/src/events/AgenticaInitializeEvent.ts +14 -0
- package/src/events/AgenticaRequestEvent.ts +45 -0
- package/src/events/AgenticaResponseEvent.ts +48 -0
- package/src/events/AgenticaSelectEvent.ts +37 -0
- package/src/events/AgenticaTextEvent.ts +57 -0
- package/src/index.ts +33 -9
- package/src/internal/AgenticaOperationComposer.ts +24 -15
- package/src/internal/ByteArrayUtil.ts +5 -0
- package/src/internal/MPSCUtil.ts +75 -0
- package/src/internal/StreamUtil.ts +64 -0
- package/src/json/IAgenticaEventJson.ts +178 -0
- package/src/json/IAgenticaOperationJson.ts +36 -0
- package/src/json/IAgenticaOperationSelectionJson.ts +19 -0
- package/src/json/IAgenticaPromptJson.ts +130 -0
- package/src/{structures/IAgenticaTokenUsage.ts → json/IAgenticaTokenUsageJson.ts} +10 -10
- package/src/prompts/AgenticaCancelPrompt.ts +32 -0
- package/src/prompts/AgenticaDescribePrompt.ts +41 -0
- package/src/prompts/AgenticaExecutePrompt.ts +52 -0
- package/src/prompts/AgenticaPrompt.ts +14 -0
- package/src/prompts/AgenticaPromptBase.ts +27 -0
- package/src/prompts/AgenticaSelectPrompt.ts +32 -0
- package/src/prompts/AgenticaTextPrompt.ts +31 -0
- package/src/structures/IAgenticaConfig.ts +3 -3
- package/src/structures/IAgenticaExecutor.ts +14 -13
- package/src/structures/IAgenticaProps.ts +2 -3
- package/src/structures/IAgenticaSystemPrompt.ts +12 -11
- package/src/transformers/AgenticaEventTransformer.ts +165 -0
- package/src/transformers/AgenticaPromptTransformer.ts +134 -0
- package/lib/internal/AgenticaPromptFactory.d.ts +0 -7
- package/lib/internal/AgenticaPromptFactory.js +0 -9
- package/lib/internal/AgenticaPromptFactory.js.map +0 -1
- package/lib/internal/AgenticaPromptTransformer.d.ts +0 -10
- package/lib/internal/AgenticaPromptTransformer.js +0 -58
- package/lib/internal/AgenticaPromptTransformer.js.map +0 -1
- package/lib/internal/AgenticaTokenUsageAggregator.d.ts +0 -11
- package/lib/internal/AgenticaTokenUsageAggregator.js +0 -92
- package/lib/internal/AgenticaTokenUsageAggregator.js.map +0 -1
- package/lib/structures/IAgenticaContext.js.map +0 -1
- package/lib/structures/IAgenticaEvent.d.ts +0 -192
- package/lib/structures/IAgenticaEvent.js.map +0 -1
- package/lib/structures/IAgenticaOperation.d.ts +0 -47
- package/lib/structures/IAgenticaOperation.js.map +0 -1
- package/lib/structures/IAgenticaOperationCollection.js.map +0 -1
- package/lib/structures/IAgenticaOperationSelection.d.ts +0 -50
- package/lib/structures/IAgenticaOperationSelection.js.map +0 -1
- package/lib/structures/IAgenticaPrompt.d.ts +0 -138
- package/lib/structures/IAgenticaPrompt.js.map +0 -1
- package/lib/structures/IAgenticaTokenUsage.js +0 -3
- package/lib/structures/IAgenticaTokenUsage.js.map +0 -1
- package/lib/structures/internal/__IChatCancelFunctionsApplication.js.map +0 -1
- package/lib/structures/internal/__IChatFunctionReference.js.map +0 -1
- package/lib/structures/internal/__IChatInitialApplication.js.map +0 -1
- package/lib/structures/internal/__IChatSelectFunctionsApplication.js.map +0 -1
- package/lib/typings/AgenticaSource.d.ts +0 -1
- package/lib/typings/AgenticaSource.js.map +0 -1
- package/src/internal/AgenticaPromptFactory.ts +0 -32
- package/src/internal/AgenticaPromptTransformer.ts +0 -86
- package/src/internal/AgenticaTokenUsageAggregator.ts +0 -115
- package/src/structures/IAgenticaEvent.ts +0 -229
- package/src/structures/IAgenticaOperationSelection.ts +0 -68
- package/src/structures/IAgenticaPrompt.ts +0 -182
- /package/lib/{structures → context}/internal/__IChatCancelFunctionsApplication.d.ts +0 -0
- /package/lib/{structures → context}/internal/__IChatCancelFunctionsApplication.js +0 -0
- /package/lib/{structures → context}/internal/__IChatFunctionReference.d.ts +0 -0
- /package/lib/{structures → context}/internal/__IChatFunctionReference.js +0 -0
- /package/lib/{structures → context}/internal/__IChatInitialApplication.d.ts +0 -0
- /package/lib/{structures → context}/internal/__IChatInitialApplication.js +0 -0
- /package/lib/{structures → context}/internal/__IChatSelectFunctionsApplication.d.ts +0 -0
- /package/lib/{structures → context}/internal/__IChatSelectFunctionsApplication.js +0 -0
- /package/src/{structures → context}/internal/__IChatCancelFunctionsApplication.ts +0 -0
- /package/src/{structures → context}/internal/__IChatFunctionReference.ts +0 -0
- /package/src/{structures → context}/internal/__IChatInitialApplication.ts +0 -0
- /package/src/{structures → context}/internal/__IChatSelectFunctionsApplication.ts +0 -0
package/lib/index.mjs
CHANGED
|
@@ -1,12 +1,384 @@
|
|
|
1
|
+
import "typia";
|
|
2
|
+
|
|
3
|
+
import { v4 } from "uuid";
|
|
4
|
+
|
|
1
5
|
import * as __typia_transform__accessExpressionAsString from "typia/lib/internal/_accessExpressionAsString.js";
|
|
2
6
|
|
|
3
7
|
import * as __typia_transform__validateReport from "typia/lib/internal/_validateReport.js";
|
|
4
8
|
|
|
5
9
|
import { HttpLlm, OpenApi, ChatGptTypeChecker } from "@samchon/openapi";
|
|
6
10
|
|
|
7
|
-
import "typia";
|
|
11
|
+
import * as __typia_transform__assertGuard from "typia/lib/internal/_assertGuard.js";
|
|
8
12
|
|
|
9
|
-
|
|
13
|
+
class AgenticaOperationSelection {
|
|
14
|
+
constructor(props) {
|
|
15
|
+
this.operation = props.operation;
|
|
16
|
+
this.reason = props.reason;
|
|
17
|
+
}
|
|
18
|
+
toJSON() {
|
|
19
|
+
return {
|
|
20
|
+
operation: this.operation.toJSON(),
|
|
21
|
+
reason: this.reason
|
|
22
|
+
};
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
class AgenticaTokenUsage {
|
|
27
|
+
constructor(props) {
|
|
28
|
+
if (props === undefined) {
|
|
29
|
+
const zero = AgenticaTokenUsage.zero();
|
|
30
|
+
this.aggregate = zero.aggregate;
|
|
31
|
+
this.initialize = zero.initialize;
|
|
32
|
+
this.select = zero.select;
|
|
33
|
+
this.cancel = zero.cancel;
|
|
34
|
+
this.call = zero.call;
|
|
35
|
+
this.describe = zero.describe;
|
|
36
|
+
} else {
|
|
37
|
+
this.aggregate = props.aggregate;
|
|
38
|
+
this.initialize = props.initialize;
|
|
39
|
+
this.select = props.select;
|
|
40
|
+
this.cancel = props.cancel;
|
|
41
|
+
this.call = props.call;
|
|
42
|
+
this.describe = props.describe;
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
increment(y) {
|
|
46
|
+
const increment = (x, y) => {
|
|
47
|
+
x.total += y.total;
|
|
48
|
+
x.input.total += y.input.total;
|
|
49
|
+
x.input.cached += y.input.cached;
|
|
50
|
+
x.output.total += y.output.total;
|
|
51
|
+
x.output.reasoning += y.output.reasoning;
|
|
52
|
+
x.output.accepted_prediction += y.output.accepted_prediction;
|
|
53
|
+
x.output.rejected_prediction += y.output.rejected_prediction;
|
|
54
|
+
};
|
|
55
|
+
increment(this.aggregate, y.aggregate);
|
|
56
|
+
increment(this.initialize, y.initialize);
|
|
57
|
+
increment(this.select, y.select);
|
|
58
|
+
increment(this.cancel, y.cancel);
|
|
59
|
+
increment(this.call, y.call);
|
|
60
|
+
increment(this.describe, y.describe);
|
|
61
|
+
}
|
|
62
|
+
toJSON() {
|
|
63
|
+
return (() => {
|
|
64
|
+
const _co0 = input => ({
|
|
65
|
+
aggregate: _co1(input.aggregate),
|
|
66
|
+
initialize: _co1(input.initialize),
|
|
67
|
+
select: _co1(input.select),
|
|
68
|
+
cancel: _co1(input.cancel),
|
|
69
|
+
call: _co1(input.call),
|
|
70
|
+
describe: _co1(input.describe)
|
|
71
|
+
});
|
|
72
|
+
const _co1 = input => ({
|
|
73
|
+
total: input.total,
|
|
74
|
+
input: _co2(input.input),
|
|
75
|
+
output: _co3(input.output)
|
|
76
|
+
});
|
|
77
|
+
const _co2 = input => ({
|
|
78
|
+
total: input.total,
|
|
79
|
+
cached: input.cached
|
|
80
|
+
});
|
|
81
|
+
const _co3 = input => ({
|
|
82
|
+
total: input.total,
|
|
83
|
+
reasoning: input.reasoning,
|
|
84
|
+
accepted_prediction: input.accepted_prediction,
|
|
85
|
+
rejected_prediction: input.rejected_prediction
|
|
86
|
+
});
|
|
87
|
+
return input => _co0(input);
|
|
88
|
+
})()(this);
|
|
89
|
+
}
|
|
90
|
+
static zero() {
|
|
91
|
+
const component = () => ({
|
|
92
|
+
total: 0,
|
|
93
|
+
input: {
|
|
94
|
+
total: 0,
|
|
95
|
+
cached: 0
|
|
96
|
+
},
|
|
97
|
+
output: {
|
|
98
|
+
total: 0,
|
|
99
|
+
reasoning: 0,
|
|
100
|
+
accepted_prediction: 0,
|
|
101
|
+
rejected_prediction: 0
|
|
102
|
+
}
|
|
103
|
+
});
|
|
104
|
+
return new AgenticaTokenUsage({
|
|
105
|
+
aggregate: component(),
|
|
106
|
+
initialize: component(),
|
|
107
|
+
select: component(),
|
|
108
|
+
cancel: component(),
|
|
109
|
+
call: component(),
|
|
110
|
+
describe: component()
|
|
111
|
+
});
|
|
112
|
+
}
|
|
113
|
+
static plus(x, y) {
|
|
114
|
+
const z = new AgenticaTokenUsage(x);
|
|
115
|
+
z.increment(y.toJSON());
|
|
116
|
+
return z;
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
class AgenticaPromptBase {
|
|
121
|
+
constructor(type) {
|
|
122
|
+
this.type = type;
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
let AgenticaCancelPrompt$1 = class AgenticaCancelPrompt extends AgenticaPromptBase {
|
|
127
|
+
constructor(props) {
|
|
128
|
+
super("cancel");
|
|
129
|
+
this.id = props.id;
|
|
130
|
+
this.selections = props.selections;
|
|
131
|
+
}
|
|
132
|
+
toJSON() {
|
|
133
|
+
return {
|
|
134
|
+
type: this.type,
|
|
135
|
+
id: this.id,
|
|
136
|
+
selections: this.selections.map((s => s.toJSON()))
|
|
137
|
+
};
|
|
138
|
+
}
|
|
139
|
+
};
|
|
140
|
+
|
|
141
|
+
class AgenticaDescribePrompt extends AgenticaPromptBase {
|
|
142
|
+
constructor(props) {
|
|
143
|
+
super("describe");
|
|
144
|
+
this.executes = props.executes;
|
|
145
|
+
this.text = props.text;
|
|
146
|
+
}
|
|
147
|
+
toJSON() {
|
|
148
|
+
return {
|
|
149
|
+
type: this.type,
|
|
150
|
+
executions: this.executes.map((e => e.toJSON())),
|
|
151
|
+
text: this.text
|
|
152
|
+
};
|
|
153
|
+
}
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
class AgenticaExecutePrompt extends AgenticaPromptBase {
|
|
157
|
+
constructor(props) {
|
|
158
|
+
super("execute");
|
|
159
|
+
this.id = props.id;
|
|
160
|
+
this.operation = props.operation;
|
|
161
|
+
this.arguments = props.arguments;
|
|
162
|
+
this.value = props.value;
|
|
163
|
+
}
|
|
164
|
+
toJSON() {
|
|
165
|
+
return {
|
|
166
|
+
type: this.type,
|
|
167
|
+
id: this.id,
|
|
168
|
+
operation: this.operation.toJSON(),
|
|
169
|
+
arguments: this.arguments,
|
|
170
|
+
value: this.value
|
|
171
|
+
};
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
class AgenticaSelectPrompt extends AgenticaPromptBase {
|
|
176
|
+
constructor(props) {
|
|
177
|
+
super("select");
|
|
178
|
+
this.id = props.id;
|
|
179
|
+
this.selections = props.selections;
|
|
180
|
+
}
|
|
181
|
+
toJSON() {
|
|
182
|
+
return {
|
|
183
|
+
type: this.type,
|
|
184
|
+
id: this.id,
|
|
185
|
+
selections: this.selections.map((s => s.toJSON()))
|
|
186
|
+
};
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
class AgenticaTextPrompt extends AgenticaPromptBase {
|
|
191
|
+
constructor(props) {
|
|
192
|
+
super("text");
|
|
193
|
+
this.role = props.role;
|
|
194
|
+
this.text = props.text;
|
|
195
|
+
}
|
|
196
|
+
toJSON() {
|
|
197
|
+
return {
|
|
198
|
+
type: this.type,
|
|
199
|
+
role: this.role,
|
|
200
|
+
text: this.text
|
|
201
|
+
};
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
class AgenticaEventBase {
|
|
206
|
+
constructor(type) {
|
|
207
|
+
this.type = type;
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
class AgenticaCallEvent extends AgenticaEventBase {
|
|
212
|
+
constructor(props) {
|
|
213
|
+
super("call");
|
|
214
|
+
this.id = props.id;
|
|
215
|
+
this.operation = props.operation;
|
|
216
|
+
this.arguments = props.arguments;
|
|
217
|
+
}
|
|
218
|
+
toJSON() {
|
|
219
|
+
return {
|
|
220
|
+
type: "call",
|
|
221
|
+
id: this.id,
|
|
222
|
+
operation: this.operation.toJSON(),
|
|
223
|
+
arguments: this.arguments
|
|
224
|
+
};
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
class AgenticaCancelEvent extends AgenticaEventBase {
|
|
229
|
+
constructor(props) {
|
|
230
|
+
super("cancel");
|
|
231
|
+
this.selection = props.selection;
|
|
232
|
+
}
|
|
233
|
+
toJSON() {
|
|
234
|
+
return {
|
|
235
|
+
type: "cancel",
|
|
236
|
+
selection: this.selection.toJSON()
|
|
237
|
+
};
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
class AgenticaDescribeEvent extends AgenticaEventBase {
|
|
242
|
+
get text() {
|
|
243
|
+
return this.get_();
|
|
244
|
+
}
|
|
245
|
+
get done() {
|
|
246
|
+
return this.done_();
|
|
247
|
+
}
|
|
248
|
+
constructor(props) {
|
|
249
|
+
super("describe");
|
|
250
|
+
this.stream = props.stream;
|
|
251
|
+
this.executes = props.executes;
|
|
252
|
+
this.done_ = props.done;
|
|
253
|
+
this.get_ = props.get;
|
|
254
|
+
this.join_ = props.join;
|
|
255
|
+
}
|
|
256
|
+
join() {
|
|
257
|
+
return this.join_();
|
|
258
|
+
}
|
|
259
|
+
toJSON() {
|
|
260
|
+
return {
|
|
261
|
+
type: "describe",
|
|
262
|
+
executes: this.executes.map((e => e.toJSON())),
|
|
263
|
+
text: this.text,
|
|
264
|
+
done: this.done
|
|
265
|
+
};
|
|
266
|
+
}
|
|
267
|
+
toPrompt() {
|
|
268
|
+
return new AgenticaDescribePrompt({
|
|
269
|
+
executes: this.executes,
|
|
270
|
+
text: this.text
|
|
271
|
+
});
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
class AgenticaExecuteEvent extends AgenticaEventBase {
|
|
276
|
+
constructor(props) {
|
|
277
|
+
super("execute");
|
|
278
|
+
this.id = props.id;
|
|
279
|
+
this.operation = props.operation;
|
|
280
|
+
this.arguments = props.arguments;
|
|
281
|
+
this.value = props.value;
|
|
282
|
+
}
|
|
283
|
+
toJSON() {
|
|
284
|
+
return {
|
|
285
|
+
type: "execute",
|
|
286
|
+
id: this.id,
|
|
287
|
+
operation: this.operation.toJSON(),
|
|
288
|
+
arguments: this.arguments,
|
|
289
|
+
value: this.value
|
|
290
|
+
};
|
|
291
|
+
}
|
|
292
|
+
toPrompt() {
|
|
293
|
+
return new AgenticaExecutePrompt({
|
|
294
|
+
id: this.id,
|
|
295
|
+
operation: this.operation,
|
|
296
|
+
arguments: this.arguments,
|
|
297
|
+
value: this.value
|
|
298
|
+
});
|
|
299
|
+
}
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
class AgenticaRequestEvent extends AgenticaEventBase {
|
|
303
|
+
constructor(props) {
|
|
304
|
+
super("request");
|
|
305
|
+
this.source = props.source;
|
|
306
|
+
this.body = props.body;
|
|
307
|
+
this.options = props.options;
|
|
308
|
+
}
|
|
309
|
+
toJSON() {
|
|
310
|
+
return {
|
|
311
|
+
type: "request",
|
|
312
|
+
source: this.source,
|
|
313
|
+
body: this.body,
|
|
314
|
+
options: this.options
|
|
315
|
+
};
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
class AgenticaResponseEvent extends AgenticaEventBase {
|
|
320
|
+
constructor(props) {
|
|
321
|
+
super("response");
|
|
322
|
+
this.source = props.source;
|
|
323
|
+
this.body = props.body;
|
|
324
|
+
this.stream = props.stream;
|
|
325
|
+
this.options = props.options;
|
|
326
|
+
this.join = props.join;
|
|
327
|
+
}
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
class AgenticaSelectEvent extends AgenticaEventBase {
|
|
331
|
+
constructor(props) {
|
|
332
|
+
super("select");
|
|
333
|
+
this.selection = props.selection;
|
|
334
|
+
}
|
|
335
|
+
toJSON() {
|
|
336
|
+
return {
|
|
337
|
+
type: "select",
|
|
338
|
+
selection: this.selection.toJSON()
|
|
339
|
+
};
|
|
340
|
+
}
|
|
341
|
+
toPrompt() {
|
|
342
|
+
return new AgenticaSelectPrompt({
|
|
343
|
+
id: v4(),
|
|
344
|
+
selections: [ this.selection ]
|
|
345
|
+
});
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
|
|
349
|
+
class AgenticaTextEvent extends AgenticaEventBase {
|
|
350
|
+
get text() {
|
|
351
|
+
return this.get_();
|
|
352
|
+
}
|
|
353
|
+
get done() {
|
|
354
|
+
return this.done_();
|
|
355
|
+
}
|
|
356
|
+
constructor(props) {
|
|
357
|
+
super("text");
|
|
358
|
+
this.role = props.role;
|
|
359
|
+
this.stream = props.stream;
|
|
360
|
+
this.done_ = props.done;
|
|
361
|
+
this.get_ = props.get;
|
|
362
|
+
this.join_ = props.join;
|
|
363
|
+
}
|
|
364
|
+
join() {
|
|
365
|
+
return this.join_();
|
|
366
|
+
}
|
|
367
|
+
toJSON() {
|
|
368
|
+
return {
|
|
369
|
+
type: "text",
|
|
370
|
+
role: this.role,
|
|
371
|
+
text: this.text,
|
|
372
|
+
done: this.done
|
|
373
|
+
};
|
|
374
|
+
}
|
|
375
|
+
toPrompt() {
|
|
376
|
+
return new AgenticaTextPrompt({
|
|
377
|
+
role: this.role,
|
|
378
|
+
text: this.text
|
|
379
|
+
});
|
|
380
|
+
}
|
|
381
|
+
}
|
|
10
382
|
|
|
11
383
|
const createHttpLlmApplication = props => {
|
|
12
384
|
const inspect = (() => {
|
|
@@ -7549,6 +7921,21 @@ const createHttpLlmApplication = props => {
|
|
|
7549
7921
|
};
|
|
7550
7922
|
};
|
|
7551
7923
|
|
|
7924
|
+
class AgenticaCancelPrompt extends AgenticaPromptBase {
|
|
7925
|
+
constructor(props) {
|
|
7926
|
+
super("cancel");
|
|
7927
|
+
this.id = props.id;
|
|
7928
|
+
this.selections = props.selections;
|
|
7929
|
+
}
|
|
7930
|
+
toJSON() {
|
|
7931
|
+
return {
|
|
7932
|
+
type: this.type,
|
|
7933
|
+
id: this.id,
|
|
7934
|
+
selections: this.selections.map((s => s.toJSON()))
|
|
7935
|
+
};
|
|
7936
|
+
}
|
|
7937
|
+
}
|
|
7938
|
+
|
|
7552
7939
|
var AgenticaConstant;
|
|
7553
7940
|
|
|
7554
7941
|
(function(AgenticaConstant) {
|
|
@@ -7600,26 +7987,616 @@ const isNode = new Singleton((() => {
|
|
|
7600
7987
|
return typeof global === "object" && isObject(global) && isObject(global.process) && isObject(global.process.versions) && typeof global.process.versions.node !== "undefined";
|
|
7601
7988
|
}));
|
|
7602
7989
|
|
|
7603
|
-
var
|
|
7990
|
+
var StreamUtil;
|
|
7604
7991
|
|
|
7605
|
-
(function(
|
|
7606
|
-
|
|
7607
|
-
|
|
7608
|
-
|
|
7609
|
-
|
|
7610
|
-
|
|
7611
|
-
|
|
7612
|
-
|
|
7992
|
+
(function(StreamUtil) {
|
|
7993
|
+
StreamUtil.readAll = async stream => {
|
|
7994
|
+
const reader = stream.getReader();
|
|
7995
|
+
const result = [];
|
|
7996
|
+
while (true) {
|
|
7997
|
+
const {done, value} = await reader.read();
|
|
7998
|
+
if (done) break;
|
|
7999
|
+
result.push(value);
|
|
8000
|
+
}
|
|
8001
|
+
return result;
|
|
8002
|
+
};
|
|
8003
|
+
StreamUtil.reduce = async (stream, reducer, initial) => {
|
|
8004
|
+
const reader = stream.getReader();
|
|
8005
|
+
let acc = initial ?? null;
|
|
8006
|
+
while (true) {
|
|
8007
|
+
const {done, value} = await reader.read();
|
|
8008
|
+
if (done) break;
|
|
8009
|
+
if (acc === null) {
|
|
8010
|
+
acc = value;
|
|
8011
|
+
continue;
|
|
8012
|
+
}
|
|
8013
|
+
acc = reducer(acc, value);
|
|
8014
|
+
}
|
|
8015
|
+
return acc;
|
|
8016
|
+
};
|
|
8017
|
+
StreamUtil.to = value => {
|
|
8018
|
+
const stream = new ReadableStream({
|
|
8019
|
+
start: controller => {
|
|
8020
|
+
controller.enqueue(value);
|
|
8021
|
+
controller.close();
|
|
8022
|
+
}
|
|
8023
|
+
});
|
|
8024
|
+
return stream;
|
|
8025
|
+
};
|
|
8026
|
+
StreamUtil.transform = (stream, transformer) => {
|
|
8027
|
+
const reader = stream.getReader();
|
|
8028
|
+
return new ReadableStream({
|
|
8029
|
+
pull: async controller => {
|
|
8030
|
+
const {done, value} = await reader.read();
|
|
8031
|
+
if (!done) {
|
|
8032
|
+
controller.enqueue(transformer(value));
|
|
8033
|
+
} else {
|
|
8034
|
+
controller.close();
|
|
8035
|
+
}
|
|
8036
|
+
}
|
|
8037
|
+
});
|
|
8038
|
+
};
|
|
8039
|
+
})(StreamUtil || (StreamUtil = {}));
|
|
8040
|
+
|
|
8041
|
+
var ByteArrayUtil;
|
|
8042
|
+
|
|
8043
|
+
(function(ByteArrayUtil) {
|
|
8044
|
+
ByteArrayUtil.toUtf8 = byteArray => (new TextDecoder).decode(byteArray);
|
|
8045
|
+
})(ByteArrayUtil || (ByteArrayUtil = {}));
|
|
8046
|
+
|
|
8047
|
+
var ChatGptUsageAggregator;
|
|
8048
|
+
|
|
8049
|
+
(function(ChatGptUsageAggregator) {
|
|
8050
|
+
ChatGptUsageAggregator.sumCompletionTokenDetail = (x, y) => ({
|
|
8051
|
+
accepted_prediction_tokens: (x.accepted_prediction_tokens ?? 0) + (y.accepted_prediction_tokens ?? 0),
|
|
8052
|
+
reasoning_tokens: (x.reasoning_tokens ?? 0) + (y.reasoning_tokens ?? 0),
|
|
8053
|
+
rejected_prediction_tokens: (x.rejected_prediction_tokens ?? 0) + (y.rejected_prediction_tokens ?? 0)
|
|
7613
8054
|
});
|
|
7614
|
-
|
|
7615
|
-
|
|
7616
|
-
|
|
7617
|
-
|
|
7618
|
-
|
|
7619
|
-
|
|
8055
|
+
ChatGptUsageAggregator.sumPromptTokenDetail = (x, y) => ({
|
|
8056
|
+
audio_tokens: (x.audio_tokens ?? 0) + (y.audio_tokens ?? 0),
|
|
8057
|
+
cached_tokens: (x.cached_tokens ?? 0) + (y.cached_tokens ?? 0)
|
|
8058
|
+
});
|
|
8059
|
+
ChatGptUsageAggregator.sum = (x, y) => ({
|
|
8060
|
+
prompt_tokens: (x.prompt_tokens ?? 0) + (y.prompt_tokens ?? 0),
|
|
8061
|
+
completion_tokens: (x.completion_tokens ?? 0) + (y.completion_tokens ?? 0),
|
|
8062
|
+
total_tokens: (x.total_tokens ?? 0) + (y.total_tokens ?? 0),
|
|
8063
|
+
completion_tokens_details: ChatGptUsageAggregator.sumCompletionTokenDetail(x.completion_tokens_details ?? {
|
|
8064
|
+
accepted_prediction_tokens: 0,
|
|
8065
|
+
reasoning_tokens: 0,
|
|
8066
|
+
rejected_prediction_tokens: 0
|
|
8067
|
+
}, y.completion_tokens_details ?? {
|
|
8068
|
+
accepted_prediction_tokens: 0,
|
|
8069
|
+
reasoning_tokens: 0,
|
|
8070
|
+
rejected_prediction_tokens: 0
|
|
8071
|
+
}),
|
|
8072
|
+
prompt_tokens_details: ChatGptUsageAggregator.sumPromptTokenDetail(x.prompt_tokens_details ?? {
|
|
8073
|
+
audio_tokens: 0,
|
|
8074
|
+
cached_tokens: 0
|
|
8075
|
+
}, y.prompt_tokens_details ?? {
|
|
8076
|
+
audio_tokens: 0,
|
|
8077
|
+
cached_tokens: 0
|
|
7620
8078
|
})
|
|
7621
8079
|
});
|
|
7622
|
-
})(
|
|
8080
|
+
})(ChatGptUsageAggregator || (ChatGptUsageAggregator = {}));
|
|
8081
|
+
|
|
8082
|
+
var ChatGptCompletionMessageUtil;
|
|
8083
|
+
|
|
8084
|
+
(function(ChatGptCompletionMessageUtil) {
|
|
8085
|
+
// Parses a single streamed SSE payload from the OpenAI chat-completion API
// into a validated `ChatCompletionChunk`.
//
// `source` may be a raw UTF-8 `Uint8Array` network chunk or an
// already-decoded JSON string; bytes are decoded via `ByteArrayUtil.toUtf8`.
// Throws (via `_errorFactory` / typia's assert guard) when the parsed JSON
// does not match the `ChatCompletionChunk` shape.
//
// Everything inside the IIFE below is machine-generated by the `typia`
// transformer (the expansion of `typia.json.assertParse<ChatCompletionChunk>`).
// The `_io*` closures are fast boolean structural checks; the `_ao*` closures
// re-walk the value and report the exact failing property path through
// `__typia_transform__assertGuard._assertGuard`. Do not edit by hand —
// regenerate from the TypeScript source instead.
ChatGptCompletionMessageUtil.transformCompletionChunk = source => {
    // Normalize the input to a JSON string before parsing.
    const str = source instanceof Uint8Array ? ByteArrayUtil.toUtf8(source) : source;
    return (() => {
        // ---- fast structural predicates (no error reporting) ----
        // _io0: top-level ChatCompletionChunk shape.
        const _io0 = input => "string" === typeof input.id && (Array.isArray(input.choices) && input.choices.every((elem => "object" === typeof elem && null !== elem && _io1(elem)))) && "number" === typeof input.created && "string" === typeof input.model && "chat.completion.chunk" === input.object && (null === input.service_tier || undefined === input.service_tier || "scale" === input.service_tier || "default" === input.service_tier) && (undefined === input.system_fingerprint || "string" === typeof input.system_fingerprint) && (null === input.usage || undefined === input.usage || "object" === typeof input.usage && null !== input.usage && _io9(input.usage));
        // _io1: ChatCompletionChunk.Choice.
        const _io1 = input => "object" === typeof input.delta && null !== input.delta && false === Array.isArray(input.delta) && _io2(input.delta) && (null === input.finish_reason || "function_call" === input.finish_reason || "stop" === input.finish_reason || "length" === input.finish_reason || "tool_calls" === input.finish_reason || "content_filter" === input.finish_reason) && "number" === typeof input.index && (null === input.logprobs || undefined === input.logprobs || "object" === typeof input.logprobs && null !== input.logprobs && _io6(input.logprobs));
        // _io2: Choice.Delta.
        const _io2 = input => (null === input.content || undefined === input.content || "string" === typeof input.content) && (undefined === input.function_call || "object" === typeof input.function_call && null !== input.function_call && false === Array.isArray(input.function_call) && _io3(input.function_call)) && (null === input.refusal || undefined === input.refusal || "string" === typeof input.refusal) && (undefined === input.role || "assistant" === input.role || "user" === input.role || "developer" === input.role || "system" === input.role || "tool" === input.role) && (undefined === input.tool_calls || Array.isArray(input.tool_calls) && input.tool_calls.every((elem => "object" === typeof elem && null !== elem && _io4(elem))));
        // _io3: Delta.FunctionCall (deprecated function-call form).
        const _io3 = input => (undefined === input.arguments || "string" === typeof input.arguments) && (undefined === input.name || "string" === typeof input.name);
        // _io4: Delta.ToolCall.
        const _io4 = input => "number" === typeof input.index && (undefined === input.id || "string" === typeof input.id) && (undefined === input["function"] || "object" === typeof input["function"] && null !== input["function"] && false === Array.isArray(input["function"]) && _io5(input["function"])) && (undefined === input.type || "function" === input.type);
        // _io5: Delta.ToolCall.Function.
        const _io5 = input => (undefined === input.arguments || "string" === typeof input.arguments) && (undefined === input.name || "string" === typeof input.name);
        // _io6: Choice.Logprobs.
        const _io6 = input => (null === input.content || Array.isArray(input.content) && input.content.every((elem => "object" === typeof elem && null !== elem && _io7(elem)))) && (null === input.refusal || Array.isArray(input.refusal) && input.refusal.every((elem => "object" === typeof elem && null !== elem && _io7(elem))));
        // _io7: ChatCompletionTokenLogprob.
        const _io7 = input => "string" === typeof input.token && (null === input.bytes || Array.isArray(input.bytes) && input.bytes.every((elem => "number" === typeof elem))) && "number" === typeof input.logprob && (Array.isArray(input.top_logprobs) && input.top_logprobs.every((elem => "object" === typeof elem && null !== elem && _io8(elem))));
        // _io8: ChatCompletionTokenLogprob.TopLogprob.
        const _io8 = input => "string" === typeof input.token && (null === input.bytes || Array.isArray(input.bytes) && input.bytes.every((elem => "number" === typeof elem))) && "number" === typeof input.logprob;
        // _io9: CompletionUsage.
        const _io9 = input => "number" === typeof input.completion_tokens && "number" === typeof input.prompt_tokens && "number" === typeof input.total_tokens && (undefined === input.completion_tokens_details || "object" === typeof input.completion_tokens_details && null !== input.completion_tokens_details && false === Array.isArray(input.completion_tokens_details) && _io10(input.completion_tokens_details)) && (undefined === input.prompt_tokens_details || "object" === typeof input.prompt_tokens_details && null !== input.prompt_tokens_details && false === Array.isArray(input.prompt_tokens_details) && _io11(input.prompt_tokens_details));
        // _io10: CompletionUsage.CompletionTokensDetails.
        const _io10 = input => (undefined === input.accepted_prediction_tokens || "number" === typeof input.accepted_prediction_tokens) && (undefined === input.audio_tokens || "number" === typeof input.audio_tokens) && (undefined === input.reasoning_tokens || "number" === typeof input.reasoning_tokens) && (undefined === input.rejected_prediction_tokens || "number" === typeof input.rejected_prediction_tokens);
        // _io11: CompletionUsage.PromptTokensDetails.
        const _io11 = input => (undefined === input.audio_tokens || "number" === typeof input.audio_tokens) && (undefined === input.cached_tokens || "number" === typeof input.cached_tokens);
        // ---- asserting validators: same shape checks, but each failed clause
        // funnels into `_assertGuard` with the exact property path ----
        const _ao0 = (input, _path, _exceptionable = true) => ("string" === typeof input.id || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".id", expected: "string", value: input.id }, _errorFactory)) && ((Array.isArray(input.choices) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".choices", expected: "Array<ChatCompletionChunk.Choice>", value: input.choices }, _errorFactory)) && input.choices.every(((elem, _index8) => ("object" === typeof elem && null !== elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".choices[" + _index8 + "]", expected: "ChatCompletionChunk.Choice", value: elem }, _errorFactory)) && _ao1(elem, _path + ".choices[" + _index8 + "]", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".choices[" + _index8 + "]", expected: "ChatCompletionChunk.Choice", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".choices", expected: "Array<ChatCompletionChunk.Choice>", value: input.choices }, _errorFactory)) && ("number" === typeof input.created || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".created", expected: "number", value: input.created }, _errorFactory)) && ("string" === typeof input.model || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".model", expected: "string", value: input.model }, _errorFactory)) && ("chat.completion.chunk" === input.object || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".object", expected: '"chat.completion.chunk"', value: input.object }, _errorFactory)) && (null === input.service_tier || undefined === input.service_tier || "scale" === input.service_tier || "default" === input.service_tier || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".service_tier", expected: '("default" | "scale" | null | undefined)', value: input.service_tier }, _errorFactory)) && (undefined === input.system_fingerprint || "string" === typeof input.system_fingerprint || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".system_fingerprint", expected: "(string | undefined)", value: input.system_fingerprint }, _errorFactory)) && (null === input.usage || undefined === input.usage || ("object" === typeof input.usage && null !== input.usage || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".usage", expected: "(CompletionUsage | null | undefined)", value: input.usage }, _errorFactory)) && _ao9(input.usage, _path + ".usage", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".usage", expected: "(CompletionUsage | null | undefined)", value: input.usage }, _errorFactory));
        const _ao1 = (input, _path, _exceptionable = true) => (("object" === typeof input.delta && null !== input.delta && false === Array.isArray(input.delta) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".delta", expected: "ChatCompletionChunk.Choice.Delta", value: input.delta }, _errorFactory)) && _ao2(input.delta, _path + ".delta", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".delta", expected: "ChatCompletionChunk.Choice.Delta", value: input.delta }, _errorFactory)) && (null === input.finish_reason || "function_call" === input.finish_reason || "stop" === input.finish_reason || "length" === input.finish_reason || "tool_calls" === input.finish_reason || "content_filter" === input.finish_reason || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".finish_reason", expected: '("content_filter" | "function_call" | "length" | "stop" | "tool_calls" | null)', value: input.finish_reason }, _errorFactory)) && ("number" === typeof input.index || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".index", expected: "number", value: input.index }, _errorFactory)) && (null === input.logprobs || undefined === input.logprobs || ("object" === typeof input.logprobs && null !== input.logprobs || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".logprobs", expected: "(ChatCompletionChunk.Choice.Logprobs | null | undefined)", value: input.logprobs }, _errorFactory)) && _ao6(input.logprobs, _path + ".logprobs", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".logprobs", expected: "(ChatCompletionChunk.Choice.Logprobs | null | undefined)", value: input.logprobs }, _errorFactory));
        const _ao2 = (input, _path, _exceptionable = true) => (null === input.content || undefined === input.content || "string" === typeof input.content || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".content", expected: "(null | string | undefined)", value: input.content }, _errorFactory)) && (undefined === input.function_call || ("object" === typeof input.function_call && null !== input.function_call && false === Array.isArray(input.function_call) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".function_call", expected: "(ChatCompletionChunk.Choice.Delta.FunctionCall | undefined)", value: input.function_call }, _errorFactory)) && _ao3(input.function_call, _path + ".function_call", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".function_call", expected: "(ChatCompletionChunk.Choice.Delta.FunctionCall | undefined)", value: input.function_call }, _errorFactory)) && (null === input.refusal || undefined === input.refusal || "string" === typeof input.refusal || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".refusal", expected: "(null | string | undefined)", value: input.refusal }, _errorFactory)) && (undefined === input.role || "assistant" === input.role || "user" === input.role || "developer" === input.role || "system" === input.role || "tool" === input.role || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".role", expected: '("assistant" | "developer" | "system" | "tool" | "user" | undefined)', value: input.role }, _errorFactory)) && (undefined === input.tool_calls || (Array.isArray(input.tool_calls) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".tool_calls", expected: "(Array<ChatCompletionChunk.Choice.Delta.ToolCall> | undefined)", value: input.tool_calls }, _errorFactory)) && input.tool_calls.every(((elem, _index9) => ("object" === typeof elem && null !== elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".tool_calls[" + _index9 + "]", expected: "ChatCompletionChunk.Choice.Delta.ToolCall", value: elem }, _errorFactory)) && _ao4(elem, _path + ".tool_calls[" + _index9 + "]", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".tool_calls[" + _index9 + "]", expected: "ChatCompletionChunk.Choice.Delta.ToolCall", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".tool_calls", expected: "(Array<ChatCompletionChunk.Choice.Delta.ToolCall> | undefined)", value: input.tool_calls }, _errorFactory));
        const _ao3 = (input, _path, _exceptionable = true) => (undefined === input.arguments || "string" === typeof input.arguments || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".arguments", expected: "(string | undefined)", value: input.arguments }, _errorFactory)) && (undefined === input.name || "string" === typeof input.name || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".name", expected: "(string | undefined)", value: input.name }, _errorFactory));
        const _ao4 = (input, _path, _exceptionable = true) => ("number" === typeof input.index || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".index", expected: "number", value: input.index }, _errorFactory)) && (undefined === input.id || "string" === typeof input.id || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".id", expected: "(string | undefined)", value: input.id }, _errorFactory)) && (undefined === input["function"] || ("object" === typeof input["function"] && null !== input["function"] && false === Array.isArray(input["function"]) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + '["function"]', expected: "(ChatCompletionChunk.Choice.Delta.ToolCall.Function | undefined)", value: input["function"] }, _errorFactory)) && _ao5(input["function"], _path + '["function"]', _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + '["function"]', expected: "(ChatCompletionChunk.Choice.Delta.ToolCall.Function | undefined)", value: input["function"] }, _errorFactory)) && (undefined === input.type || "function" === input.type || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".type", expected: '("function" | undefined)', value: input.type }, _errorFactory));
        const _ao5 = (input, _path, _exceptionable = true) => (undefined === input.arguments || "string" === typeof input.arguments || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".arguments", expected: "(string | undefined)", value: input.arguments }, _errorFactory)) && (undefined === input.name || "string" === typeof input.name || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".name", expected: "(string | undefined)", value: input.name }, _errorFactory));
        const _ao6 = (input, _path, _exceptionable = true) => (null === input.content || (Array.isArray(input.content) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".content", expected: "(Array<ChatCompletionTokenLogprob> | null)", value: input.content }, _errorFactory)) && input.content.every(((elem, _index10) => ("object" === typeof elem && null !== elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".content[" + _index10 + "]", expected: "ChatCompletionTokenLogprob", value: elem }, _errorFactory)) && _ao7(elem, _path + ".content[" + _index10 + "]", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".content[" + _index10 + "]", expected: "ChatCompletionTokenLogprob", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".content", expected: "(Array<ChatCompletionTokenLogprob> | null)", value: input.content }, _errorFactory)) && (null === input.refusal || (Array.isArray(input.refusal) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".refusal", expected: "(Array<ChatCompletionTokenLogprob> | null)", value: input.refusal }, _errorFactory)) && input.refusal.every(((elem, _index11) => ("object" === typeof elem && null !== elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".refusal[" + _index11 + "]", expected: "ChatCompletionTokenLogprob", value: elem }, _errorFactory)) && _ao7(elem, _path + ".refusal[" + _index11 + "]", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".refusal[" + _index11 + "]", expected: "ChatCompletionTokenLogprob", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".refusal", expected: "(Array<ChatCompletionTokenLogprob> | null)", value: input.refusal }, _errorFactory));
        const _ao7 = (input, _path, _exceptionable = true) => ("string" === typeof input.token || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".token", expected: "string", value: input.token }, _errorFactory)) && (null === input.bytes || (Array.isArray(input.bytes) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".bytes", expected: "(Array<number> | null)", value: input.bytes }, _errorFactory)) && input.bytes.every(((elem, _index12) => "number" === typeof elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".bytes[" + _index12 + "]", expected: "number", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".bytes", expected: "(Array<number> | null)", value: input.bytes }, _errorFactory)) && ("number" === typeof input.logprob || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".logprob", expected: "number", value: input.logprob }, _errorFactory)) && ((Array.isArray(input.top_logprobs) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".top_logprobs", expected: "Array<ChatCompletionTokenLogprob.TopLogprob>", value: input.top_logprobs }, _errorFactory)) && input.top_logprobs.every(((elem, _index13) => ("object" === typeof elem && null !== elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".top_logprobs[" + _index13 + "]", expected: "ChatCompletionTokenLogprob.TopLogprob", value: elem }, _errorFactory)) && _ao8(elem, _path + ".top_logprobs[" + _index13 + "]", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".top_logprobs[" + _index13 + "]", expected: "ChatCompletionTokenLogprob.TopLogprob", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".top_logprobs", expected: "Array<ChatCompletionTokenLogprob.TopLogprob>", value: input.top_logprobs }, _errorFactory));
        const _ao8 = (input, _path, _exceptionable = true) => ("string" === typeof input.token || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".token", expected: "string", value: input.token }, _errorFactory)) && (null === input.bytes || (Array.isArray(input.bytes) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".bytes", expected: "(Array<number> | null)", value: input.bytes }, _errorFactory)) && input.bytes.every(((elem, _index14) => "number" === typeof elem || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".bytes[" + _index14 + "]", expected: "number", value: elem }, _errorFactory))) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".bytes", expected: "(Array<number> | null)", value: input.bytes }, _errorFactory)) && ("number" === typeof input.logprob || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".logprob", expected: "number", value: input.logprob }, _errorFactory));
        const _ao9 = (input, _path, _exceptionable = true) => ("number" === typeof input.completion_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".completion_tokens", expected: "number", value: input.completion_tokens }, _errorFactory)) && ("number" === typeof input.prompt_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".prompt_tokens", expected: "number", value: input.prompt_tokens }, _errorFactory)) && ("number" === typeof input.total_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".total_tokens", expected: "number", value: input.total_tokens }, _errorFactory)) && (undefined === input.completion_tokens_details || ("object" === typeof input.completion_tokens_details && null !== input.completion_tokens_details && false === Array.isArray(input.completion_tokens_details) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".completion_tokens_details", expected: "(CompletionUsage.CompletionTokensDetails | undefined)", value: input.completion_tokens_details }, _errorFactory)) && _ao10(input.completion_tokens_details, _path + ".completion_tokens_details", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".completion_tokens_details", expected: "(CompletionUsage.CompletionTokensDetails | undefined)", value: input.completion_tokens_details }, _errorFactory)) && (undefined === input.prompt_tokens_details || ("object" === typeof input.prompt_tokens_details && null !== input.prompt_tokens_details && false === Array.isArray(input.prompt_tokens_details) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".prompt_tokens_details", expected: "(CompletionUsage.PromptTokensDetails | undefined)", value: input.prompt_tokens_details }, _errorFactory)) && _ao11(input.prompt_tokens_details, _path + ".prompt_tokens_details", _exceptionable) || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".prompt_tokens_details", expected: "(CompletionUsage.PromptTokensDetails | undefined)", value: input.prompt_tokens_details }, _errorFactory));
        const _ao10 = (input, _path, _exceptionable = true) => (undefined === input.accepted_prediction_tokens || "number" === typeof input.accepted_prediction_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".accepted_prediction_tokens", expected: "(number | undefined)", value: input.accepted_prediction_tokens }, _errorFactory)) && (undefined === input.audio_tokens || "number" === typeof input.audio_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".audio_tokens", expected: "(number | undefined)", value: input.audio_tokens }, _errorFactory)) && (undefined === input.reasoning_tokens || "number" === typeof input.reasoning_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".reasoning_tokens", expected: "(number | undefined)", value: input.reasoning_tokens }, _errorFactory)) && (undefined === input.rejected_prediction_tokens || "number" === typeof input.rejected_prediction_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".rejected_prediction_tokens", expected: "(number | undefined)", value: input.rejected_prediction_tokens }, _errorFactory));
        const _ao11 = (input, _path, _exceptionable = true) => (undefined === input.audio_tokens || "number" === typeof input.audio_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".audio_tokens", expected: "(number | undefined)", value: input.audio_tokens }, _errorFactory)) && (undefined === input.cached_tokens || "number" === typeof input.cached_tokens || __typia_transform__assertGuard._assertGuard(_exceptionable, { method: "json.assertParse", path: _path + ".cached_tokens", expected: "(number | undefined)", value: input.cached_tokens }, _errorFactory));
        // Cheap happy-path check; the expensive asserting walk runs only on failure.
        const __is = input => "object" === typeof input && null !== input && _io0(input);
        let _errorFactory;
        const __assert = (input, errorFactory) => {
            if (false === __is(input)) {
                _errorFactory = errorFactory;
                // Re-validate with path tracking so the thrown error names the
                // exact offending property.
                ((input, _path, _exceptionable = true) => ("object" === typeof input && null !== input || __typia_transform__assertGuard._assertGuard(true, { method: "json.assertParse", path: _path + "", expected: "ChatCompletionChunk", value: input }, _errorFactory)) && _ao0(input, _path + "", true) || __typia_transform__assertGuard._assertGuard(true, { method: "json.assertParse", path: _path + "", expected: "ChatCompletionChunk", value: input }, _errorFactory))(input, "$input", true);
            }
            return input;
        };
        return (input, errorFactory) => __assert(JSON.parse(input), errorFactory);
    })()(str);
};
|
|
8489
|
+
// Folds one streamed `ChatCompletionChunk` into the accumulated
// `ChatCompletion` (`origin`), returning the updated accumulation.
// Choice slots are keyed by `choice.index`: an already-seen index is merged
// via `mergeChoice`, an unseen one is materialized as a fresh non-streaming
// `ChatCompletion.Choice` from the delta. Usage reports are combined with
// `ChatGptUsageAggregator.sum` when both sides carry one.
ChatGptCompletionMessageUtil.accumulate = (origin, chunk) => {
    const choices = origin.choices;
    for (const choice of chunk.choices) {
        const existing = choices[choice.index];
        if (existing) {
            choices[choice.index] = ChatGptCompletionMessageUtil.mergeChoice(existing, choice);
            continue;
        }
        // First delta for this index — seed a complete choice object.
        let toolCalls;
        if (choice.delta.tool_calls) {
            toolCalls = [];
            for (const call of choice.delta.tool_calls) {
                // Slot by the tool call's own index so later fragments line up.
                toolCalls[call.index] = {
                    id: call.id ?? "",
                    type: "function",
                    function: {
                        name: call.function?.name ?? "",
                        arguments: call.function?.arguments ?? ""
                    }
                };
            }
        }
        choices[choice.index] = {
            index: choice.index,
            finish_reason: choice.finish_reason ?? null,
            logprobs: choice.logprobs ?? null,
            message: {
                tool_calls: toolCalls,
                content: choice.delta.content ?? null,
                refusal: choice.delta.refusal ?? null,
                role: "assistant"
            }
        };
    }
    // Keep whichever usage exists; sum when both chunks reported one.
    const usage = !chunk.usage
        ? origin.usage
        : !origin.usage
            ? chunk.usage
            : ChatGptUsageAggregator.sum(origin.usage, chunk.usage);
    return {
        ...origin,
        choices,
        usage
    };
};
|
|
8534
|
+
ChatGptCompletionMessageUtil.merge = chunks => {
|
|
8535
|
+
const firstChunk = chunks[0];
|
|
8536
|
+
if (!firstChunk) throw new Error("No chunks received");
|
|
8537
|
+
return chunks.reduce(ChatGptCompletionMessageUtil.accumulate, {
|
|
8538
|
+
id: firstChunk.id,
|
|
8539
|
+
choices: [],
|
|
8540
|
+
created: firstChunk.created,
|
|
8541
|
+
model: firstChunk.model,
|
|
8542
|
+
object: "chat.completion",
|
|
8543
|
+
usage: undefined,
|
|
8544
|
+
service_tier: firstChunk.service_tier,
|
|
8545
|
+
system_fingerprint: firstChunk.system_fingerprint
|
|
8546
|
+
});
|
|
8547
|
+
};
|
|
8548
|
+
ChatGptCompletionMessageUtil.mergeChoice = (acc, cur) => {
|
|
8549
|
+
var _a;
|
|
8550
|
+
if (!acc.finish_reason && cur.finish_reason) {
|
|
8551
|
+
acc.finish_reason = cur.finish_reason;
|
|
8552
|
+
}
|
|
8553
|
+
if (!acc.logprobs && cur.logprobs) {
|
|
8554
|
+
acc.logprobs = cur.logprobs;
|
|
8555
|
+
}
|
|
8556
|
+
if (cur.delta.content) {
|
|
8557
|
+
if (!acc.message.content) {
|
|
8558
|
+
acc.message.content = cur.delta.content;
|
|
8559
|
+
} else {
|
|
8560
|
+
acc.message.content += cur.delta.content;
|
|
8561
|
+
}
|
|
8562
|
+
}
|
|
8563
|
+
if (cur.delta.refusal) {
|
|
8564
|
+
if (!acc.message.refusal) {
|
|
8565
|
+
acc.message.refusal = cur.delta.refusal;
|
|
8566
|
+
} else {
|
|
8567
|
+
acc.message.refusal += cur.delta.refusal;
|
|
8568
|
+
}
|
|
8569
|
+
}
|
|
8570
|
+
if (cur.delta.tool_calls) {
|
|
8571
|
+
(_a = acc.message).tool_calls ?? (_a.tool_calls = []);
|
|
8572
|
+
const toolCalls = acc.message.tool_calls;
|
|
8573
|
+
cur.delta.tool_calls.forEach((toolCall => {
|
|
8574
|
+
const existingToolCall = toolCalls[toolCall.index];
|
|
8575
|
+
if (existingToolCall) {
|
|
8576
|
+
toolCalls[toolCall.index] = ChatGptCompletionMessageUtil.mergeToolCalls(existingToolCall, toolCall);
|
|
8577
|
+
return;
|
|
8578
|
+
}
|
|
8579
|
+
toolCalls[toolCall.index] = {
|
|
8580
|
+
id: toolCall.id ?? "",
|
|
8581
|
+
type: "function",
|
|
8582
|
+
function: {
|
|
8583
|
+
name: toolCall.function?.name ?? "",
|
|
8584
|
+
arguments: toolCall.function?.arguments ?? ""
|
|
8585
|
+
}
|
|
8586
|
+
};
|
|
8587
|
+
}));
|
|
8588
|
+
}
|
|
8589
|
+
return acc;
|
|
8590
|
+
};
|
|
8591
|
+
ChatGptCompletionMessageUtil.mergeToolCalls = (acc, cur) => {
|
|
8592
|
+
if (cur.function) {
|
|
8593
|
+
acc.function.arguments += cur.function.arguments ?? "";
|
|
8594
|
+
acc.function.name += cur.function.name ?? "";
|
|
8595
|
+
}
|
|
8596
|
+
acc.id += cur.id ?? "";
|
|
8597
|
+
return acc;
|
|
8598
|
+
};
|
|
8599
|
+
})(ChatGptCompletionMessageUtil || (ChatGptCompletionMessageUtil = {}));
|
|
7623
8600
|
|
|
7624
8601
|
var ChatGptHistoryDecoder;
|
|
7625
8602
|
|
|
@@ -7636,9 +8613,9 @@ var ChatGptHistoryDecoder;
|
|
|
7636
8613
|
function: {
|
|
7637
8614
|
name: `${history.type}Functions`,
|
|
7638
8615
|
arguments: JSON.stringify({
|
|
7639
|
-
functions: history.
|
|
7640
|
-
name:
|
|
7641
|
-
reason:
|
|
8616
|
+
functions: history.selections.map((s => ({
|
|
8617
|
+
name: s.operation.function.name,
|
|
8618
|
+
reason: s.reason
|
|
7642
8619
|
})))
|
|
7643
8620
|
})
|
|
7644
8621
|
}
|
|
@@ -7654,7 +8631,7 @@ var ChatGptHistoryDecoder;
|
|
|
7654
8631
|
type: "function",
|
|
7655
8632
|
id: history.id,
|
|
7656
8633
|
function: {
|
|
7657
|
-
name: history.
|
|
8634
|
+
name: history.operation.name,
|
|
7658
8635
|
arguments: JSON.stringify(history.arguments)
|
|
7659
8636
|
}
|
|
7660
8637
|
} ]
|
|
@@ -7663,16 +8640,16 @@ var ChatGptHistoryDecoder;
|
|
|
7663
8640
|
tool_call_id: history.id,
|
|
7664
8641
|
content: JSON.stringify({
|
|
7665
8642
|
function: {
|
|
7666
|
-
protocol: history.protocol,
|
|
7667
|
-
description: history.function.description,
|
|
7668
|
-
parameters: history.function.parameters,
|
|
7669
|
-
output: history.function.output,
|
|
7670
|
-
...history.protocol === "http" ? {
|
|
7671
|
-
method: history.function.method,
|
|
7672
|
-
path: history.function.path
|
|
8643
|
+
protocol: history.operation.protocol,
|
|
8644
|
+
description: history.operation.function.description,
|
|
8645
|
+
parameters: history.operation.function.parameters,
|
|
8646
|
+
output: history.operation.function.output,
|
|
8647
|
+
...history.operation.protocol === "http" ? {
|
|
8648
|
+
method: history.operation.function.method,
|
|
8649
|
+
path: history.operation.function.path
|
|
7673
8650
|
} : {}
|
|
7674
8651
|
},
|
|
7675
|
-
...history.protocol === "http" ? {
|
|
8652
|
+
...history.operation.protocol === "http" ? {
|
|
7676
8653
|
status: history.value.status,
|
|
7677
8654
|
data: history.value.body
|
|
7678
8655
|
} : {
|
|
@@ -7697,41 +8674,35 @@ var ChatGptCancelFunctionAgent;
|
|
|
7697
8674
|
events.push(e);
|
|
7698
8675
|
}
|
|
7699
8676
|
}, operations, 0))));
|
|
7700
|
-
if (stacks.every((s => s.length === 0))) return prompts[0]; else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) return step(ctx, stacks.flat().map((s => ctx.operations.group.get(s.controller.name).get(s.function.name))), 0);
|
|
7701
|
-
const collection = {
|
|
8677
|
+
if (stacks.every((s => s.length === 0))) return prompts[0]; else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) return step(ctx, stacks.flat().map((s => ctx.operations.group.get(s.operation.controller.name).get(s.operation.function.name))), 0);
|
|
8678
|
+
const collection = new AgenticaCancelPrompt({
|
|
7702
8679
|
id: v4(),
|
|
7703
|
-
|
|
7704
|
-
|
|
7705
|
-
};
|
|
8680
|
+
selections: []
|
|
8681
|
+
});
|
|
7706
8682
|
for (const e of events) if (e.type === "select") {
|
|
7707
|
-
collection.
|
|
7708
|
-
protocol: e.operation.protocol,
|
|
7709
|
-
controller: e.operation.controller,
|
|
7710
|
-
function: e.operation.function,
|
|
7711
|
-
reason: e.reason,
|
|
7712
|
-
name: e.operation.name
|
|
7713
|
-
}));
|
|
8683
|
+
collection.selections.push(e.selection);
|
|
7714
8684
|
await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
|
|
7715
|
-
name: e.operation.name,
|
|
7716
|
-
reason: e.reason
|
|
8685
|
+
name: e.selection.operation.name,
|
|
8686
|
+
reason: e.selection.reason
|
|
7717
8687
|
});
|
|
7718
8688
|
}
|
|
7719
8689
|
return [ collection ];
|
|
7720
8690
|
};
|
|
7721
8691
|
ChatGptCancelFunctionAgent.cancelFunction = async (ctx, reference) => {
|
|
7722
|
-
const index = ctx.stack.findIndex((item => item.name === reference.name));
|
|
8692
|
+
const index = ctx.stack.findIndex((item => item.operation.name === reference.name));
|
|
7723
8693
|
if (index === -1) return null;
|
|
7724
8694
|
const item = ctx.stack[index];
|
|
7725
8695
|
ctx.stack.splice(index, 1);
|
|
7726
|
-
await ctx.dispatch({
|
|
7727
|
-
|
|
7728
|
-
|
|
7729
|
-
|
|
7730
|
-
|
|
8696
|
+
await ctx.dispatch(new AgenticaCancelEvent({
|
|
8697
|
+
selection: new AgenticaOperationSelection({
|
|
8698
|
+
operation: item.operation,
|
|
8699
|
+
reason: reference.reason
|
|
8700
|
+
})
|
|
8701
|
+
}));
|
|
7731
8702
|
return item;
|
|
7732
8703
|
};
|
|
7733
8704
|
const step = async (ctx, operations, retry, failures) => {
|
|
7734
|
-
const
|
|
8705
|
+
const completionStream = await ctx.request("cancel", {
|
|
7735
8706
|
messages: [ {
|
|
7736
8707
|
role: "system",
|
|
7737
8708
|
content: AgenticaDefaultPrompt.write(ctx.config)
|
|
@@ -7775,6 +8746,8 @@ var ChatGptCancelFunctionAgent;
|
|
|
7775
8746
|
tool_choice: "auto",
|
|
7776
8747
|
parallel_tool_calls: true
|
|
7777
8748
|
});
|
|
8749
|
+
const chunks = await StreamUtil.readAll(completionStream);
|
|
8750
|
+
const completion = ChatGptCompletionMessageUtil.merge(chunks);
|
|
7778
8751
|
if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
|
|
7779
8752
|
const failures = [];
|
|
7780
8753
|
for (const choice of completion.choices) for (const tc of choice.message.tool_calls ?? []) {
|
|
@@ -7859,16 +8832,15 @@ var ChatGptCancelFunctionAgent;
|
|
|
7859
8832
|
const _io1 = input => "string" === typeof input.reason && "string" === typeof input.name;
|
|
7860
8833
|
return input => "object" === typeof input && null !== input && _io0(input);
|
|
7861
8834
|
})()(input) === false) continue; else if (tc.function.name === "cancelFunctions") {
|
|
7862
|
-
const collection = {
|
|
8835
|
+
const collection = new AgenticaCancelPrompt({
|
|
7863
8836
|
id: tc.id,
|
|
7864
|
-
|
|
7865
|
-
|
|
7866
|
-
};
|
|
8837
|
+
selections: []
|
|
8838
|
+
});
|
|
7867
8839
|
for (const reference of input.functions) {
|
|
7868
8840
|
const operation = await ChatGptCancelFunctionAgent.cancelFunction(ctx, reference);
|
|
7869
|
-
if (operation !== null) collection.
|
|
8841
|
+
if (operation !== null) collection.selections.push(operation);
|
|
7870
8842
|
}
|
|
7871
|
-
if (collection.
|
|
8843
|
+
if (collection.selections.length !== 0) prompts.push(collection);
|
|
7872
8844
|
}
|
|
7873
8845
|
}
|
|
7874
8846
|
}
|
|
@@ -7995,7 +8967,7 @@ var ChatGptCallFunctionAgent;
|
|
|
7995
8967
|
|
|
7996
8968
|
(function(ChatGptCallFunctionAgent) {
|
|
7997
8969
|
ChatGptCallFunctionAgent.execute = async ctx => {
|
|
7998
|
-
const
|
|
8970
|
+
const completionStream = await ctx.request("call", {
|
|
7999
8971
|
messages: [ {
|
|
8000
8972
|
role: "system",
|
|
8001
8973
|
content: AgenticaDefaultPrompt.write(ctx.config)
|
|
@@ -8006,76 +8978,81 @@ var ChatGptCallFunctionAgent;
|
|
|
8006
8978
|
role: "system",
|
|
8007
8979
|
content: ctx.config?.systemPrompt?.execute?.(ctx.histories) ?? AgenticaSystemPrompt.EXECUTE
|
|
8008
8980
|
} ],
|
|
8009
|
-
tools: ctx.stack.map((
|
|
8981
|
+
tools: ctx.stack.map((s => ({
|
|
8010
8982
|
type: "function",
|
|
8011
8983
|
function: {
|
|
8012
|
-
name:
|
|
8013
|
-
description:
|
|
8014
|
-
parameters:
|
|
8984
|
+
name: s.operation.name,
|
|
8985
|
+
description: s.operation.function.description,
|
|
8986
|
+
parameters: s.operation.function.separated ? s.operation.function.separated.llm ?? {
|
|
8015
8987
|
type: "object",
|
|
8016
8988
|
properties: {},
|
|
8017
8989
|
required: [],
|
|
8018
8990
|
additionalProperties: false,
|
|
8019
8991
|
$defs: {}
|
|
8020
|
-
} :
|
|
8992
|
+
} : s.operation.function.parameters
|
|
8021
8993
|
}
|
|
8022
8994
|
}))),
|
|
8023
8995
|
tool_choice: "auto",
|
|
8024
8996
|
parallel_tool_calls: false
|
|
8025
8997
|
});
|
|
8026
8998
|
const closures = [];
|
|
8999
|
+
const chunks = await StreamUtil.readAll(completionStream);
|
|
9000
|
+
const completion = ChatGptCompletionMessageUtil.merge(chunks);
|
|
8027
9001
|
for (const choice of completion.choices) {
|
|
8028
9002
|
for (const tc of choice.message.tool_calls ?? []) {
|
|
8029
9003
|
if (tc.type === "function") {
|
|
8030
9004
|
const operation = ctx.operations.flat.get(tc.function.name);
|
|
8031
9005
|
if (operation === undefined) continue;
|
|
8032
9006
|
closures.push((async () => {
|
|
8033
|
-
const call = {
|
|
8034
|
-
type: "call",
|
|
9007
|
+
const call = new AgenticaCallEvent({
|
|
8035
9008
|
id: tc.id,
|
|
8036
9009
|
operation,
|
|
8037
9010
|
arguments: JSON.parse(tc.function.arguments)
|
|
8038
|
-
};
|
|
9011
|
+
});
|
|
8039
9012
|
if (call.operation.protocol === "http") fillHttpArguments({
|
|
8040
9013
|
operation: call.operation,
|
|
8041
9014
|
arguments: call.arguments
|
|
8042
9015
|
});
|
|
8043
9016
|
await ctx.dispatch(call);
|
|
8044
9017
|
const execute = await propagate(ctx, call, 0);
|
|
8045
|
-
await ctx.dispatch({
|
|
8046
|
-
type: "execute",
|
|
9018
|
+
await ctx.dispatch(new AgenticaExecuteEvent({
|
|
8047
9019
|
id: call.id,
|
|
8048
9020
|
operation: call.operation,
|
|
8049
9021
|
arguments: execute.arguments,
|
|
8050
9022
|
value: execute.value
|
|
8051
|
-
});
|
|
9023
|
+
}));
|
|
8052
9024
|
await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
|
|
8053
9025
|
name: call.operation.name,
|
|
8054
9026
|
reason: "completed"
|
|
8055
9027
|
});
|
|
8056
|
-
await ctx.dispatch({
|
|
8057
|
-
|
|
8058
|
-
|
|
8059
|
-
|
|
8060
|
-
|
|
8061
|
-
|
|
8062
|
-
|
|
9028
|
+
await ctx.dispatch(new AgenticaCancelEvent({
|
|
9029
|
+
selection: new AgenticaOperationSelection({
|
|
9030
|
+
operation: call.operation,
|
|
9031
|
+
reason: "complete"
|
|
9032
|
+
})
|
|
9033
|
+
}));
|
|
9034
|
+
return [ execute, new AgenticaCancelPrompt({
|
|
8063
9035
|
id: call.id,
|
|
8064
|
-
|
|
8065
|
-
|
|
9036
|
+
selections: [ new AgenticaOperationSelection({
|
|
9037
|
+
operation: call.operation,
|
|
8066
9038
|
reason: "complete"
|
|
8067
9039
|
}) ]
|
|
8068
|
-
} ];
|
|
9040
|
+
}) ];
|
|
8069
9041
|
}));
|
|
8070
9042
|
}
|
|
8071
9043
|
}
|
|
8072
9044
|
if (choice.message.role === "assistant" && !!choice.message.content?.length) closures.push((async () => {
|
|
8073
|
-
const value = {
|
|
8074
|
-
type: "text",
|
|
9045
|
+
const value = new AgenticaTextPrompt({
|
|
8075
9046
|
role: "assistant",
|
|
8076
9047
|
text: choice.message.content
|
|
8077
|
-
};
|
|
8078
|
-
await ctx.dispatch(
|
|
9048
|
+
});
|
|
9049
|
+
await ctx.dispatch(new AgenticaTextEvent({
|
|
9050
|
+
role: "assistant",
|
|
9051
|
+
get: () => value.text,
|
|
9052
|
+
done: () => true,
|
|
9053
|
+
stream: StreamUtil.to(value.text),
|
|
9054
|
+
join: () => Promise.resolve(value.text)
|
|
9055
|
+
}));
|
|
8079
9056
|
return [ value ];
|
|
8080
9057
|
}));
|
|
8081
9058
|
}
|
|
@@ -8101,24 +9078,16 @@ var ChatGptCallFunctionAgent;
|
|
|
8101
9078
|
input: call.arguments
|
|
8102
9079
|
});
|
|
8103
9080
|
const success = ((response.status === 400 || response.status === 404 || response.status === 422) && retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY) && typeof response.body) === false;
|
|
8104
|
-
return (success === false ? await correct(ctx, call, retry, response.body) : null) ??
|
|
8105
|
-
|
|
8106
|
-
protocol: "http",
|
|
8107
|
-
controller: call.operation.controller,
|
|
8108
|
-
function: call.operation.function,
|
|
9081
|
+
return (success === false ? await correct(ctx, call, retry, response.body) : null) ?? new AgenticaExecutePrompt({
|
|
9082
|
+
operation: call.operation,
|
|
8109
9083
|
id: call.id,
|
|
8110
|
-
name: call.operation.name,
|
|
8111
9084
|
arguments: call.arguments,
|
|
8112
9085
|
value: response
|
|
8113
9086
|
});
|
|
8114
9087
|
} catch (error) {
|
|
8115
|
-
return
|
|
8116
|
-
|
|
8117
|
-
protocol: "http",
|
|
8118
|
-
controller: call.operation.controller,
|
|
8119
|
-
function: call.operation.function,
|
|
9088
|
+
return new AgenticaExecutePrompt({
|
|
9089
|
+
operation: call.operation,
|
|
8120
9090
|
id: call.id,
|
|
8121
|
-
name: call.operation.name,
|
|
8122
9091
|
arguments: call.arguments,
|
|
8123
9092
|
value: {
|
|
8124
9093
|
status: 500,
|
|
@@ -8133,13 +9102,9 @@ var ChatGptCallFunctionAgent;
|
|
|
8133
9102
|
}
|
|
8134
9103
|
} else {
|
|
8135
9104
|
const check = call.operation.function.validate(call.arguments);
|
|
8136
|
-
if (check.success === false) return (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY) ? await correct(ctx, call, retry, check.errors) : null) ??
|
|
8137
|
-
type: "execute",
|
|
8138
|
-
protocol: "class",
|
|
8139
|
-
controller: call.operation.controller,
|
|
8140
|
-
function: call.operation.function,
|
|
9105
|
+
if (check.success === false) return (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY) ? await correct(ctx, call, retry, check.errors) : null) ?? new AgenticaExecutePrompt({
|
|
8141
9106
|
id: call.id,
|
|
8142
|
-
|
|
9107
|
+
operation: call.operation,
|
|
8143
9108
|
arguments: call.arguments,
|
|
8144
9109
|
value: {
|
|
8145
9110
|
name: "TypeGuardError",
|
|
@@ -8153,24 +9118,16 @@ var ChatGptCallFunctionAgent;
|
|
|
8153
9118
|
function: call.operation.function,
|
|
8154
9119
|
arguments: call.arguments
|
|
8155
9120
|
}) : await call.operation.controller.execute[call.operation.function.name](call.arguments);
|
|
8156
|
-
return
|
|
8157
|
-
type: "execute",
|
|
8158
|
-
protocol: "class",
|
|
8159
|
-
controller: call.operation.controller,
|
|
8160
|
-
function: call.operation.function,
|
|
9121
|
+
return new AgenticaExecutePrompt({
|
|
8161
9122
|
id: call.id,
|
|
8162
|
-
|
|
9123
|
+
operation: call.operation,
|
|
8163
9124
|
arguments: call.arguments,
|
|
8164
9125
|
value
|
|
8165
9126
|
});
|
|
8166
9127
|
} catch (error) {
|
|
8167
|
-
return
|
|
8168
|
-
type: "execute",
|
|
8169
|
-
protocol: "class",
|
|
8170
|
-
controller: call.operation.controller,
|
|
8171
|
-
function: call.operation.function,
|
|
9128
|
+
return new AgenticaExecutePrompt({
|
|
8172
9129
|
id: call.id,
|
|
8173
|
-
|
|
9130
|
+
operation: call.operation,
|
|
8174
9131
|
arguments: call.arguments,
|
|
8175
9132
|
value: error instanceof Error ? {
|
|
8176
9133
|
...error,
|
|
@@ -8182,7 +9139,7 @@ var ChatGptCallFunctionAgent;
|
|
|
8182
9139
|
}
|
|
8183
9140
|
};
|
|
8184
9141
|
const correct = async (ctx, call, retry, error) => {
|
|
8185
|
-
const
|
|
9142
|
+
const completionStream = await ctx.request("call", {
|
|
8186
9143
|
messages: [ {
|
|
8187
9144
|
role: "system",
|
|
8188
9145
|
content: AgenticaDefaultPrompt.write(ctx.config)
|
|
@@ -8227,14 +9184,15 @@ var ChatGptCallFunctionAgent;
|
|
|
8227
9184
|
tool_choice: "auto",
|
|
8228
9185
|
parallel_tool_calls: false
|
|
8229
9186
|
});
|
|
9187
|
+
const chunks = await StreamUtil.readAll(completionStream);
|
|
9188
|
+
const completion = ChatGptCompletionMessageUtil.merge(chunks);
|
|
8230
9189
|
const toolCall = (completion.choices[0]?.message.tool_calls ?? []).find((tc => tc.type === "function" && tc.function.name === call.operation.name));
|
|
8231
9190
|
if (toolCall === undefined) return null;
|
|
8232
|
-
return propagate(ctx, {
|
|
9191
|
+
return propagate(ctx, new AgenticaCallEvent({
|
|
8233
9192
|
id: toolCall.id,
|
|
8234
|
-
type: "call",
|
|
8235
9193
|
operation: call.operation,
|
|
8236
9194
|
arguments: JSON.parse(toolCall.function.arguments)
|
|
8237
|
-
}, retry);
|
|
9195
|
+
}), retry);
|
|
8238
9196
|
};
|
|
8239
9197
|
const fillHttpArguments = props => {
|
|
8240
9198
|
if (props.operation.protocol !== "http") return;
|
|
@@ -8245,12 +9203,89 @@ var ChatGptCallFunctionAgent;
|
|
|
8245
9203
|
const isObject = ($defs, schema) => ChatGptTypeChecker.isObject(schema) || ChatGptTypeChecker.isReference(schema) && isObject($defs, $defs[schema.$ref.split("/").at(-1)]) || ChatGptTypeChecker.isAnyOf(schema) && schema.anyOf.every((schema => isObject($defs, schema)));
|
|
8246
9204
|
})(ChatGptCallFunctionAgent || (ChatGptCallFunctionAgent = {}));
|
|
8247
9205
|
|
|
9206
|
+
var MPSCUtil;
|
|
9207
|
+
|
|
9208
|
+
(function(MPSCUtil) {
|
|
9209
|
+
MPSCUtil.create = () => {
|
|
9210
|
+
const queue = new AsyncQueue;
|
|
9211
|
+
const consumer = new ReadableStream({
|
|
9212
|
+
async pull(controller) {
|
|
9213
|
+
const {value, done} = await queue.dequeue();
|
|
9214
|
+
if (done) {
|
|
9215
|
+
controller.close();
|
|
9216
|
+
} else {
|
|
9217
|
+
controller.enqueue(value);
|
|
9218
|
+
}
|
|
9219
|
+
}
|
|
9220
|
+
});
|
|
9221
|
+
return {
|
|
9222
|
+
consumer,
|
|
9223
|
+
produce: chunk => queue.enqueue(chunk),
|
|
9224
|
+
close: () => queue.close(),
|
|
9225
|
+
done: () => queue.done(),
|
|
9226
|
+
waitClose: () => queue.waitClose()
|
|
9227
|
+
};
|
|
9228
|
+
};
|
|
9229
|
+
class AsyncQueue {
|
|
9230
|
+
constructor() {
|
|
9231
|
+
this.queue = [];
|
|
9232
|
+
this.resolvers = [];
|
|
9233
|
+
this.closeResolvers = [];
|
|
9234
|
+
this.closed = false;
|
|
9235
|
+
}
|
|
9236
|
+
enqueue(item) {
|
|
9237
|
+
this.queue.push(item);
|
|
9238
|
+
if (this.resolvers.length > 0) {
|
|
9239
|
+
this.resolvers.shift()?.({
|
|
9240
|
+
value: this.queue.shift(),
|
|
9241
|
+
done: false
|
|
9242
|
+
});
|
|
9243
|
+
}
|
|
9244
|
+
}
|
|
9245
|
+
async dequeue() {
|
|
9246
|
+
if (this.queue.length > 0) {
|
|
9247
|
+
return {
|
|
9248
|
+
value: this.queue.shift(),
|
|
9249
|
+
done: false
|
|
9250
|
+
};
|
|
9251
|
+
}
|
|
9252
|
+
if (this.closed) return {
|
|
9253
|
+
value: undefined,
|
|
9254
|
+
done: true
|
|
9255
|
+
};
|
|
9256
|
+
return new Promise((resolve => this.resolvers.push(resolve)));
|
|
9257
|
+
}
|
|
9258
|
+
done() {
|
|
9259
|
+
return this.closed;
|
|
9260
|
+
}
|
|
9261
|
+
close() {
|
|
9262
|
+
this.closed = true;
|
|
9263
|
+
while (this.resolvers.length > 0) {
|
|
9264
|
+
this.resolvers.shift()?.({
|
|
9265
|
+
value: undefined,
|
|
9266
|
+
done: true
|
|
9267
|
+
});
|
|
9268
|
+
}
|
|
9269
|
+
this.closeResolvers.forEach((resolve => resolve()));
|
|
9270
|
+
}
|
|
9271
|
+
waitClose() {
|
|
9272
|
+
if (this.closed) {
|
|
9273
|
+
return Promise.resolve();
|
|
9274
|
+
}
|
|
9275
|
+
return new Promise((resolve => {
|
|
9276
|
+
this.closeResolvers.push(resolve);
|
|
9277
|
+
}));
|
|
9278
|
+
}
|
|
9279
|
+
}
|
|
9280
|
+
MPSCUtil.AsyncQueue = AsyncQueue;
|
|
9281
|
+
})(MPSCUtil || (MPSCUtil = {}));
|
|
9282
|
+
|
|
8248
9283
|
var ChatGptDescribeFunctionAgent;
|
|
8249
9284
|
|
|
8250
9285
|
(function(ChatGptDescribeFunctionAgent) {
|
|
8251
9286
|
ChatGptDescribeFunctionAgent.execute = async (ctx, histories) => {
|
|
8252
9287
|
if (histories.length === 0) return [];
|
|
8253
|
-
const
|
|
9288
|
+
const completionStream = await ctx.request("describe", {
|
|
8254
9289
|
messages: [ {
|
|
8255
9290
|
role: "system",
|
|
8256
9291
|
content: AgenticaDefaultPrompt.write(ctx.config)
|
|
@@ -8259,12 +9294,57 @@ var ChatGptDescribeFunctionAgent;
|
|
|
8259
9294
|
content: ctx.config?.systemPrompt?.describe?.(histories) ?? AgenticaSystemPrompt.DESCRIBE
|
|
8260
9295
|
} ]
|
|
8261
9296
|
});
|
|
8262
|
-
const
|
|
8263
|
-
|
|
8264
|
-
|
|
9297
|
+
const describeContext = [];
|
|
9298
|
+
const completion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
|
|
9299
|
+
const acc = await accPromise;
|
|
9300
|
+
const registerContext = choices => {
|
|
9301
|
+
for (const choice of choices) {
|
|
9302
|
+
if (choice.finish_reason) {
|
|
9303
|
+
describeContext[choice.index].close();
|
|
9304
|
+
continue;
|
|
9305
|
+
}
|
|
9306
|
+
if (!choice.delta.content) {
|
|
9307
|
+
continue;
|
|
9308
|
+
}
|
|
9309
|
+
if (describeContext[choice.index]) {
|
|
9310
|
+
describeContext[choice.index].content += choice.delta.content;
|
|
9311
|
+
describeContext[choice.index].produce(choice.delta.content);
|
|
9312
|
+
continue;
|
|
9313
|
+
}
|
|
9314
|
+
const {consumer, produce, close, waitClose, done} = MPSCUtil.create();
|
|
9315
|
+
describeContext[choice.index] = {
|
|
9316
|
+
content: choice.delta.content,
|
|
9317
|
+
consumer,
|
|
9318
|
+
produce,
|
|
9319
|
+
close,
|
|
9320
|
+
waitClose,
|
|
9321
|
+
done
|
|
9322
|
+
};
|
|
9323
|
+
produce(choice.delta.content);
|
|
9324
|
+
void ctx.dispatch(new AgenticaDescribeEvent({
|
|
9325
|
+
executes: histories,
|
|
9326
|
+
stream: consumer,
|
|
9327
|
+
done,
|
|
9328
|
+
get: () => describeContext[choice.index]?.content ?? "",
|
|
9329
|
+
join: async () => {
|
|
9330
|
+
await waitClose();
|
|
9331
|
+
return describeContext[choice.index].content;
|
|
9332
|
+
}
|
|
9333
|
+
}));
|
|
9334
|
+
}
|
|
9335
|
+
};
|
|
9336
|
+
if (acc.object === "chat.completion.chunk") {
|
|
9337
|
+
registerContext([ acc, chunk ].flatMap((acc => acc.choices)));
|
|
9338
|
+
return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
|
|
9339
|
+
}
|
|
9340
|
+
registerContext(chunk.choices);
|
|
9341
|
+
return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
|
|
9342
|
+
}));
|
|
9343
|
+
if (!completion) throw new Error("No completion received");
|
|
9344
|
+
const descriptions = completion.choices.map((choice => choice.message.role === "assistant" && !!choice.message.content?.length ? choice.message.content : null)).filter((str => str !== null)).map((content => new AgenticaDescribePrompt({
|
|
9345
|
+
executes: histories,
|
|
8265
9346
|
text: content
|
|
8266
9347
|
})));
|
|
8267
|
-
for (const describe of descriptions) await ctx.dispatch(describe);
|
|
8268
9348
|
return descriptions;
|
|
8269
9349
|
};
|
|
8270
9350
|
})(ChatGptDescribeFunctionAgent || (ChatGptDescribeFunctionAgent = {}));
|
|
@@ -8273,7 +9353,7 @@ var ChatGptInitializeFunctionAgent;
|
|
|
8273
9353
|
|
|
8274
9354
|
(function(ChatGptInitializeFunctionAgent) {
|
|
8275
9355
|
ChatGptInitializeFunctionAgent.execute = async ctx => {
|
|
8276
|
-
const
|
|
9356
|
+
const completionStream = await ctx.request("initialize", {
|
|
8277
9357
|
messages: [ {
|
|
8278
9358
|
role: "system",
|
|
8279
9359
|
content: AgenticaDefaultPrompt.write(ctx.config)
|
|
@@ -8295,13 +9375,16 @@ var ChatGptInitializeFunctionAgent;
|
|
|
8295
9375
|
tool_choice: "auto",
|
|
8296
9376
|
parallel_tool_calls: false
|
|
8297
9377
|
});
|
|
9378
|
+
const chunks = await StreamUtil.readAll(completionStream);
|
|
9379
|
+
const completion = ChatGptCompletionMessageUtil.merge(chunks);
|
|
8298
9380
|
const prompts = [];
|
|
8299
9381
|
for (const choice of completion.choices) {
|
|
8300
|
-
if (choice.message.role === "assistant" && !!choice.message.content?.length)
|
|
8301
|
-
|
|
8302
|
-
|
|
8303
|
-
|
|
8304
|
-
|
|
9382
|
+
if (choice.message.role === "assistant" && !!choice.message.content?.length) {
|
|
9383
|
+
prompts.push(new AgenticaTextPrompt({
|
|
9384
|
+
role: "assistant",
|
|
9385
|
+
text: choice.message.content
|
|
9386
|
+
}));
|
|
9387
|
+
}
|
|
8305
9388
|
}
|
|
8306
9389
|
if (completion.choices.some((c => !!c.message.tool_calls?.some((tc => tc.type === "function" && tc.function.name === FUNCTION.name))))) await ctx.initialize();
|
|
8307
9390
|
return prompts;
|
|
@@ -9959,29 +11042,22 @@ var ChatGptSelectFunctionAgent;
|
|
|
9959
11042
|
events.push(e);
|
|
9960
11043
|
}
|
|
9961
11044
|
}, operations, 0))));
|
|
9962
|
-
if (stacks.every((s => s.length === 0))) return prompts[0]; else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) return step(ctx, stacks.flat().map((s => ctx.operations.group.get(s.controller.name).get(s.function.name))), 0);
|
|
9963
|
-
const collection = {
|
|
11045
|
+
if (stacks.every((s => s.length === 0))) return prompts[0]; else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) return step(ctx, stacks.flat().map((s => ctx.operations.group.get(s.operation.controller.name).get(s.operation.function.name))), 0);
|
|
11046
|
+
const collection = new AgenticaSelectPrompt({
|
|
9964
11047
|
id: v4(),
|
|
9965
|
-
|
|
9966
|
-
|
|
9967
|
-
};
|
|
11048
|
+
selections: []
|
|
11049
|
+
});
|
|
9968
11050
|
for (const e of events) if (e.type === "select") {
|
|
9969
|
-
collection.
|
|
9970
|
-
protocol: e.operation.protocol,
|
|
9971
|
-
controller: e.operation.controller,
|
|
9972
|
-
function: e.operation.function,
|
|
9973
|
-
reason: e.reason,
|
|
9974
|
-
name: e.operation.name
|
|
9975
|
-
}));
|
|
11051
|
+
collection.selections.push(e.selection);
|
|
9976
11052
|
await selectFunction(ctx, {
|
|
9977
|
-
name: e.operation.name,
|
|
9978
|
-
reason: e.reason
|
|
11053
|
+
name: e.selection.operation.name,
|
|
11054
|
+
reason: e.selection.reason
|
|
9979
11055
|
});
|
|
9980
11056
|
}
|
|
9981
11057
|
return [ collection ];
|
|
9982
11058
|
};
|
|
9983
11059
|
const step = async (ctx, operations, retry, failures) => {
|
|
9984
|
-
const
|
|
11060
|
+
const completionStream = await ctx.request("select", {
|
|
9985
11061
|
messages: [ {
|
|
9986
11062
|
role: "system",
|
|
9987
11063
|
content: AgenticaDefaultPrompt.write(ctx.config)
|
|
@@ -10025,6 +11101,8 @@ var ChatGptSelectFunctionAgent;
|
|
|
10025
11101
|
tool_choice: "auto",
|
|
10026
11102
|
parallel_tool_calls: false
|
|
10027
11103
|
});
|
|
11104
|
+
const chunks = await StreamUtil.readAll(completionStream);
|
|
11105
|
+
const completion = ChatGptCompletionMessageUtil.merge(chunks);
|
|
10028
11106
|
if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
|
|
10029
11107
|
const failures = [];
|
|
10030
11108
|
for (const choice of completion.choices) for (const tc of choice.message.tool_calls ?? []) {
|
|
@@ -10109,32 +11187,33 @@ var ChatGptSelectFunctionAgent;
|
|
|
10109
11187
|
const _io1 = input => "string" === typeof input.reason && "string" === typeof input.name;
|
|
10110
11188
|
return input => "object" === typeof input && null !== input && _io0(input);
|
|
10111
11189
|
})()(input) === false) continue; else if (tc.function.name === "selectFunctions") {
|
|
10112
|
-
const collection = {
|
|
11190
|
+
const collection = new AgenticaSelectPrompt({
|
|
10113
11191
|
id: tc.id,
|
|
10114
|
-
|
|
10115
|
-
|
|
10116
|
-
};
|
|
11192
|
+
selections: []
|
|
11193
|
+
});
|
|
10117
11194
|
for (const reference of input.functions) {
|
|
10118
11195
|
const operation = await selectFunction(ctx, reference);
|
|
10119
|
-
if (operation !== null) collection.
|
|
10120
|
-
|
|
10121
|
-
controller: operation.controller,
|
|
10122
|
-
function: operation.function,
|
|
10123
|
-
name: operation.name,
|
|
11196
|
+
if (operation !== null) collection.selections.push(new AgenticaOperationSelection({
|
|
11197
|
+
operation,
|
|
10124
11198
|
reason: reference.reason
|
|
10125
11199
|
}));
|
|
10126
11200
|
}
|
|
10127
|
-
if (collection.
|
|
11201
|
+
if (collection.selections.length !== 0) prompts.push(collection);
|
|
10128
11202
|
}
|
|
10129
11203
|
}
|
|
10130
11204
|
if (choice.message.role === "assistant" && !!choice.message.content?.length) {
|
|
10131
|
-
const text = {
|
|
10132
|
-
type: "text",
|
|
11205
|
+
const text = new AgenticaTextPrompt({
|
|
10133
11206
|
role: "assistant",
|
|
10134
11207
|
text: choice.message.content
|
|
10135
|
-
};
|
|
11208
|
+
});
|
|
10136
11209
|
prompts.push(text);
|
|
10137
|
-
await ctx.dispatch(
|
|
11210
|
+
await ctx.dispatch(new AgenticaTextEvent({
|
|
11211
|
+
role: "assistant",
|
|
11212
|
+
stream: StreamUtil.to(text.text),
|
|
11213
|
+
join: () => Promise.resolve(text.text),
|
|
11214
|
+
done: () => true,
|
|
11215
|
+
get: () => text.text
|
|
11216
|
+
}));
|
|
10138
11217
|
}
|
|
10139
11218
|
}
|
|
10140
11219
|
return prompts;
|
|
@@ -10142,18 +11221,14 @@ var ChatGptSelectFunctionAgent;
|
|
|
10142
11221
|
const selectFunction = async (ctx, reference) => {
|
|
10143
11222
|
const operation = ctx.operations.flat.get(reference.name);
|
|
10144
11223
|
if (operation === undefined) return null;
|
|
10145
|
-
|
|
10146
|
-
|
|
10147
|
-
controller: operation.controller,
|
|
10148
|
-
function: operation.function,
|
|
10149
|
-
name: reference.name,
|
|
11224
|
+
const selection = new AgenticaOperationSelection({
|
|
11225
|
+
operation,
|
|
10150
11226
|
reason: reference.reason
|
|
10151
|
-
}));
|
|
10152
|
-
await ctx.dispatch({
|
|
10153
|
-
type: "select",
|
|
10154
|
-
reason: reference.reason,
|
|
10155
|
-
operation
|
|
10156
11227
|
});
|
|
11228
|
+
ctx.stack.push(selection);
|
|
11229
|
+
void ctx.dispatch(new AgenticaSelectEvent({
|
|
11230
|
+
selection
|
|
11231
|
+
}));
|
|
10157
11232
|
return operation;
|
|
10158
11233
|
};
|
|
10159
11234
|
const emendMessages = failures => failures.map((f => [ {
|
|
@@ -10293,7 +11368,7 @@ var ChatGptAgent;
|
|
|
10293
11368
|
const executes = prompts.filter((prompt => prompt.type === "execute"));
|
|
10294
11369
|
for (const e of executes) await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
|
|
10295
11370
|
reason: "completed",
|
|
10296
|
-
name: e.
|
|
11371
|
+
name: e.operation.name
|
|
10297
11372
|
});
|
|
10298
11373
|
histories.push(...await (executor?.describe ?? ChatGptDescribeFunctionAgent.execute)(ctx, executes));
|
|
10299
11374
|
if (executes.length === 0 || ctx.stack.length === 0) break;
|
|
@@ -10302,6 +11377,43 @@ var ChatGptAgent;
|
|
|
10302
11377
|
};
|
|
10303
11378
|
})(ChatGptAgent || (ChatGptAgent = {}));
|
|
10304
11379
|
|
|
11380
|
+
var AgenticaTokenUsageAggregator;
|
|
11381
|
+
|
|
11382
|
+
(function(AgenticaTokenUsageAggregator) {
|
|
11383
|
+
AgenticaTokenUsageAggregator.aggregate = props => {
|
|
11384
|
+
if (!props.completionUsage) return;
|
|
11385
|
+
const component = props.usage[props.kind];
|
|
11386
|
+
component.total += props.completionUsage.total_tokens;
|
|
11387
|
+
component.input.total += props.completionUsage.prompt_tokens;
|
|
11388
|
+
component.input.total += props.completionUsage.prompt_tokens_details?.audio_tokens ?? 0;
|
|
11389
|
+
component.input.cached += props.completionUsage.prompt_tokens_details?.cached_tokens ?? 0;
|
|
11390
|
+
component.output.total += props.completionUsage.total_tokens;
|
|
11391
|
+
component.output.accepted_prediction += props.completionUsage.completion_tokens_details?.accepted_prediction_tokens ?? 0;
|
|
11392
|
+
component.output.reasoning += props.completionUsage.completion_tokens_details?.reasoning_tokens ?? 0;
|
|
11393
|
+
component.output.rejected_prediction += props.completionUsage.completion_tokens_details?.rejected_prediction_tokens ?? 0;
|
|
11394
|
+
const sum = getter => Object.entries(props.usage).filter((([key]) => key !== "aggregate")).map((([, comp]) => getter(comp))).reduce(((a, b) => a + b), 0);
|
|
11395
|
+
const aggregate = props.usage.aggregate;
|
|
11396
|
+
aggregate.total = sum((comp => comp.total));
|
|
11397
|
+
aggregate.input.total = sum((comp => comp.input.total));
|
|
11398
|
+
aggregate.input.cached = sum((comp => comp.input.cached));
|
|
11399
|
+
aggregate.output.total = sum((comp => comp.output.total));
|
|
11400
|
+
aggregate.output.reasoning = sum((comp => comp.output.reasoning));
|
|
11401
|
+
aggregate.output.accepted_prediction = sum((comp => comp.output.accepted_prediction));
|
|
11402
|
+
aggregate.output.rejected_prediction = sum((comp => comp.output.rejected_prediction));
|
|
11403
|
+
};
|
|
11404
|
+
})(AgenticaTokenUsageAggregator || (AgenticaTokenUsageAggregator = {}));
|
|
11405
|
+
|
|
11406
|
+
class AgenticaInitializeEvent extends AgenticaEventBase {
|
|
11407
|
+
constructor() {
|
|
11408
|
+
super("initialize");
|
|
11409
|
+
}
|
|
11410
|
+
toJSON() {
|
|
11411
|
+
return {
|
|
11412
|
+
type: "initialize"
|
|
11413
|
+
};
|
|
11414
|
+
}
|
|
11415
|
+
}
|
|
11416
|
+
|
|
10305
11417
|
const __map_take = (dict, key, generator) => {
|
|
10306
11418
|
const oldbie = dict.get(key);
|
|
10307
11419
|
if (oldbie) return oldbie;
|
|
@@ -10323,12 +11435,24 @@ var AgenticaOperationComposer;
|
|
|
10323
11435
|
protocol: "http",
|
|
10324
11436
|
controller,
|
|
10325
11437
|
function: func,
|
|
10326
|
-
name: naming(func.name, ci)
|
|
11438
|
+
name: naming(func.name, ci),
|
|
11439
|
+
toJSON: () => ({
|
|
11440
|
+
protocol: "http",
|
|
11441
|
+
controller: controller.name,
|
|
11442
|
+
function: func.name,
|
|
11443
|
+
name: naming(func.name, ci)
|
|
11444
|
+
})
|
|
10327
11445
|
}))) : controller.application.functions.map((func => ({
|
|
10328
11446
|
protocol: "class",
|
|
10329
11447
|
controller,
|
|
10330
11448
|
function: func,
|
|
10331
|
-
name: naming(func.name, ci)
|
|
11449
|
+
name: naming(func.name, ci),
|
|
11450
|
+
toJSON: () => ({
|
|
11451
|
+
protocol: "class",
|
|
11452
|
+
controller: controller.name,
|
|
11453
|
+
function: func.name,
|
|
11454
|
+
name: naming(func.name, ci)
|
|
11455
|
+
})
|
|
10332
11456
|
}))))).flat();
|
|
10333
11457
|
const divided = !!props.config?.capacity && array.length > props.config.capacity ? divideOperations({
|
|
10334
11458
|
array,
|
|
@@ -10359,122 +11483,67 @@ var AgenticaPromptTransformer;
|
|
|
10359
11483
|
|
|
10360
11484
|
(function(AgenticaPromptTransformer) {
|
|
10361
11485
|
AgenticaPromptTransformer.transform = props => {
|
|
10362
|
-
if (props.
|
|
10363
|
-
|
|
10364
|
-
|
|
10365
|
-
...findOperation({
|
|
10366
|
-
operations: props.operations,
|
|
10367
|
-
input: func
|
|
10368
|
-
}),
|
|
10369
|
-
reason: func.reason
|
|
10370
|
-
})))
|
|
10371
|
-
}; else if (props.input.type === "execute") return transformExecute({
|
|
11486
|
+
if (props.prompt.type === "text") return AgenticaPromptTransformer.transformText({
|
|
11487
|
+
prompt: props.prompt
|
|
11488
|
+
}); else if (props.prompt.type === "select") return AgenticaPromptTransformer.transformSelect({
|
|
10372
11489
|
operations: props.operations,
|
|
10373
|
-
|
|
10374
|
-
});
|
|
10375
|
-
return {
|
|
10376
|
-
type: "describe",
|
|
10377
|
-
text: props.input.text,
|
|
10378
|
-
executions: props.input.executions.map((next => transformExecute({
|
|
10379
|
-
operations: props.operations,
|
|
10380
|
-
input: next
|
|
10381
|
-
})))
|
|
10382
|
-
};
|
|
10383
|
-
};
|
|
10384
|
-
const transformExecute = props => {
|
|
10385
|
-
const operation = findOperation({
|
|
11490
|
+
prompt: props.prompt
|
|
11491
|
+
}); else if (props.prompt.type === "cancel") return AgenticaPromptTransformer.transformCancel({
|
|
10386
11492
|
operations: props.operations,
|
|
10387
|
-
|
|
10388
|
-
});
|
|
10389
|
-
|
|
10390
|
-
|
|
10391
|
-
|
|
10392
|
-
|
|
10393
|
-
|
|
10394
|
-
id: props.input.id,
|
|
10395
|
-
name: props.input.name,
|
|
10396
|
-
arguments: props.input.arguments,
|
|
10397
|
-
value: props.input.value
|
|
11493
|
+
prompt: props.prompt
|
|
11494
|
+
}); else if (props.prompt.type === "execute") return AgenticaPromptTransformer.transformExecute({
|
|
11495
|
+
operations: props.operations,
|
|
11496
|
+
prompt: props.prompt
|
|
11497
|
+
}); else if (props.prompt.type === "describe") return AgenticaPromptTransformer.transformDescribe({
|
|
11498
|
+
operations: props.operations,
|
|
11499
|
+
prompt: props.prompt
|
|
10398
11500
|
});
|
|
11501
|
+
throw new Error("Invalid prompt type.");
|
|
10399
11502
|
};
|
|
10400
|
-
|
|
10401
|
-
|
|
10402
|
-
|
|
10403
|
-
|
|
10404
|
-
|
|
11503
|
+
AgenticaPromptTransformer.transformText = props => new AgenticaTextPrompt(props.prompt);
|
|
11504
|
+
AgenticaPromptTransformer.transformSelect = props => new AgenticaSelectPrompt({
|
|
11505
|
+
id: props.prompt.id,
|
|
11506
|
+
selections: props.prompt.selections.map((select => new AgenticaOperationSelection({
|
|
11507
|
+
operation: findOperation({
|
|
11508
|
+
operations: props.operations,
|
|
11509
|
+
input: select.operation
|
|
11510
|
+
}),
|
|
11511
|
+
reason: select.reason
|
|
11512
|
+
})))
|
|
11513
|
+
});
|
|
11514
|
+
AgenticaPromptTransformer.transformCancel = props => new AgenticaCancelPrompt$1({
|
|
11515
|
+
id: props.prompt.id,
|
|
11516
|
+
selections: props.prompt.selections.map((select => new AgenticaOperationSelection({
|
|
11517
|
+
operation: findOperation({
|
|
11518
|
+
operations: props.operations,
|
|
11519
|
+
input: select.operation
|
|
11520
|
+
}),
|
|
11521
|
+
reason: select.reason
|
|
11522
|
+
})))
|
|
11523
|
+
});
|
|
11524
|
+
AgenticaPromptTransformer.transformExecute = props => new AgenticaExecutePrompt({
|
|
11525
|
+
id: props.prompt.id,
|
|
11526
|
+
operation: findOperation({
|
|
11527
|
+
operations: props.operations,
|
|
11528
|
+
input: props.prompt.operation
|
|
11529
|
+
}),
|
|
11530
|
+
arguments: props.prompt.arguments,
|
|
11531
|
+
value: props.prompt.value
|
|
11532
|
+
});
|
|
11533
|
+
AgenticaPromptTransformer.transformDescribe = props => new AgenticaDescribePrompt({
|
|
11534
|
+
text: props.prompt.text,
|
|
11535
|
+
executes: props.prompt.executions.map((next => AgenticaPromptTransformer.transformExecute({
|
|
11536
|
+
operations: props.operations,
|
|
11537
|
+
prompt: next
|
|
11538
|
+
})))
|
|
11539
|
+
});
|
|
10405
11540
|
})(AgenticaPromptTransformer || (AgenticaPromptTransformer = {}));
|
|
10406
11541
|
|
|
10407
|
-
|
|
10408
|
-
|
|
10409
|
-
(
|
|
10410
|
-
|
|
10411
|
-
|
|
10412
|
-
const component = props.usage[props.kind];
|
|
10413
|
-
component.total += props.completion.usage.total_tokens;
|
|
10414
|
-
component.input.total += props.completion.usage.prompt_tokens;
|
|
10415
|
-
props.completion.usage.prompt_tokens_details?.audio_tokens ?? 0;
|
|
10416
|
-
component.input.cached += props.completion.usage.prompt_tokens_details?.cached_tokens ?? 0;
|
|
10417
|
-
component.output.total += props.completion.usage.total_tokens;
|
|
10418
|
-
component.output.accepted_prediction += props.completion.usage.completion_tokens_details?.accepted_prediction_tokens ?? 0;
|
|
10419
|
-
component.output.reasoning += props.completion.usage.completion_tokens_details?.reasoning_tokens ?? 0;
|
|
10420
|
-
component.output.rejected_prediction += props.completion.usage.completion_tokens_details?.rejected_prediction_tokens ?? 0;
|
|
10421
|
-
const sum = getter => Object.entries(props.usage).filter((([key]) => key !== "aggregate")).map((([_, comp]) => getter(comp))).reduce(((a, b) => a + b), 0);
|
|
10422
|
-
const aggregate = props.usage.aggregate;
|
|
10423
|
-
aggregate.total = sum((comp => comp.total));
|
|
10424
|
-
aggregate.input.total = sum((comp => comp.input.total));
|
|
10425
|
-
aggregate.input.cached = sum((comp => comp.input.cached));
|
|
10426
|
-
aggregate.output.total = sum((comp => comp.output.total));
|
|
10427
|
-
aggregate.output.reasoning = sum((comp => comp.output.reasoning));
|
|
10428
|
-
aggregate.output.accepted_prediction = sum((comp => comp.output.accepted_prediction));
|
|
10429
|
-
aggregate.output.rejected_prediction = sum((comp => comp.output.rejected_prediction));
|
|
10430
|
-
};
|
|
10431
|
-
AgenticaTokenUsageAggregator.plus = (x, y) => {
|
|
10432
|
-
const component = (a, b) => ({
|
|
10433
|
-
total: a.total + b.total,
|
|
10434
|
-
input: {
|
|
10435
|
-
total: a.input.total + b.input.total,
|
|
10436
|
-
cached: a.input.cached + b.input.cached
|
|
10437
|
-
},
|
|
10438
|
-
output: {
|
|
10439
|
-
total: a.output.total + b.output.total,
|
|
10440
|
-
reasoning: a.output.reasoning + b.output.reasoning,
|
|
10441
|
-
accepted_prediction: a.output.accepted_prediction + b.output.accepted_prediction,
|
|
10442
|
-
rejected_prediction: a.output.rejected_prediction + b.output.rejected_prediction
|
|
10443
|
-
}
|
|
10444
|
-
});
|
|
10445
|
-
return {
|
|
10446
|
-
aggregate: component(x.aggregate, y.aggregate),
|
|
10447
|
-
initialize: component(x.initialize, y.initialize),
|
|
10448
|
-
select: component(x.select, y.select),
|
|
10449
|
-
cancel: component(x.cancel, y.cancel),
|
|
10450
|
-
call: component(x.call, y.call),
|
|
10451
|
-
describe: component(x.describe, y.describe)
|
|
10452
|
-
};
|
|
10453
|
-
};
|
|
10454
|
-
AgenticaTokenUsageAggregator.zero = () => {
|
|
10455
|
-
const component = () => ({
|
|
10456
|
-
total: 0,
|
|
10457
|
-
input: {
|
|
10458
|
-
total: 0,
|
|
10459
|
-
cached: 0
|
|
10460
|
-
},
|
|
10461
|
-
output: {
|
|
10462
|
-
total: 0,
|
|
10463
|
-
reasoning: 0,
|
|
10464
|
-
accepted_prediction: 0,
|
|
10465
|
-
rejected_prediction: 0
|
|
10466
|
-
}
|
|
10467
|
-
});
|
|
10468
|
-
return {
|
|
10469
|
-
aggregate: component(),
|
|
10470
|
-
initialize: component(),
|
|
10471
|
-
select: component(),
|
|
10472
|
-
cancel: component(),
|
|
10473
|
-
call: component(),
|
|
10474
|
-
describe: component()
|
|
10475
|
-
};
|
|
10476
|
-
};
|
|
10477
|
-
})(AgenticaTokenUsageAggregator || (AgenticaTokenUsageAggregator = {}));
|
|
11542
|
+
const findOperation = props => {
|
|
11543
|
+
const found = props.operations.get(props.input.controller)?.get(props.input.function);
|
|
11544
|
+
if (found === undefined) throw new Error(`No operation found: (controller: ${props.input.controller}, function: ${props.input.function})`);
|
|
11545
|
+
return found;
|
|
11546
|
+
};
|
|
10478
11547
|
|
|
10479
11548
|
class Agentica {
|
|
10480
11549
|
constructor(props) {
|
|
@@ -10487,9 +11556,9 @@ class Agentica {
|
|
|
10487
11556
|
this.listeners_ = new Map;
|
|
10488
11557
|
this.prompt_histories_ = (props.histories ?? []).map((input => AgenticaPromptTransformer.transform({
|
|
10489
11558
|
operations: this.operations_.group,
|
|
10490
|
-
input
|
|
11559
|
+
prompt: input
|
|
10491
11560
|
})));
|
|
10492
|
-
this.token_usage_ =
|
|
11561
|
+
this.token_usage_ = AgenticaTokenUsage.zero();
|
|
10493
11562
|
this.ready_ = false;
|
|
10494
11563
|
this.executor_ = typeof props.config?.executor === "function" ? props.config.executor : ChatGptAgent.execute(props.config?.executor ?? null);
|
|
10495
11564
|
}
|
|
@@ -10500,12 +11569,17 @@ class Agentica {
|
|
|
10500
11569
|
});
|
|
10501
11570
|
}
|
|
10502
11571
|
async conversate(content) {
|
|
10503
|
-
const prompt = {
|
|
10504
|
-
type: "text",
|
|
11572
|
+
const prompt = new AgenticaTextPrompt({
|
|
10505
11573
|
role: "user",
|
|
10506
11574
|
text: content
|
|
10507
|
-
};
|
|
10508
|
-
await this.dispatch(
|
|
11575
|
+
});
|
|
11576
|
+
await this.dispatch(new AgenticaTextEvent({
|
|
11577
|
+
role: "user",
|
|
11578
|
+
stream: StreamUtil.to(content),
|
|
11579
|
+
done: () => true,
|
|
11580
|
+
get: () => content,
|
|
11581
|
+
join: () => Promise.resolve(content)
|
|
11582
|
+
}));
|
|
10509
11583
|
const newbie = await this.executor_(this.getContext({
|
|
10510
11584
|
prompt,
|
|
10511
11585
|
usage: this.token_usage_
|
|
@@ -10541,37 +11615,50 @@ class Agentica {
|
|
|
10541
11615
|
ready: () => this.ready_,
|
|
10542
11616
|
prompt: props.prompt,
|
|
10543
11617
|
dispatch: event => this.dispatch(event),
|
|
10544
|
-
request: async (
|
|
10545
|
-
const event = {
|
|
10546
|
-
|
|
10547
|
-
source: kind,
|
|
11618
|
+
request: async (source, body) => {
|
|
11619
|
+
const event = new AgenticaRequestEvent({
|
|
11620
|
+
source,
|
|
10548
11621
|
body: {
|
|
10549
11622
|
...body,
|
|
10550
|
-
model: this.props.vendor.model
|
|
11623
|
+
model: this.props.vendor.model,
|
|
11624
|
+
stream: true
|
|
10551
11625
|
},
|
|
10552
11626
|
options: this.props.vendor.options
|
|
10553
|
-
};
|
|
11627
|
+
});
|
|
10554
11628
|
await dispatch(event);
|
|
10555
11629
|
const completion = await this.props.vendor.api.chat.completions.create(event.body, event.options);
|
|
10556
|
-
|
|
10557
|
-
|
|
10558
|
-
|
|
10559
|
-
|
|
10560
|
-
|
|
11630
|
+
const [streamForEvent, temporaryStream] = StreamUtil.transform(completion.toReadableStream(), (value => ChatGptCompletionMessageUtil.transformCompletionChunk(value))).tee();
|
|
11631
|
+
const [streamForAggregate, streamForReturn] = temporaryStream.tee();
|
|
11632
|
+
void (async () => {
|
|
11633
|
+
const reader = streamForAggregate.getReader();
|
|
11634
|
+
while (true) {
|
|
11635
|
+
const chunk = await reader.read();
|
|
11636
|
+
if (chunk.done) break;
|
|
11637
|
+
if (chunk.value.usage) {
|
|
11638
|
+
AgenticaTokenUsageAggregator.aggregate({
|
|
11639
|
+
kind: source,
|
|
11640
|
+
completionUsage: chunk.value.usage,
|
|
11641
|
+
usage: props.usage
|
|
11642
|
+
});
|
|
11643
|
+
}
|
|
11644
|
+
}
|
|
11645
|
+
})();
|
|
10561
11646
|
await dispatch({
|
|
10562
11647
|
type: "response",
|
|
10563
|
-
source
|
|
11648
|
+
source,
|
|
11649
|
+
stream: streamForEvent,
|
|
10564
11650
|
body: event.body,
|
|
10565
11651
|
options: event.options,
|
|
10566
|
-
|
|
11652
|
+
join: async () => {
|
|
11653
|
+
const chunks = await StreamUtil.readAll(streamForEvent);
|
|
11654
|
+
return ChatGptCompletionMessageUtil.merge(chunks);
|
|
11655
|
+
}
|
|
10567
11656
|
});
|
|
10568
|
-
return
|
|
11657
|
+
return streamForReturn;
|
|
10569
11658
|
},
|
|
10570
11659
|
initialize: async () => {
|
|
10571
11660
|
this.ready_ = true;
|
|
10572
|
-
await dispatch(
|
|
10573
|
-
type: "initialize"
|
|
10574
|
-
});
|
|
11661
|
+
await dispatch(new AgenticaInitializeEvent);
|
|
10575
11662
|
}
|
|
10576
11663
|
};
|
|
10577
11664
|
}
|
|
@@ -10599,5 +11686,5 @@ class Agentica {
|
|
|
10599
11686
|
}
|
|
10600
11687
|
}
|
|
10601
11688
|
|
|
10602
|
-
export { Agentica, createHttpLlmApplication };
|
|
11689
|
+
export { Agentica, AgenticaCallEvent, AgenticaCancelEvent, AgenticaCancelPrompt$1 as AgenticaCancelPrompt, AgenticaDescribeEvent, AgenticaDescribePrompt, AgenticaExecuteEvent, AgenticaExecutePrompt, AgenticaOperationSelection, AgenticaRequestEvent, AgenticaResponseEvent, AgenticaSelectEvent, AgenticaSelectPrompt, AgenticaTextEvent, AgenticaTextPrompt, AgenticaTokenUsage, createHttpLlmApplication };
|
|
10603
11690
|
//# sourceMappingURL=index.mjs.map
|