@agentica/core 0.14.2 → 0.15.0

This diff shows the content changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (139)
  1. package/lib/Agentica.js +6 -6
  2. package/lib/Agentica.js.map +1 -1
  3. package/lib/constants/AgenticaConstant.js.map +1 -0
  4. package/lib/{internal → constants}/AgenticaDefaultPrompt.js +1 -1
  5. package/lib/constants/AgenticaDefaultPrompt.js.map +1 -0
  6. package/lib/constants/AgenticaSystemPrompt.js.map +1 -0
  7. package/lib/{internal → context/internal}/AgenticaOperationComposer.d.ts +3 -3
  8. package/lib/{internal → context/internal}/AgenticaOperationComposer.js +1 -1
  9. package/lib/context/internal/AgenticaOperationComposer.js.map +1 -0
  10. package/lib/factory/events.js +6 -6
  11. package/lib/factory/events.js.map +1 -1
  12. package/lib/factory/prompts.d.ts +3 -0
  13. package/lib/factory/prompts.js +78 -4
  14. package/lib/factory/prompts.js.map +1 -1
  15. package/lib/index.d.ts +1 -0
  16. package/lib/index.js +6 -1
  17. package/lib/index.js.map +1 -1
  18. package/lib/index.mjs +996 -1011
  19. package/lib/index.mjs.map +1 -1
  20. package/lib/orchestrate/call.d.ts +4 -0
  21. package/lib/orchestrate/{ChatGptCallFunctionAgent.js → call.js} +12 -16
  22. package/lib/orchestrate/call.js.map +1 -0
  23. package/lib/orchestrate/cancel.d.ts +4 -0
  24. package/lib/orchestrate/{ChatGptCancelFunctionAgent.js → cancel.js} +12 -35
  25. package/lib/orchestrate/cancel.js.map +1 -0
  26. package/lib/orchestrate/{ChatGptDescribeFunctionAgent.d.ts → describe.d.ts} +2 -3
  27. package/lib/orchestrate/{ChatGptDescribeFunctionAgent.js → describe.js} +10 -10
  28. package/lib/orchestrate/describe.js.map +1 -0
  29. package/lib/orchestrate/{ChatGptAgent.d.ts → execute.d.ts} +1 -3
  30. package/lib/orchestrate/{ChatGptAgent.js → execute.js} +18 -17
  31. package/lib/orchestrate/execute.js.map +1 -0
  32. package/lib/orchestrate/index.d.ts +6 -8
  33. package/lib/orchestrate/index.js +6 -8
  34. package/lib/orchestrate/index.js.map +1 -1
  35. package/lib/orchestrate/initialize.d.ts +4 -0
  36. package/lib/orchestrate/{ChatGptInitializeFunctionAgent.js → initialize.js} +9 -13
  37. package/lib/orchestrate/initialize.js.map +1 -0
  38. package/lib/orchestrate/internal/cancelFunction.js +35 -0
  39. package/lib/orchestrate/internal/cancelFunction.js.map +1 -0
  40. package/lib/orchestrate/internal/selectFunction.d.ts +5 -0
  41. package/lib/orchestrate/internal/selectFunction.js +32 -0
  42. package/lib/orchestrate/internal/selectFunction.js.map +1 -0
  43. package/lib/orchestrate/select.d.ts +4 -0
  44. package/lib/orchestrate/{ChatGptSelectFunctionAgent.js → select.js} +12 -34
  45. package/lib/orchestrate/select.js.map +1 -0
  46. package/lib/transformers/AgenticaEventTransformer.js +1 -1
  47. package/lib/transformers/AgenticaEventTransformer.js.map +1 -1
  48. package/lib/utils/AsyncQueue.js.map +1 -0
  49. package/lib/utils/ByteArrayUtil.js.map +1 -0
  50. package/lib/{orchestrate → utils}/ChatGptCompletionMessageUtil.js +3 -3
  51. package/lib/utils/ChatGptCompletionMessageUtil.js.map +1 -0
  52. package/lib/{orchestrate/ChatGptUsageAggregator.d.ts → utils/ChatGptTokenUsageAggregator.d.ts} +1 -1
  53. package/lib/{orchestrate/ChatGptUsageAggregator.js → utils/ChatGptTokenUsageAggregator.js} +3 -3
  54. package/lib/utils/ChatGptTokenUsageAggregator.js.map +1 -0
  55. package/lib/utils/MPSC.js.map +1 -0
  56. package/lib/utils/MathUtil.js.map +1 -0
  57. package/lib/utils/Singleton.js.map +1 -0
  58. package/lib/utils/StreamUtil.js.map +1 -0
  59. package/lib/utils/__map_take.d.ts +1 -0
  60. package/lib/utils/__map_take.js.map +1 -0
  61. package/lib/utils/index.d.ts +4 -0
  62. package/lib/utils/index.js +21 -0
  63. package/lib/utils/index.js.map +1 -0
  64. package/package.json +1 -1
  65. package/src/Agentica.ts +6 -6
  66. package/src/{internal → constants}/AgenticaDefaultPrompt.ts +2 -2
  67. package/src/{internal → context/internal}/AgenticaOperationComposer.ts +5 -5
  68. package/src/factory/events.ts +6 -6
  69. package/src/factory/prompts.ts +91 -5
  70. package/src/index.ts +9 -0
  71. package/src/orchestrate/{ChatGptCallFunctionAgent.ts → call.ts} +11 -16
  72. package/src/orchestrate/{ChatGptCancelFunctionAgent.ts → cancel.ts} +9 -37
  73. package/src/orchestrate/{ChatGptDescribeFunctionAgent.ts → describe.ts} +9 -10
  74. package/src/orchestrate/{ChatGptAgent.ts → execute.ts} +16 -15
  75. package/src/orchestrate/index.ts +6 -8
  76. package/src/orchestrate/{ChatGptInitializeFunctionAgent.ts → initialize.ts} +8 -13
  77. package/src/orchestrate/internal/cancelFunction.ts +34 -0
  78. package/src/orchestrate/internal/selectFunction.ts +29 -0
  79. package/src/orchestrate/{ChatGptSelectFunctionAgent.ts → select.ts} +10 -37
  80. package/src/transformers/AgenticaEventTransformer.ts +1 -1
  81. package/src/{orchestrate → utils}/ChatGptCompletionMessageUtil.ts +3 -3
  82. package/src/{orchestrate/ChatGptUsageAggregator.ts → utils/ChatGptTokenUsageAggregator.ts} +1 -1
  83. package/src/utils/index.ts +4 -0
  84. package/lib/internal/AgenticaConstant.js.map +0 -1
  85. package/lib/internal/AgenticaDefaultPrompt.js.map +0 -1
  86. package/lib/internal/AgenticaOperationComposer.js.map +0 -1
  87. package/lib/internal/AgenticaSystemPrompt.js.map +0 -1
  88. package/lib/internal/AsyncQueue.js.map +0 -1
  89. package/lib/internal/ByteArrayUtil.js.map +0 -1
  90. package/lib/internal/MPSC.js.map +0 -1
  91. package/lib/internal/MathUtil.js.map +0 -1
  92. package/lib/internal/Singleton.js.map +0 -1
  93. package/lib/internal/StreamUtil.js.map +0 -1
  94. package/lib/internal/__map_take.js.map +0 -1
  95. package/lib/orchestrate/ChatGptAgent.js.map +0 -1
  96. package/lib/orchestrate/ChatGptCallFunctionAgent.d.ts +0 -8
  97. package/lib/orchestrate/ChatGptCallFunctionAgent.js.map +0 -1
  98. package/lib/orchestrate/ChatGptCancelFunctionAgent.d.ts +0 -12
  99. package/lib/orchestrate/ChatGptCancelFunctionAgent.js.map +0 -1
  100. package/lib/orchestrate/ChatGptCompletionMessageUtil.js.map +0 -1
  101. package/lib/orchestrate/ChatGptDescribeFunctionAgent.js.map +0 -1
  102. package/lib/orchestrate/ChatGptHistoryDecoder.d.ts +0 -8
  103. package/lib/orchestrate/ChatGptHistoryDecoder.js +0 -80
  104. package/lib/orchestrate/ChatGptHistoryDecoder.js.map +0 -1
  105. package/lib/orchestrate/ChatGptInitializeFunctionAgent.d.ts +0 -8
  106. package/lib/orchestrate/ChatGptInitializeFunctionAgent.js.map +0 -1
  107. package/lib/orchestrate/ChatGptSelectFunctionAgent.d.ts +0 -21
  108. package/lib/orchestrate/ChatGptSelectFunctionAgent.js.map +0 -1
  109. package/lib/orchestrate/ChatGptUsageAggregator.js.map +0 -1
  110. package/src/orchestrate/ChatGptHistoryDecoder.ts +0 -91
  111. /package/lib/{internal → constants}/AgenticaConstant.d.ts +0 -0
  112. /package/lib/{internal → constants}/AgenticaConstant.js +0 -0
  113. /package/lib/{internal → constants}/AgenticaDefaultPrompt.d.ts +0 -0
  114. /package/lib/{internal → constants}/AgenticaSystemPrompt.d.ts +0 -0
  115. /package/lib/{internal → constants}/AgenticaSystemPrompt.js +0 -0
  116. /package/lib/{internal/Singleton.d.ts → orchestrate/internal/cancelFunction.d.ts} +0 -0
  117. /package/lib/{internal → utils}/AsyncQueue.d.ts +0 -0
  118. /package/lib/{internal → utils}/AsyncQueue.js +0 -0
  119. /package/lib/{internal → utils}/ByteArrayUtil.d.ts +0 -0
  120. /package/lib/{internal → utils}/ByteArrayUtil.js +0 -0
  121. /package/lib/{orchestrate → utils}/ChatGptCompletionMessageUtil.d.ts +0 -0
  122. /package/lib/{internal → utils}/MPSC.d.ts +0 -0
  123. /package/lib/{internal → utils}/MPSC.js +0 -0
  124. /package/lib/{internal → utils}/MathUtil.d.ts +0 -0
  125. /package/lib/{internal → utils}/MathUtil.js +0 -0
  126. /package/lib/{internal/__map_take.d.ts → utils/Singleton.d.ts} +0 -0
  127. /package/lib/{internal → utils}/Singleton.js +0 -0
  128. /package/lib/{internal → utils}/StreamUtil.d.ts +0 -0
  129. /package/lib/{internal → utils}/StreamUtil.js +0 -0
  130. /package/lib/{internal → utils}/__map_take.js +0 -0
  131. /package/src/{internal → constants}/AgenticaConstant.ts +0 -0
  132. /package/src/{internal → constants}/AgenticaSystemPrompt.ts +0 -0
  133. /package/src/{internal → utils}/AsyncQueue.ts +0 -0
  134. /package/src/{internal → utils}/ByteArrayUtil.ts +0 -0
  135. /package/src/{internal → utils}/MPSC.ts +0 -0
  136. /package/src/{internal → utils}/MathUtil.ts +0 -0
  137. /package/src/{internal → utils}/Singleton.ts +0 -0
  138. /package/src/{internal → utils}/StreamUtil.ts +0 -0
  139. /package/src/{internal → utils}/__map_take.ts +0 -0
package/lib/index.mjs CHANGED
@@ -1,128 +1,14 @@
1
- import { HttpLlm, ChatGptTypeChecker, OpenApi } from "@samchon/openapi";
2
-
3
- import * as __typia_transform__validateReport from "typia/lib/internal/_validateReport.js";
1
+ import * as __typia_transform__assertGuard from "typia/lib/internal/_assertGuard.js";
4
2
 
5
3
  import "typia";
6
4
 
7
5
  import { v4 } from "uuid";
8
6
 
9
- import * as __typia_transform__assertGuard from "typia/lib/internal/_assertGuard.js";
10
-
11
- import * as __typia_transform__accessExpressionAsString from "typia/lib/internal/_accessExpressionAsString.js";
12
-
13
- var AgenticaConstant;
14
-
15
- (function(AgenticaConstant) {
16
- AgenticaConstant.RETRY = 3;
17
- AgenticaConstant.ELITICISM = true;
18
- })(AgenticaConstant || (AgenticaConstant = {}));
19
-
20
- const AgenticaSystemPrompt = {
21
- CANCEL: "You are a helpful assistant for cancelling functions which are prepared to call.\n\nUse the supplied tools to select some functions to cancel of `getApiFunctions()` returned.\n\nIf you can't find any proper function to select, don't talk, don't do anything.",
22
- COMMON: 'At first, the user\'s language locale code is "${locale}". When you are conversating with the user or describing the function calling result, consider it and always translate to the target locale language. Never conversate with different locale language text with the user.\n\nAt second, the user\'s timezone is "${timezone}", and ISO datetime is ${datetime}. When you are conversating with the user, consider current time and user belonged timezone.',
23
- DESCRIBE: "You are a helpful assistant describing return values of function calls.\n\nAbove messages are the list of function call histories. When describing the return values, please do not too much shortly summarize them. Instead, provide detailed descriptions as much as.\n\nAlso, its content format must be markdown. If required, utilize the mermaid syntax for drawing some diagrams. When image contents are, just put them through the markdown image syntax.\n\nAt last, if user's language locale code is different with your description, please translate it to the user's language.",
24
- EXECUTE: 'You are a helpful assistant for tool calling.\n\nUse the supplied tools to assist the user.\n\nIf previous messages are not enough to compose the arguments, you can ask the user to write more information. By the way, when asking the user to write more information, make the text concise and clear.\n\nFor reference, in the "tool" role message content, the `function` property means metadata of the API operation. In other words, it is the function schema describing its purpose, parameters and return value types. And then the `data` property is the return value from the target function calling.',
25
- INITIALIZE: "You are a helpful assistant.\n\nUse the supplied tools to assist the user.",
26
- SELECT: "You are a helpful assistant for selecting functions to call.\n\nUse the supplied tools to select some functions of `getApiFunctions()` returned.\n\nWhen selecting functions to call, pay attention to the relationship between functions. In particular, check the prerequisites between each function.\n\nIf you can't find any proper function to select, just type your own message. By the way, when typing your own message, please consider the user's language locale code. If your message is different with the user's language, please translate it to the user's."
27
- };
28
-
29
- const NOT_MOUNTED_YET = {};
30
-
31
- class Singleton {
32
- constructor(closure) {
33
- this.closure_ = closure;
34
- this.value_ = NOT_MOUNTED_YET;
35
- }
36
- get(...args) {
37
- if (this.value_ === NOT_MOUNTED_YET) {
38
- this.value_ = this.closure_(...args);
39
- }
40
- return this.value_;
41
- }
42
- }
43
-
44
- const isNode = new Singleton((() => {
45
- const isObject = obj => typeof obj === "object" && obj !== null;
46
- return typeof global === "object" && isObject(global) && isObject(global.process) && isObject(global.process.versions) && typeof global.process.versions.node !== "undefined";
47
- }));
48
-
49
- const getLocale = new Singleton((() => isNode.get() ? process.env.LANG?.split(".")[0] ?? "en-US" : navigator.language));
50
-
51
- const getTimezone = new Singleton((() => Intl.DateTimeFormat().resolvedOptions().timeZone));
52
-
53
- function write(config) {
54
- if (config?.systemPrompt?.common !== undefined) {
55
- return config?.systemPrompt?.common(config);
56
- }
57
- const locale = config?.locale ?? getLocale.get();
58
- const timezone = config?.timezone ?? getTimezone.get();
59
- return AgenticaSystemPrompt.COMMON.replace("${locale}", locale).replace("${timezone}", timezone);
60
- }
61
-
62
- const AgenticaDefaultPrompt = {
63
- write
64
- };
65
-
66
- async function readAll(stream) {
67
- const reader = stream.getReader();
68
- const result = [];
69
- while (true) {
70
- const {done, value} = await reader.read();
71
- if (done) {
72
- break;
73
- }
74
- result.push(value);
75
- }
76
- return result;
77
- }
78
-
79
- async function reduce(stream, reducer, initial) {
80
- const reader = stream.getReader();
81
- let acc = initial ?? null;
82
- while (true) {
83
- const {done, value} = await reader.read();
84
- if (done) {
85
- break;
86
- }
87
- if (acc === null) {
88
- acc = value;
89
- continue;
90
- }
91
- acc = reducer(acc, value);
92
- }
93
- return acc;
94
- }
95
-
96
- function to(value) {
97
- const stream = new ReadableStream({
98
- start: controller => {
99
- controller.enqueue(value);
100
- controller.close();
101
- }
102
- });
103
- return stream;
104
- }
7
+ import * as __typia_transform__validateReport from "typia/lib/internal/_validateReport.js";
105
8
 
106
- function transform$1(stream, transformer) {
107
- const reader = stream.getReader();
108
- return new ReadableStream({
109
- pull: async controller => {
110
- const {done, value} = await reader.read();
111
- if (!done) {
112
- controller.enqueue(transformer(value));
113
- } else {
114
- controller.close();
115
- }
116
- }
117
- });
118
- }
9
+ import { HttpLlm, ChatGptTypeChecker, OpenApi } from "@samchon/openapi";
119
10
 
120
- const StreamUtil = {
121
- readAll,
122
- reduce,
123
- to,
124
- transform: transform$1
125
- };
11
+ import * as __typia_transform__accessExpressionAsString from "typia/lib/internal/_accessExpressionAsString.js";
126
12
 
127
13
  var ByteArrayUtil;
128
14
 
@@ -172,7 +58,7 @@ function sum(x, y) {
172
58
  };
173
59
  }
174
60
 
175
- const ChatGptUsageAggregator = {
61
+ const ChatGptTokenUsageAggregator = {
176
62
  sum,
177
63
  sumCompletionTokenDetail,
178
64
  sumPromptTokenDetail
@@ -620,7 +506,7 @@ function accumulate(origin, chunk) {
620
506
  if (origin.usage == null) {
621
507
  return chunk.usage;
622
508
  }
623
- return ChatGptUsageAggregator.sum(origin.usage, chunk.usage);
509
+ return ChatGptTokenUsageAggregator.sum(origin.usage, chunk.usage);
624
510
  })();
625
511
  return {
626
512
  ...origin,
@@ -707,74 +593,320 @@ const ChatGptCompletionMessageUtil = {
707
593
  mergeToolCalls
708
594
  };
709
595
 
710
- function decode(history) {
711
- if (history.type === "describe") {
712
- return [];
713
- } else if (history.type === "text") {
714
- return [ {
715
- role: history.role,
716
- content: history.text
717
- } ];
718
- } else if (history.type === "select" || history.type === "cancel") {
719
- return [ {
720
- role: "assistant",
721
- tool_calls: [ {
722
- type: "function",
723
- id: history.id,
724
- function: {
725
- name: `${history.type}Functions`,
726
- arguments: JSON.stringify({
727
- functions: history.selections.map((s => ({
728
- name: s.operation.function.name,
729
- reason: s.reason
730
- })))
731
- })
732
- }
733
- } ]
734
- }, {
735
- role: "tool",
736
- tool_call_id: history.id,
737
- content: ""
738
- } ];
596
+ class AgenticaTokenUsage {
597
+ constructor(props) {
598
+ if (props === undefined) {
599
+ const zero = AgenticaTokenUsage.zero();
600
+ this.aggregate = zero.aggregate;
601
+ this.initialize = zero.initialize;
602
+ this.select = zero.select;
603
+ this.cancel = zero.cancel;
604
+ this.call = zero.call;
605
+ this.describe = zero.describe;
606
+ } else {
607
+ this.aggregate = props.aggregate;
608
+ this.initialize = props.initialize;
609
+ this.select = props.select;
610
+ this.cancel = props.cancel;
611
+ this.call = props.call;
612
+ this.describe = props.describe;
613
+ }
739
614
  }
740
- return [ {
741
- role: "assistant",
742
- tool_calls: [ {
743
- type: "function",
744
- id: history.id,
745
- function: {
746
- name: history.operation.name,
747
- arguments: JSON.stringify(history.arguments)
748
- }
749
- } ]
750
- }, {
751
- role: "tool",
752
- tool_call_id: history.id,
753
- content: JSON.stringify({
754
- function: {
755
- protocol: history.operation.protocol,
756
- description: history.operation.function.description,
757
- parameters: history.operation.function.parameters,
758
- output: history.operation.function.output,
759
- ...history.operation.protocol === "http" ? {
760
- method: history.operation.function.method,
761
- path: history.operation.function.path
762
- } : {}
615
+ increment(y) {
616
+ const increment = (x, y) => {
617
+ x.total += y.total;
618
+ x.input.total += y.input.total;
619
+ x.input.cached += y.input.cached;
620
+ x.output.total += y.output.total;
621
+ x.output.reasoning += y.output.reasoning;
622
+ x.output.accepted_prediction += y.output.accepted_prediction;
623
+ x.output.rejected_prediction += y.output.rejected_prediction;
624
+ };
625
+ increment(this.aggregate, y.aggregate);
626
+ increment(this.initialize, y.initialize);
627
+ increment(this.select, y.select);
628
+ increment(this.cancel, y.cancel);
629
+ increment(this.call, y.call);
630
+ increment(this.describe, y.describe);
631
+ }
632
+ toJSON() {
633
+ return (() => {
634
+ const _co0 = input => ({
635
+ aggregate: _co1(input.aggregate),
636
+ initialize: _co1(input.initialize),
637
+ select: _co1(input.select),
638
+ cancel: _co1(input.cancel),
639
+ call: _co1(input.call),
640
+ describe: _co1(input.describe)
641
+ });
642
+ const _co1 = input => ({
643
+ total: input.total,
644
+ input: _co2(input.input),
645
+ output: _co3(input.output)
646
+ });
647
+ const _co2 = input => ({
648
+ total: input.total,
649
+ cached: input.cached
650
+ });
651
+ const _co3 = input => ({
652
+ total: input.total,
653
+ reasoning: input.reasoning,
654
+ accepted_prediction: input.accepted_prediction,
655
+ rejected_prediction: input.rejected_prediction
656
+ });
657
+ return input => _co0(input);
658
+ })()(this);
659
+ }
660
+ static zero() {
661
+ const component = () => ({
662
+ total: 0,
663
+ input: {
664
+ total: 0,
665
+ cached: 0
763
666
  },
764
- ...history.operation.protocol === "http" ? {
765
- status: history.value.status,
766
- data: history.value.body
767
- } : {
768
- value: history.value
667
+ output: {
668
+ total: 0,
669
+ reasoning: 0,
670
+ accepted_prediction: 0,
671
+ rejected_prediction: 0
769
672
  }
770
- })
771
- } ];
772
- }
773
-
774
- const ChatGptHistoryDecoder = {
775
- decode
673
+ });
674
+ return new AgenticaTokenUsage({
675
+ aggregate: component(),
676
+ initialize: component(),
677
+ select: component(),
678
+ cancel: component(),
679
+ call: component(),
680
+ describe: component()
681
+ });
682
+ }
683
+ static plus(x, y) {
684
+ const z = new AgenticaTokenUsage(x);
685
+ z.increment(y.toJSON());
686
+ return z;
687
+ }
688
+ }
689
+
690
+ function aggregate(props) {
691
+ const component = props.usage[props.kind];
692
+ component.total += props.completionUsage.total_tokens;
693
+ component.input.total += props.completionUsage.prompt_tokens;
694
+ component.input.total += props.completionUsage.prompt_tokens_details?.audio_tokens ?? 0;
695
+ component.input.cached += props.completionUsage.prompt_tokens_details?.cached_tokens ?? 0;
696
+ component.output.total += props.completionUsage.completion_tokens;
697
+ component.output.accepted_prediction += props.completionUsage.completion_tokens_details?.accepted_prediction_tokens ?? 0;
698
+ component.output.reasoning += props.completionUsage.completion_tokens_details?.reasoning_tokens ?? 0;
699
+ component.output.rejected_prediction += props.completionUsage.completion_tokens_details?.rejected_prediction_tokens ?? 0;
700
+ const sum = getter => Object.entries(props.usage).filter((([key]) => key !== "aggregate")).map((([, comp]) => getter(comp))).reduce(((a, b) => a + b), 0);
701
+ const aggregate = props.usage.aggregate;
702
+ aggregate.total = sum((comp => comp.total));
703
+ aggregate.input.total = sum((comp => comp.input.total));
704
+ aggregate.input.cached = sum((comp => comp.input.cached));
705
+ aggregate.output.total = sum((comp => comp.output.total));
706
+ aggregate.output.reasoning = sum((comp => comp.output.reasoning));
707
+ aggregate.output.accepted_prediction = sum((comp => comp.output.accepted_prediction));
708
+ aggregate.output.rejected_prediction = sum((comp => comp.output.rejected_prediction));
709
+ }
710
+
711
+ const AgenticaTokenUsageAggregator = {
712
+ aggregate
713
+ };
714
+
715
+ function __map_take(dict, key, generator) {
716
+ const oldbie = dict.get(key);
717
+ if (oldbie !== undefined) {
718
+ return oldbie;
719
+ }
720
+ const value = generator();
721
+ dict.set(key, value);
722
+ return value;
723
+ }
724
+
725
+ function compose(props) {
726
+ const unique = props.controllers.length === 1 || (() => {
727
+ const names = props.controllers.map((controller => controller.application.functions.map((func => func.name)))).flat();
728
+ return new Set(names).size === names.length;
729
+ })();
730
+ const naming = (func, ci) => unique ? func : `_${ci}_${func}`;
731
+ const array = props.controllers.map(((controller, ci) => controller.protocol === "http" ? controller.application.functions.map((func => ({
732
+ protocol: "http",
733
+ controller,
734
+ function: func,
735
+ name: naming(func.name, ci),
736
+ toJSON: () => ({
737
+ protocol: "http",
738
+ controller: controller.name,
739
+ function: func.name,
740
+ name: naming(func.name, ci)
741
+ })
742
+ }))) : controller.application.functions.map((func => ({
743
+ protocol: "class",
744
+ controller,
745
+ function: func,
746
+ name: naming(func.name, ci),
747
+ toJSON: () => ({
748
+ protocol: "class",
749
+ controller: controller.name,
750
+ function: func.name,
751
+ name: naming(func.name, ci)
752
+ })
753
+ }))))).flat();
754
+ const divided = props.config?.capacity !== undefined && array.length > props.config.capacity ? divide({
755
+ array,
756
+ capacity: props.config.capacity
757
+ }) : undefined;
758
+ const flat = new Map;
759
+ const group = new Map;
760
+ for (const item of array) {
761
+ flat.set(item.name, item);
762
+ __map_take(group, item.controller.name, (() => new Map)).set(item.name, item);
763
+ }
764
+ return {
765
+ array,
766
+ divided,
767
+ flat,
768
+ group
769
+ };
770
+ }
771
+
772
+ function divide(props) {
773
+ const size = Math.ceil(props.array.length / props.capacity);
774
+ const capacity = Math.ceil(props.array.length / size);
775
+ const replica = props.array.slice();
776
+ return Array.from({
777
+ length: size
778
+ }, (() => replica.splice(0, capacity)));
779
+ }
780
+
781
+ const AgenticaOperationComposer = {
782
+ compose
783
+ };
784
+
785
+ async function readAll(stream) {
786
+ const reader = stream.getReader();
787
+ const result = [];
788
+ while (true) {
789
+ const {done, value} = await reader.read();
790
+ if (done) {
791
+ break;
792
+ }
793
+ result.push(value);
794
+ }
795
+ return result;
796
+ }
797
+
798
+ async function reduce(stream, reducer, initial) {
799
+ const reader = stream.getReader();
800
+ let acc = initial ?? null;
801
+ while (true) {
802
+ const {done, value} = await reader.read();
803
+ if (done) {
804
+ break;
805
+ }
806
+ if (acc === null) {
807
+ acc = value;
808
+ continue;
809
+ }
810
+ acc = reducer(acc, value);
811
+ }
812
+ return acc;
813
+ }
814
+
815
+ function to(value) {
816
+ const stream = new ReadableStream({
817
+ start: controller => {
818
+ controller.enqueue(value);
819
+ controller.close();
820
+ }
821
+ });
822
+ return stream;
823
+ }
824
+
825
+ function transform$1(stream, transformer) {
826
+ const reader = stream.getReader();
827
+ return new ReadableStream({
828
+ pull: async controller => {
829
+ const {done, value} = await reader.read();
830
+ if (!done) {
831
+ controller.enqueue(transformer(value));
832
+ } else {
833
+ controller.close();
834
+ }
835
+ }
836
+ });
837
+ }
838
+
839
+ const StreamUtil = {
840
+ readAll,
841
+ reduce,
842
+ to,
843
+ transform: transform$1
776
844
  };
777
845
 
846
+ function decodePrompt(history) {
847
+ if (history.type === "describe") {
848
+ return [];
849
+ } else if (history.type === "text") {
850
+ return [ {
851
+ role: history.role,
852
+ content: history.text
853
+ } ];
854
+ } else if (history.type === "select" || history.type === "cancel") {
855
+ return [ {
856
+ role: "assistant",
857
+ tool_calls: [ {
858
+ type: "function",
859
+ id: history.id,
860
+ function: {
861
+ name: `${history.type}Functions`,
862
+ arguments: JSON.stringify({
863
+ functions: history.selections.map((s => ({
864
+ name: s.operation.function.name,
865
+ reason: s.reason
866
+ })))
867
+ })
868
+ }
869
+ } ]
870
+ }, {
871
+ role: "tool",
872
+ tool_call_id: history.id,
873
+ content: ""
874
+ } ];
875
+ }
876
+ return [ {
877
+ role: "assistant",
878
+ tool_calls: [ {
879
+ type: "function",
880
+ id: history.id,
881
+ function: {
882
+ name: history.operation.name,
883
+ arguments: JSON.stringify(history.arguments)
884
+ }
885
+ } ]
886
+ }, {
887
+ role: "tool",
888
+ tool_call_id: history.id,
889
+ content: JSON.stringify({
890
+ function: {
891
+ protocol: history.operation.protocol,
892
+ description: history.operation.function.description,
893
+ parameters: history.operation.function.parameters,
894
+ output: history.operation.function.output,
895
+ ...history.operation.protocol === "http" ? {
896
+ method: history.operation.function.method,
897
+ path: history.operation.function.path
898
+ } : {}
899
+ },
900
+ ...history.operation.protocol === "http" ? {
901
+ status: history.value.status,
902
+ data: history.value.body
903
+ } : {
904
+ value: history.value
905
+ }
906
+ })
907
+ } ];
908
+ }
909
+
778
910
  function createTextPrompt(props) {
779
911
  const prompt = {
780
912
  type: "text",
@@ -845,25 +977,134 @@ function createExecutePrompt(props) {
845
977
  };
846
978
  }
847
979
 
848
- function createInitializeEvent() {
849
- const event = {
850
- type: "initialize"
851
- };
980
+ function createOperationSelection(props) {
852
981
  return {
853
- type: event.type,
854
- toJSON: () => event
982
+ operation: props.operation,
983
+ reason: props.reason,
984
+ toJSON: () => ({
985
+ operation: props.operation.toJSON(),
986
+ reason: props.reason
987
+ })
855
988
  };
856
989
  }
857
990
 
858
- function createSelectEvent(props) {
859
- return {
860
- type: "select",
861
- selection: props.selection,
862
- toJSON: () => ({
863
- type: "select",
864
- selection: props.selection.toJSON()
865
- }),
866
- toPrompt: () => createSelectPrompt({
991
+ function transform(props) {
992
+ if (props.prompt.type === "text") {
993
+ return transformText({
994
+ prompt: props.prompt
995
+ });
996
+ } else if (props.prompt.type === "select") {
997
+ return transformSelect({
998
+ operations: props.operations,
999
+ prompt: props.prompt
1000
+ });
1001
+ } else if (props.prompt.type === "cancel") {
1002
+ return transformCancel({
1003
+ operations: props.operations,
1004
+ prompt: props.prompt
1005
+ });
1006
+ } else if (props.prompt.type === "execute") {
1007
+ return transformExecute({
1008
+ operations: props.operations,
1009
+ prompt: props.prompt
1010
+ });
1011
+ } else if (props.prompt.type === "describe") {
1012
+ return transformDescribe({
1013
+ operations: props.operations,
1014
+ prompt: props.prompt
1015
+ });
1016
+ }
1017
+ throw new Error("Invalid prompt type.");
1018
+ }
1019
+
1020
+ function transformText(props) {
1021
+ return createTextPrompt(props.prompt);
1022
+ }
1023
+
1024
+ function transformSelect(props) {
1025
+ return createSelectPrompt({
1026
+ id: props.prompt.id,
1027
+ selections: props.prompt.selections.map((select => createOperationSelection({
1028
+ operation: findOperation({
1029
+ operations: props.operations,
1030
+ input: select.operation
1031
+ }),
1032
+ reason: select.reason
1033
+ })))
1034
+ });
1035
+ }
1036
+
1037
+ function transformCancel(props) {
1038
+ return createCancelPrompt({
1039
+ id: props.prompt.id,
1040
+ selections: props.prompt.selections.map((select => createOperationSelection({
1041
+ operation: findOperation({
1042
+ operations: props.operations,
1043
+ input: select.operation
1044
+ }),
1045
+ reason: select.reason
1046
+ })))
1047
+ });
1048
+ }
1049
+
1050
+ function transformExecute(props) {
1051
+ return createExecutePrompt({
1052
+ id: props.prompt.id,
1053
+ operation: findOperation({
1054
+ operations: props.operations,
1055
+ input: props.prompt.operation
1056
+ }),
1057
+ arguments: props.prompt.arguments,
1058
+ value: props.prompt.value
1059
+ });
1060
+ }
1061
+
1062
+ function transformDescribe(props) {
1063
+ return createDescribePrompt({
1064
+ text: props.prompt.text,
1065
+ executes: props.prompt.executes.map((next => transformExecute({
1066
+ operations: props.operations,
1067
+ prompt: next
1068
+ })))
1069
+ });
1070
+ }
1071
+
1072
+ function findOperation(props) {
1073
+ const found = props.operations.get(props.input.controller)?.get(props.input.function);
1074
+ if (found === undefined) {
1075
+ throw new Error(`No operation found: (controller: ${props.input.controller}, function: ${props.input.function})`);
1076
+ }
1077
+ return found;
1078
+ }
1079
+
1080
+ const AgenticaPromptTransformer = {
1081
+ transform,
1082
+ transformText,
1083
+ transformSelect,
1084
+ transformCancel,
1085
+ transformExecute,
1086
+ transformDescribe
1087
+ };
1088
+
1089
+ function createInitializeEvent() {
1090
+ const event = {
1091
+ type: "initialize"
1092
+ };
1093
+ return {
1094
+ type: event.type,
1095
+ toJSON: () => event
1096
+ };
1097
+ }
1098
+
1099
+ function createSelectEvent(props) {
1100
+ return {
1101
+ type: "select",
1102
+ selection: props.selection,
1103
+ toJSON: () => ({
1104
+ type: "select",
1105
+ selection: props.selection.toJSON()
1106
+ }),
1107
+ toPrompt: () => createSelectPrompt({
867
1108
  id: v4(),
868
1109
  selections: [ props.selection ]
869
1110
  })
@@ -1001,150 +1242,232 @@ function createResponseEvent(props) {
1001
1242
  };
1002
1243
  }
1003
1244
 
1004
- function createOperationSelection(props) {
1005
- return {
1006
- operation: props.operation,
1007
- reason: props.reason,
1008
- toJSON: () => ({
1009
- operation: props.operation.toJSON(),
1010
- reason: props.reason
1011
- })
1012
- };
1013
- }
1014
-
1015
- const CONTAINER$1 = {
1016
- functions: [ {
1017
- name: "cancelFunctions",
1018
- parameters: {
1019
- description: " Properties of the function\n\n------------------------------\n\nCurrent Type: {@link __IChatFunctionReference.IProps}",
1020
- type: "object",
1021
- properties: {
1022
- functions: {
1023
- title: "List of target functions",
1024
- description: "List of target functions.",
1025
- type: "array",
1026
- items: {
1027
- description: "Current Type: {@link ___IChatFunctionReference}",
1028
- type: "object",
1029
- properties: {
1030
- reason: {
1031
- title: "The reason of the function selection",
1032
- description: "The reason of the function selection.\n\nJust write the reason why you've determined to select this function.",
1033
- type: "string"
1034
- },
1035
- name: {
1036
- title: "Name of the target function to call",
1037
- description: "Name of the target function to call.",
1038
- type: "string"
1039
- }
1040
- },
1041
- required: [ "reason", "name" ]
1042
- }
1043
- }
1044
- },
1045
- required: [ "functions" ],
1046
- additionalProperties: false,
1047
- $defs: {}
1048
- },
1049
- description: "Cancel a function from the candidate list to call.\n\nIf you A.I. agent has understood that the user wants to cancel\nsome candidate functions to call from the conversation, please cancel\nthem through this function.\n\nAlso, when you A.I. find a function that has been selected by the candidate\npooling, cancel the function by calling this function. For reference, the\ncandidate pooling means that user wants only one function to call, but you A.I.\nagent selects multiple candidate functions because the A.I. agent can't specify\nonly one thing due to lack of specificity or homogeneity of candidate functions.\n\nAdditionally, if you A.I. agent wants to cancel same function multiply, you can\ndo it by assigning the same function name multiply in the `functions` property.",
1050
- validate: (() => {
1051
- const _io0 = input => Array.isArray(input.functions) && input.functions.every((elem => "object" === typeof elem && null !== elem && _io1(elem)));
1052
- const _io1 = input => "string" === typeof input.reason && "string" === typeof input.name;
1053
- const _vo0 = (input, _path, _exceptionable = true) => [ (Array.isArray(input.functions) || _report(_exceptionable, {
1054
- path: _path + ".functions",
1055
- expected: "Array<___IChatFunctionReference>",
1056
- value: input.functions
1057
- })) && input.functions.map(((elem, _index2) => ("object" === typeof elem && null !== elem || _report(_exceptionable, {
1058
- path: _path + ".functions[" + _index2 + "]",
1059
- expected: "___IChatFunctionReference",
1060
- value: elem
1061
- })) && _vo1(elem, _path + ".functions[" + _index2 + "]", _exceptionable) || _report(_exceptionable, {
1062
- path: _path + ".functions[" + _index2 + "]",
1063
- expected: "___IChatFunctionReference",
1064
- value: elem
1065
- }))).every((flag => flag)) || _report(_exceptionable, {
1066
- path: _path + ".functions",
1067
- expected: "Array<___IChatFunctionReference>",
1068
- value: input.functions
1069
- }) ].every((flag => flag));
1070
- const _vo1 = (input, _path, _exceptionable = true) => [ "string" === typeof input.reason || _report(_exceptionable, {
1071
- path: _path + ".reason",
1072
- expected: "string",
1073
- value: input.reason
1074
- }), "string" === typeof input.name || _report(_exceptionable, {
1075
- path: _path + ".name",
1076
- expected: "string",
1077
- value: input.name
1078
- }) ].every((flag => flag));
1079
- const __is = input => "object" === typeof input && null !== input && _io0(input);
1080
- let errors;
1081
- let _report;
1082
- return input => {
1083
- if (false === __is(input)) {
1084
- errors = [];
1085
- _report = __typia_transform__validateReport._validateReport(errors);
1086
- ((input, _path, _exceptionable = true) => ("object" === typeof input && null !== input || _report(true, {
1087
- path: _path + "",
1088
- expected: "__IChatFunctionReference.IProps",
1089
- value: input
1090
- })) && _vo0(input, _path + "", true) || _report(true, {
1091
- path: _path + "",
1092
- expected: "__IChatFunctionReference.IProps",
1093
- value: input
1094
- }))(input, "$input", true);
1095
- const success = 0 === errors.length;
1096
- return success ? {
1097
- success,
1098
- data: input
1099
- } : {
1100
- success,
1101
- errors,
1102
- data: input
1103
- };
1104
- }
1105
- return {
1106
- success: true,
1107
- data: input
1108
- };
1109
- };
1110
- })()
1111
- } ]
1245
+ const AgenticaSystemPrompt = {
1246
+ CANCEL: "You are a helpful assistant for cancelling functions which are prepared to call.\n\nUse the supplied tools to select some functions to cancel of `getApiFunctions()` returned.\n\nIf you can't find any proper function to select, don't talk, don't do anything.",
1247
+ COMMON: 'At first, the user\'s language locale code is "${locale}". When you are conversating with the user or describing the function calling result, consider it and always translate to the target locale language. Never conversate with different locale language text with the user.\n\nAt second, the user\'s timezone is "${timezone}", and ISO datetime is ${datetime}. When you are conversating with the user, consider current time and user belonged timezone.',
1248
+ DESCRIBE: "You are a helpful assistant describing return values of function calls.\n\nAbove messages are the list of function call histories. When describing the return values, please do not too much shortly summarize them. Instead, provide detailed descriptions as much as.\n\nAlso, its content format must be markdown. If required, utilize the mermaid syntax for drawing some diagrams. When image contents are, just put them through the markdown image syntax.\n\nAt last, if user's language locale code is different with your description, please translate it to the user's language.",
1249
+ EXECUTE: 'You are a helpful assistant for tool calling.\n\nUse the supplied tools to assist the user.\n\nIf previous messages are not enough to compose the arguments, you can ask the user to write more information. By the way, when asking the user to write more information, make the text concise and clear.\n\nFor reference, in the "tool" role message content, the `function` property means metadata of the API operation. In other words, it is the function schema describing its purpose, parameters and return value types. And then the `data` property is the return value from the target function calling.',
1250
+ INITIALIZE: "You are a helpful assistant.\n\nUse the supplied tools to assist the user.",
1251
+ SELECT: "You are a helpful assistant for selecting functions to call.\n\nUse the supplied tools to select some functions of `getApiFunctions()` returned.\n\nWhen selecting functions to call, pay attention to the relationship between functions. In particular, check the prerequisites between each function.\n\nIf you can't find any proper function to select, just type your own message. By the way, when typing your own message, please consider the user's language locale code. If your message is different with the user's language, please translate it to the user's."
1112
1252
  };
1113
1253
 
1114
- async function execute$4(ctx) {
1115
- if (ctx.operations.divided === undefined) {
1116
- return step$1(ctx, ctx.operations.array, 0);
1254
+ const NOT_MOUNTED_YET = {};
1255
+
1256
+ class Singleton {
1257
+ constructor(closure) {
1258
+ this.closure_ = closure;
1259
+ this.value_ = NOT_MOUNTED_YET;
1117
1260
  }
1118
- const stacks = ctx.operations.divided.map((() => []));
1119
- const events = [];
1120
- const prompts = await Promise.all(ctx.operations.divided.map((async (operations, i) => step$1({
1121
- ...ctx,
1122
- stack: stacks[i],
1123
- dispatch: async e => {
1124
- events.push(e);
1261
+ get(...args) {
1262
+ if (this.value_ === NOT_MOUNTED_YET) {
1263
+ this.value_ = this.closure_(...args);
1125
1264
  }
1126
- }, operations, 0))));
1127
- if (stacks.every((s => s.length === 0))) {
1128
- return prompts[0];
1129
- } else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) {
1130
- return step$1(ctx, stacks.flat().map((s => ctx.operations.group.get(s.operation.controller.name).get(s.operation.function.name))), 0);
1265
+ return this.value_;
1131
1266
  }
1132
- const collection = createCancelPrompt({
1133
- id: v4(),
1134
- selections: []
1135
- });
1136
- for (const e of events) {
1137
- if (e.type === "select") {
1138
- collection.selections.push(e.selection);
1139
- await cancelFunction(ctx, {
1140
- name: e.selection.operation.name,
1141
- reason: e.selection.reason
1267
+ }
1268
+
1269
+ const isNode = new Singleton((() => {
1270
+ const isObject = obj => typeof obj === "object" && obj !== null;
1271
+ return typeof global === "object" && isObject(global) && isObject(global.process) && isObject(global.process.versions) && typeof global.process.versions.node !== "undefined";
1272
+ }));
1273
+
1274
+ const getLocale = new Singleton((() => isNode.get() ? process.env.LANG?.split(".")[0] ?? "en-US" : navigator.language));
1275
+
1276
+ const getTimezone = new Singleton((() => Intl.DateTimeFormat().resolvedOptions().timeZone));
1277
+
1278
+ function write(config) {
1279
+ if (config?.systemPrompt?.common !== undefined) {
1280
+ return config?.systemPrompt?.common(config);
1281
+ }
1282
+ const locale = config?.locale ?? getLocale.get();
1283
+ const timezone = config?.timezone ?? getTimezone.get();
1284
+ return AgenticaSystemPrompt.COMMON.replace("${locale}", locale).replace("${timezone}", timezone);
1285
+ }
1286
+
1287
+ const AgenticaDefaultPrompt = {
1288
+ write
1289
+ };
1290
+
1291
+ class AsyncQueue {
1292
+ constructor() {
1293
+ this.queue = [];
1294
+ this.resolvers = [];
1295
+ this.closeResolvers = [];
1296
+ this.emptyResolvers = [];
1297
+ this.closed = false;
1298
+ }
1299
+ enqueue(item) {
1300
+ this.queue.push(item);
1301
+ if (this.resolvers.length > 0) {
1302
+ this.resolvers.shift()?.({
1303
+ value: this.queue.shift(),
1304
+ done: false
1142
1305
  });
1143
1306
  }
1144
1307
  }
1145
- return [ collection ];
1308
+ async dequeue() {
1309
+ if (this.queue.length > 0) {
1310
+ return {
1311
+ value: this.queue.shift(),
1312
+ done: false
1313
+ };
1314
+ }
1315
+ if (this.closed) {
1316
+ if (this.emptyResolvers.length > 0) {
1317
+ this.emptyResolvers.forEach((resolve => resolve()));
1318
+ this.emptyResolvers = [];
1319
+ }
1320
+ return {
1321
+ value: undefined,
1322
+ done: true
1323
+ };
1324
+ }
1325
+ return new Promise((resolve => this.resolvers.push(resolve)));
1326
+ }
1327
+ isEmpty() {
1328
+ return this.queue.length === 0;
1329
+ }
1330
+ isClosed() {
1331
+ return this.closed;
1332
+ }
1333
+ done() {
1334
+ return this.isClosed() && this.isEmpty();
1335
+ }
1336
+ close() {
1337
+ this.closed = true;
1338
+ while (this.resolvers.length > 0) {
1339
+ this.resolvers.shift()?.({
1340
+ value: undefined,
1341
+ done: true
1342
+ });
1343
+ }
1344
+ this.closeResolvers.forEach((resolve => resolve()));
1345
+ }
1346
+ async waitUntilEmpty() {
1347
+ if (this.isEmpty()) {
1348
+ return Promise.resolve();
1349
+ }
1350
+ return new Promise((resolve => {
1351
+ this.emptyResolvers.push(resolve);
1352
+ }));
1353
+ }
1354
+ async waitClosed() {
1355
+ if (this.isClosed()) {
1356
+ return Promise.resolve();
1357
+ }
1358
+ return new Promise((resolve => {
1359
+ this.closeResolvers.push(resolve);
1360
+ }));
1361
+ }
1362
+ }
1363
+
1364
+ class MPSC {
1365
+ constructor() {
1366
+ this.queue = new AsyncQueue;
1367
+ this.consumer = new ReadableStream({
1368
+ pull: async controller => {
1369
+ const {value, done} = await this.queue.dequeue();
1370
+ if (done === true) {
1371
+ controller.close();
1372
+ return;
1373
+ }
1374
+ controller.enqueue(value);
1375
+ }
1376
+ });
1377
+ }
1378
+ produce(chunk) {
1379
+ this.queue.enqueue(chunk);
1380
+ }
1381
+ close() {
1382
+ this.queue.close();
1383
+ }
1384
+ done() {
1385
+ return this.queue.done();
1386
+ }
1387
+ async waitClosed() {
1388
+ await this.queue.waitClosed();
1389
+ }
1390
+ async waitUntilEmpty() {
1391
+ await this.queue.waitUntilEmpty();
1392
+ }
1393
+ }
1394
+
1395
+ async function describe(ctx, histories) {
1396
+ if (histories.length === 0) {
1397
+ return [];
1398
+ }
1399
+ const completionStream = await ctx.request("describe", {
1400
+ messages: [ {
1401
+ role: "system",
1402
+ content: AgenticaDefaultPrompt.write(ctx.config)
1403
+ }, ...histories.map(decodePrompt).flat(), {
1404
+ role: "system",
1405
+ content: ctx.config?.systemPrompt?.describe?.(histories) ?? AgenticaSystemPrompt.DESCRIBE
1406
+ } ]
1407
+ });
1408
+ const describeContext = [];
1409
+ const completion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
1410
+ const acc = await accPromise;
1411
+ const registerContext = choices => {
1412
+ for (const choice of choices) {
1413
+ if (choice.finish_reason != null) {
1414
+ describeContext[choice.index].mpsc.close();
1415
+ continue;
1416
+ }
1417
+ if (choice.delta.content == null) {
1418
+ continue;
1419
+ }
1420
+ if (describeContext[choice.index] != null) {
1421
+ describeContext[choice.index].content += choice.delta.content;
1422
+ describeContext[choice.index].mpsc.produce(choice.delta.content);
1423
+ continue;
1424
+ }
1425
+ const mpsc = new MPSC;
1426
+ describeContext[choice.index] = {
1427
+ content: choice.delta.content,
1428
+ mpsc
1429
+ };
1430
+ mpsc.produce(choice.delta.content);
1431
+ void ctx.dispatch(createDescribeEvent({
1432
+ executes: histories,
1433
+ stream: mpsc.consumer,
1434
+ done: () => mpsc.done(),
1435
+ get: () => describeContext[choice.index]?.content ?? "",
1436
+ join: async () => {
1437
+ await mpsc.waitClosed();
1438
+ return describeContext[choice.index].content;
1439
+ }
1440
+ }));
1441
+ }
1442
+ };
1443
+ if (acc.object === "chat.completion.chunk") {
1444
+ registerContext([ acc, chunk ].flatMap((v => v.choices)));
1445
+ return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
1446
+ }
1447
+ registerContext(chunk.choices);
1448
+ return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
1449
+ }));
1450
+ if (completion == null) {
1451
+ throw new Error("No completion received");
1452
+ }
1453
+ const descriptions = completion.choices.map((choice => choice.message.role === "assistant" ? choice.message.content : null)).filter((str => str !== null)).map((content => createDescribePrompt({
1454
+ executes: histories,
1455
+ text: content
1456
+ })));
1457
+ return descriptions;
1146
1458
  }
1147
1459
 
1460
+ const ChatGptDescribeFunctionAgent = {
1461
+ execute: describe
1462
+ };
1463
+
1464
+ var AgenticaConstant;
1465
+
1466
+ (function(AgenticaConstant) {
1467
+ AgenticaConstant.RETRY = 3;
1468
+ AgenticaConstant.ELITICISM = true;
1469
+ })(AgenticaConstant || (AgenticaConstant = {}));
1470
+
1148
1471
  async function cancelFunction(ctx, reference) {
1149
1472
  const index = ctx.stack.findIndex((item => item.operation.name === reference.name));
1150
1473
  if (index === -1) {
@@ -1161,34 +1484,167 @@ async function cancelFunction(ctx, reference) {
1161
1484
  return item;
1162
1485
  }
1163
1486
 
1164
- async function step$1(ctx, operations, retry, failures) {
1165
- const completionStream = await ctx.request("cancel", {
1166
- messages: [ {
1167
- role: "system",
1168
- content: AgenticaDefaultPrompt.write(ctx.config)
1169
- }, {
1170
- role: "assistant",
1171
- tool_calls: [ {
1172
- type: "function",
1173
- id: "getApiFunctions",
1174
- function: {
1175
- name: "getApiFunctions",
1176
- arguments: JSON.stringify({})
1487
+ const CONTAINER$1 = {
1488
+ functions: [ {
1489
+ name: "cancelFunctions",
1490
+ parameters: {
1491
+ description: " Properties of the function\n\n------------------------------\n\nCurrent Type: {@link __IChatFunctionReference.IProps}",
1492
+ type: "object",
1493
+ properties: {
1494
+ functions: {
1495
+ title: "List of target functions",
1496
+ description: "List of target functions.",
1497
+ type: "array",
1498
+ items: {
1499
+ description: "Current Type: {@link ___IChatFunctionReference}",
1500
+ type: "object",
1501
+ properties: {
1502
+ reason: {
1503
+ title: "The reason of the function selection",
1504
+ description: "The reason of the function selection.\n\nJust write the reason why you've determined to select this function.",
1505
+ type: "string"
1506
+ },
1507
+ name: {
1508
+ title: "Name of the target function to call",
1509
+ description: "Name of the target function to call.",
1510
+ type: "string"
1511
+ }
1512
+ },
1513
+ required: [ "reason", "name" ]
1514
+ }
1177
1515
  }
1178
- } ]
1179
- }, {
1180
- role: "tool",
1181
- tool_call_id: "getApiFunctions",
1182
- content: JSON.stringify(operations.map((op => ({
1183
- name: op.name,
1184
- description: op.function.description,
1185
- ...op.protocol === "http" ? {
1516
+ },
1517
+ required: [ "functions" ],
1518
+ additionalProperties: false,
1519
+ $defs: {}
1520
+ },
1521
+ description: "Cancel a function from the candidate list to call.\n\nIf you A.I. agent has understood that the user wants to cancel\nsome candidate functions to call from the conversation, please cancel\nthem through this function.\n\nAlso, when you A.I. find a function that has been selected by the candidate\npooling, cancel the function by calling this function. For reference, the\ncandidate pooling means that user wants only one function to call, but you A.I.\nagent selects multiple candidate functions because the A.I. agent can't specify\nonly one thing due to lack of specificity or homogeneity of candidate functions.\n\nAdditionally, if you A.I. agent wants to cancel same function multiply, you can\ndo it by assigning the same function name multiply in the `functions` property.",
1522
+ validate: (() => {
1523
+ const _io0 = input => Array.isArray(input.functions) && input.functions.every((elem => "object" === typeof elem && null !== elem && _io1(elem)));
1524
+ const _io1 = input => "string" === typeof input.reason && "string" === typeof input.name;
1525
+ const _vo0 = (input, _path, _exceptionable = true) => [ (Array.isArray(input.functions) || _report(_exceptionable, {
1526
+ path: _path + ".functions",
1527
+ expected: "Array<___IChatFunctionReference>",
1528
+ value: input.functions
1529
+ })) && input.functions.map(((elem, _index2) => ("object" === typeof elem && null !== elem || _report(_exceptionable, {
1530
+ path: _path + ".functions[" + _index2 + "]",
1531
+ expected: "___IChatFunctionReference",
1532
+ value: elem
1533
+ })) && _vo1(elem, _path + ".functions[" + _index2 + "]", _exceptionable) || _report(_exceptionable, {
1534
+ path: _path + ".functions[" + _index2 + "]",
1535
+ expected: "___IChatFunctionReference",
1536
+ value: elem
1537
+ }))).every((flag => flag)) || _report(_exceptionable, {
1538
+ path: _path + ".functions",
1539
+ expected: "Array<___IChatFunctionReference>",
1540
+ value: input.functions
1541
+ }) ].every((flag => flag));
1542
+ const _vo1 = (input, _path, _exceptionable = true) => [ "string" === typeof input.reason || _report(_exceptionable, {
1543
+ path: _path + ".reason",
1544
+ expected: "string",
1545
+ value: input.reason
1546
+ }), "string" === typeof input.name || _report(_exceptionable, {
1547
+ path: _path + ".name",
1548
+ expected: "string",
1549
+ value: input.name
1550
+ }) ].every((flag => flag));
1551
+ const __is = input => "object" === typeof input && null !== input && _io0(input);
1552
+ let errors;
1553
+ let _report;
1554
+ return input => {
1555
+ if (false === __is(input)) {
1556
+ errors = [];
1557
+ _report = __typia_transform__validateReport._validateReport(errors);
1558
+ ((input, _path, _exceptionable = true) => ("object" === typeof input && null !== input || _report(true, {
1559
+ path: _path + "",
1560
+ expected: "__IChatFunctionReference.IProps",
1561
+ value: input
1562
+ })) && _vo0(input, _path + "", true) || _report(true, {
1563
+ path: _path + "",
1564
+ expected: "__IChatFunctionReference.IProps",
1565
+ value: input
1566
+ }))(input, "$input", true);
1567
+ const success = 0 === errors.length;
1568
+ return success ? {
1569
+ success,
1570
+ data: input
1571
+ } : {
1572
+ success,
1573
+ errors,
1574
+ data: input
1575
+ };
1576
+ }
1577
+ return {
1578
+ success: true,
1579
+ data: input
1580
+ };
1581
+ };
1582
+ })()
1583
+ } ]
1584
+ };
1585
+
1586
+ async function cancel(ctx) {
1587
+ if (ctx.operations.divided === undefined) {
1588
+ return step$1(ctx, ctx.operations.array, 0);
1589
+ }
1590
+ const stacks = ctx.operations.divided.map((() => []));
1591
+ const events = [];
1592
+ const prompts = await Promise.all(ctx.operations.divided.map((async (operations, i) => step$1({
1593
+ ...ctx,
1594
+ stack: stacks[i],
1595
+ dispatch: async e => {
1596
+ events.push(e);
1597
+ }
1598
+ }, operations, 0))));
1599
+ if (stacks.every((s => s.length === 0))) {
1600
+ return prompts[0];
1601
+ } else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) {
1602
+ return step$1(ctx, stacks.flat().map((s => ctx.operations.group.get(s.operation.controller.name).get(s.operation.function.name))), 0);
1603
+ }
1604
+ const collection = createCancelPrompt({
1605
+ id: v4(),
1606
+ selections: []
1607
+ });
1608
+ for (const e of events) {
1609
+ if (e.type === "select") {
1610
+ collection.selections.push(e.selection);
1611
+ await cancelFunction(ctx, {
1612
+ name: e.selection.operation.name,
1613
+ reason: e.selection.reason
1614
+ });
1615
+ }
1616
+ }
1617
+ return [ collection ];
1618
+ }
1619
+
1620
+ async function step$1(ctx, operations, retry, failures) {
1621
+ const completionStream = await ctx.request("cancel", {
1622
+ messages: [ {
1623
+ role: "system",
1624
+ content: AgenticaDefaultPrompt.write(ctx.config)
1625
+ }, {
1626
+ role: "assistant",
1627
+ tool_calls: [ {
1628
+ type: "function",
1629
+ id: "getApiFunctions",
1630
+ function: {
1631
+ name: "getApiFunctions",
1632
+ arguments: JSON.stringify({})
1633
+ }
1634
+ } ]
1635
+ }, {
1636
+ role: "tool",
1637
+ tool_call_id: "getApiFunctions",
1638
+ content: JSON.stringify(operations.map((op => ({
1639
+ name: op.name,
1640
+ description: op.function.description,
1641
+ ...op.protocol === "http" ? {
1186
1642
  method: op.function.method,
1187
1643
  path: op.function.path,
1188
1644
  tags: op.function.tags
1189
1645
  } : {}
1190
1646
  }))))
1191
- }, ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(), {
1647
+ }, ...ctx.histories.map(decodePrompt).flat(), {
1192
1648
  role: "user",
1193
1649
  content: ctx.prompt.text
1194
1650
  }, {
@@ -1352,17 +1808,12 @@ function emendMessages$1(failures) {
1352
1808
  } ])).flat();
1353
1809
  }
1354
1810
 
1355
- const ChatGptCancelFunctionAgent = {
1356
- execute: execute$4,
1357
- cancelFunction
1358
- };
1359
-
1360
- async function execute$3(ctx) {
1811
+ async function call(ctx) {
1361
1812
  const completionStream = await ctx.request("call", {
1362
1813
  messages: [ {
1363
1814
  role: "system",
1364
1815
  content: AgenticaDefaultPrompt.write(ctx.config)
1365
- }, ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(), {
1816
+ }, ...ctx.histories.map(decodePrompt).flat(), {
1366
1817
  role: "user",
1367
1818
  content: ctx.prompt.text
1368
1819
  }, {
@@ -1416,7 +1867,7 @@ async function execute$3(ctx) {
1416
1867
  arguments: execute.arguments,
1417
1868
  value: execute.value
1418
1869
  }));
1419
- await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
1870
+ await cancelFunction(ctx, {
1420
1871
  name: call.operation.name,
1421
1872
  reason: "completed"
1422
1873
  });
@@ -1571,7 +2022,7 @@ async function correct(ctx, call, retry, error) {
1571
2022
  messages: [ {
1572
2023
  role: "system",
1573
2024
  content: AgenticaDefaultPrompt.write(ctx.config)
1574
- }, ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(), {
2025
+ }, ...ctx.histories.map(decodePrompt).flat(), {
1575
2026
  role: "user",
1576
2027
  content: ctx.prompt.text
1577
2028
  }, {
@@ -1642,183 +2093,6 @@ function isObject($defs, schema) {
  return ChatGptTypeChecker.isObject(schema) || ChatGptTypeChecker.isReference(schema) && isObject($defs, $defs[schema.$ref.split("/").at(-1)]) || ChatGptTypeChecker.isAnyOf(schema) && schema.anyOf.every((schema => isObject($defs, schema)));
  }

- const ChatGptCallFunctionAgent = {
- execute: execute$3
- };
-
- class AsyncQueue {
- constructor() {
- this.queue = [];
- this.resolvers = [];
- this.closeResolvers = [];
- this.emptyResolvers = [];
- this.closed = false;
- }
- enqueue(item) {
- this.queue.push(item);
- if (this.resolvers.length > 0) {
- this.resolvers.shift()?.({
- value: this.queue.shift(),
- done: false
- });
- }
- }
- async dequeue() {
- if (this.queue.length > 0) {
- return {
- value: this.queue.shift(),
- done: false
- };
- }
- if (this.closed) {
- if (this.emptyResolvers.length > 0) {
- this.emptyResolvers.forEach((resolve => resolve()));
- this.emptyResolvers = [];
- }
- return {
- value: undefined,
- done: true
- };
- }
- return new Promise((resolve => this.resolvers.push(resolve)));
- }
- isEmpty() {
- return this.queue.length === 0;
- }
- isClosed() {
- return this.closed;
- }
- done() {
- return this.isClosed() && this.isEmpty();
- }
- close() {
- this.closed = true;
- while (this.resolvers.length > 0) {
- this.resolvers.shift()?.({
- value: undefined,
- done: true
- });
- }
- this.closeResolvers.forEach((resolve => resolve()));
- }
- async waitUntilEmpty() {
- if (this.isEmpty()) {
- return Promise.resolve();
- }
- return new Promise((resolve => {
- this.emptyResolvers.push(resolve);
- }));
- }
- async waitClosed() {
- if (this.isClosed()) {
- return Promise.resolve();
- }
- return new Promise((resolve => {
- this.closeResolvers.push(resolve);
- }));
- }
- }
-
- class MPSC {
- constructor() {
- this.queue = new AsyncQueue;
- this.consumer = new ReadableStream({
- pull: async controller => {
- const {value, done} = await this.queue.dequeue();
- if (done === true) {
- controller.close();
- return;
- }
- controller.enqueue(value);
- }
- });
- }
- produce(chunk) {
- this.queue.enqueue(chunk);
- }
- close() {
- this.queue.close();
- }
- done() {
- return this.queue.done();
- }
- async waitClosed() {
- await this.queue.waitClosed();
- }
- async waitUntilEmpty() {
- await this.queue.waitUntilEmpty();
- }
- }
-
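// Illustrative usage sketch (not from the package sources): how the AsyncQueue/MPSC pair above is
// meant to be consumed. In 0.15.0 both classes are re-exported under the new `utils` namespace
// (see the export change at the end of this file); the producer pushes chunks while the
// ReadableStream consumer drains them, and close() ends the stream.
import { utils } from "@agentica/core";

const channel = new utils.MPSC();
channel.produce("Hello, ");
channel.produce("world!");
channel.close(); // lets the consumer's pull() observe done === true

// ReadableStream is async-iterable in Node.js 18+; older runtimes need getReader() instead.
for await (const chunk of channel.consumer) {
  process.stdout.write(chunk);
}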
- async function execute$2(ctx, histories) {
- if (histories.length === 0) {
- return [];
- }
- const completionStream = await ctx.request("describe", {
- messages: [ {
- role: "system",
- content: AgenticaDefaultPrompt.write(ctx.config)
- }, ...histories.map(ChatGptHistoryDecoder.decode).flat(), {
- role: "system",
- content: ctx.config?.systemPrompt?.describe?.(histories) ?? AgenticaSystemPrompt.DESCRIBE
- } ]
- });
- const describeContext = [];
- const completion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- describeContext[choice.index].mpsc.close();
- continue;
- }
- if (choice.delta.content == null) {
- continue;
- }
- if (describeContext[choice.index] != null) {
- describeContext[choice.index].content += choice.delta.content;
- describeContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- describeContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- void ctx.dispatch(createDescribeEvent({
- executes: histories,
- stream: mpsc.consumer,
- done: () => mpsc.done(),
- get: () => describeContext[choice.index]?.content ?? "",
- join: async () => {
- await mpsc.waitClosed();
- return describeContext[choice.index].content;
- }
- }));
- }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
- }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
- }));
- if (completion == null) {
- throw new Error("No completion received");
- }
- const descriptions = completion.choices.map((choice => choice.message.role === "assistant" ? choice.message.content : null)).filter((str => str !== null)).map((content => createDescribePrompt({
- executes: histories,
- text: content
- })));
- return descriptions;
- }
-
- const ChatGptDescribeFunctionAgent = {
- execute: execute$2
- };
-
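// Illustrative sketch (not from the package sources): the describe pipeline above fans each
// completion choice out through an MPSC channel and dispatches it via createDescribeEvent, so a
// listener receives both a live stream and join()/get()/done() helpers over the same buffer.
// `event` here stands for that dispatched payload; the listener wiring itself is an assumption.
async function printDescription(event) {
  // event.stream is the MPSC consumer (a ReadableStream of text deltas).
  for await (const piece of event.stream) {
    process.stdout.write(piece);
  }
  // join() resolves with the full text once the channel has been closed.
  return event.join();
}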
  const FUNCTION = {
  functions: [ {
  name: "getApiFunctions",
@@ -3457,12 +3731,12 @@ const FUNCTION = {
  } ]
  }.functions[0];

- async function execute$1(ctx) {
+ async function initialize(ctx) {
  const completionStream = await ctx.request("initialize", {
  messages: [ {
  role: "system",
  content: AgenticaDefaultPrompt.write(ctx.config)
- }, ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(), {
+ }, ...ctx.histories.map(decodePrompt).flat(), {
  role: "user",
  content: ctx.prompt.text
  }, {
@@ -3540,9 +3814,21 @@ async function execute$1(ctx) {
  return prompts;
  }

- const ChatGptInitializeFunctionAgent = {
- execute: execute$1
- };
+ async function selectFunction(ctx, reference) {
+ const operation = ctx.operations.flat.get(reference.name);
+ if (operation === undefined) {
+ return null;
+ }
+ const selection = createOperationSelection({
+ operation,
+ reason: reference.reason
+ });
+ ctx.stack.push(selection);
+ void ctx.dispatch(createSelectEvent({
+ selection
+ }));
+ return operation;
+ }

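// Illustrative sketch (not from the package sources): what the new selectFunction helper above
// expects. `ctx.operations.flat` is a Map keyed by operation name, and the reference mirrors the
// "selectFunctions" tool-call schema; the operation name below is a placeholder.
async function trySelect(ctx) {
  const operation = await selectFunction(ctx, {
    name: "bbs_article_create",
    reason: "the user asked to publish a new article",
  });
  if (operation === null) {
    // unknown name: nothing was pushed onto ctx.stack and no select event was dispatched
  }
  return operation;
}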
  const CONTAINER = {
  functions: [ {
@@ -3643,7 +3929,7 @@ const CONTAINER = {
  } ]
  };

- async function execute(ctx) {
+ async function select(ctx) {
  if (ctx.operations.divided === undefined) {
  return step(ctx, ctx.operations.array, 0);
  }
@@ -3704,7 +3990,7 @@ async function step(ctx, operations, retry, failures) {
  tags: op.function.tags
  } : {}
  }))))
- }, ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(), {
+ }, ...ctx.histories.map(decodePrompt).flat(), {
  role: "user",
  content: ctx.prompt.text
  }, {
@@ -3773,467 +4059,158 @@ async function step(ctx, operations, retry, failures) {
  expected: "__IChatFunctionReference.IProps",
  value: input
  })) && _vo0(input, _path + "", true) || _report(true, {
- path: _path + "",
- expected: "__IChatFunctionReference.IProps",
- value: input
- }))(input, "$input", true);
- const success = 0 === errors.length;
- return success ? {
- success,
- data: input
- } : {
- success,
- errors,
- data: input
- };
- }
- return {
- success: true,
- data: input
- };
- };
- })()(input);
- if (validation.success === false) {
- failures.push({
- id: tc.id,
- name: tc.function.name,
- validation
- });
- }
- }
- }
- if (failures.length > 0) {
- return step(ctx, operations, retry, failures);
- }
- }
- const prompts = [];
- for (const choice of completion.choices) {
- if (choice.message.tool_calls != null) {
- for (const tc of choice.message.tool_calls) {
- if (tc.type !== "function") {
- continue;
- }
- if (tc.function.name !== "selectFunctions") {
- continue;
- }
- const input = (() => {
- const _io0 = input => Array.isArray(input.functions) && input.functions.every((elem => "object" === typeof elem && null !== elem && _io1(elem)));
- const _io1 = input => "string" === typeof input.reason && "string" === typeof input.name;
- const __is = input => "object" === typeof input && null !== input && _io0(input);
- return input => {
- input = JSON.parse(input);
- return __is(input) ? input : null;
- };
- })()(tc.function.arguments);
- if (input === null) {
- continue;
- }
- const collection = createSelectPrompt({
- id: tc.id,
- selections: []
- });
- for (const reference of input.functions) {
- const operation = await selectFunction(ctx, reference);
- if (operation === null) {
- continue;
- }
- collection.selections.push(createOperationSelection({
- operation,
- reason: reference.reason
- }));
- }
- if (collection.selections.length !== 0) {
- prompts.push(collection);
- }
- }
- }
- if (choice.message.role === "assistant" && choice.message.content != null) {
- const text = createTextPrompt({
- role: "assistant",
- text: choice.message.content
- });
- prompts.push(text);
- await ctx.dispatch(createTextEvent({
- role: "assistant",
- stream: StreamUtil.to(text.text),
- join: async () => Promise.resolve(text.text),
- done: () => true,
- get: () => text.text
- }));
- }
- }
- return prompts;
- }
-
- async function selectFunction(ctx, reference) {
- const operation = ctx.operations.flat.get(reference.name);
- if (operation === undefined) {
- return null;
- }
- const selection = createOperationSelection({
- operation,
- reason: reference.reason
- });
- ctx.stack.push(selection);
- void ctx.dispatch(createSelectEvent({
- selection
- }));
- return operation;
- }
-
- function emendMessages(failures) {
- return failures.map((f => [ {
- role: "assistant",
- tool_calls: [ {
- type: "function",
- id: f.id,
- function: {
- name: f.name,
- arguments: JSON.stringify(f.validation.data)
- }
- } ]
- }, {
- role: "tool",
- content: JSON.stringify(f.validation.errors),
- tool_call_id: f.id
- }, {
- role: "system",
- content: [ "You A.I. assistant has composed wrong typed arguments.", "", "Correct it at the next function calling." ].join("\n")
- } ])).flat();
- }
-
- const ChatGptSelectFunctionAgent = {
- execute,
- selectFunction,
- emendMessages
- };
-
- const ChatGptAgent = {
- execute: executor => async ctx => {
- const histories = [];
- if (ctx.ready() === false) {
- if (executor?.initialize === null) {
- await ctx.initialize();
- } else {
- histories.push(...await (executor?.initialize ?? ChatGptInitializeFunctionAgent.execute)(ctx));
- if (ctx.ready() === false) {
- return histories;
- }
- }
- }
- if (ctx.stack.length !== 0) {
- histories.push(...await (executor?.cancel ?? ChatGptCancelFunctionAgent.execute)(ctx));
- }
- histories.push(...await (executor?.select ?? ChatGptSelectFunctionAgent.execute)(ctx));
- if (ctx.stack.length === 0) {
- return histories;
- }
- while (true) {
- const prompts = await (executor?.call ?? ChatGptCallFunctionAgent.execute)(ctx);
- histories.push(...prompts);
- const executes = prompts.filter((prompt => prompt.type === "execute"));
- for (const e of executes) {
- await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
- reason: "completed",
- name: e.operation.name
- });
- }
- histories.push(...await (executor?.describe ?? ChatGptDescribeFunctionAgent.execute)(ctx, executes));
- if (executes.length === 0 || ctx.stack.length === 0) {
- break;
- }
- }
- return histories;
- }
- };
-
- class AgenticaTokenUsage {
- constructor(props) {
- if (props === undefined) {
- const zero = AgenticaTokenUsage.zero();
- this.aggregate = zero.aggregate;
- this.initialize = zero.initialize;
- this.select = zero.select;
- this.cancel = zero.cancel;
- this.call = zero.call;
- this.describe = zero.describe;
- } else {
- this.aggregate = props.aggregate;
- this.initialize = props.initialize;
- this.select = props.select;
- this.cancel = props.cancel;
- this.call = props.call;
- this.describe = props.describe;
- }
- }
- increment(y) {
- const increment = (x, y) => {
- x.total += y.total;
- x.input.total += y.input.total;
- x.input.cached += y.input.cached;
- x.output.total += y.output.total;
- x.output.reasoning += y.output.reasoning;
- x.output.accepted_prediction += y.output.accepted_prediction;
- x.output.rejected_prediction += y.output.rejected_prediction;
- };
- increment(this.aggregate, y.aggregate);
- increment(this.initialize, y.initialize);
- increment(this.select, y.select);
- increment(this.cancel, y.cancel);
- increment(this.call, y.call);
- increment(this.describe, y.describe);
- }
- toJSON() {
- return (() => {
- const _co0 = input => ({
- aggregate: _co1(input.aggregate),
- initialize: _co1(input.initialize),
- select: _co1(input.select),
- cancel: _co1(input.cancel),
- call: _co1(input.call),
- describe: _co1(input.describe)
- });
- const _co1 = input => ({
- total: input.total,
- input: _co2(input.input),
- output: _co3(input.output)
- });
- const _co2 = input => ({
- total: input.total,
- cached: input.cached
- });
- const _co3 = input => ({
- total: input.total,
- reasoning: input.reasoning,
- accepted_prediction: input.accepted_prediction,
- rejected_prediction: input.rejected_prediction
- });
- return input => _co0(input);
- })()(this);
- }
- static zero() {
- const component = () => ({
- total: 0,
- input: {
- total: 0,
- cached: 0
- },
- output: {
- total: 0,
- reasoning: 0,
- accepted_prediction: 0,
- rejected_prediction: 0
- }
- });
- return new AgenticaTokenUsage({
- aggregate: component(),
- initialize: component(),
- select: component(),
- cancel: component(),
- call: component(),
- describe: component()
- });
- }
- static plus(x, y) {
- const z = new AgenticaTokenUsage(x);
- z.increment(y.toJSON());
- return z;
- }
- }
-
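// Illustrative usage sketch (not from the package sources): AgenticaTokenUsage above is a plain
// accumulator. Every component has the shape { total, input: { total, cached }, output:
// { total, reasoning, accepted_prediction, rejected_prediction } }, and plus()/increment()
// simply add the matching counters together.
import { AgenticaTokenUsage } from "@agentica/core";

const a = AgenticaTokenUsage.zero();
const b = AgenticaTokenUsage.zero();
b.call.total = 120;
b.call.input.total = 100;
b.call.output.total = 20;

const merged = AgenticaTokenUsage.plus(a, b); // counter-by-counter sum of a and b
console.log(merged.toJSON().call.total);      // 120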
- function aggregate(props) {
- const component = props.usage[props.kind];
- component.total += props.completionUsage.total_tokens;
- component.input.total += props.completionUsage.prompt_tokens;
- component.input.total += props.completionUsage.prompt_tokens_details?.audio_tokens ?? 0;
- component.input.cached += props.completionUsage.prompt_tokens_details?.cached_tokens ?? 0;
- component.output.total += props.completionUsage.completion_tokens;
- component.output.accepted_prediction += props.completionUsage.completion_tokens_details?.accepted_prediction_tokens ?? 0;
- component.output.reasoning += props.completionUsage.completion_tokens_details?.reasoning_tokens ?? 0;
- component.output.rejected_prediction += props.completionUsage.completion_tokens_details?.rejected_prediction_tokens ?? 0;
- const sum = getter => Object.entries(props.usage).filter((([key]) => key !== "aggregate")).map((([, comp]) => getter(comp))).reduce(((a, b) => a + b), 0);
- const aggregate = props.usage.aggregate;
- aggregate.total = sum((comp => comp.total));
- aggregate.input.total = sum((comp => comp.input.total));
- aggregate.input.cached = sum((comp => comp.input.cached));
- aggregate.output.total = sum((comp => comp.output.total));
- aggregate.output.reasoning = sum((comp => comp.output.reasoning));
- aggregate.output.accepted_prediction = sum((comp => comp.output.accepted_prediction));
- aggregate.output.rejected_prediction = sum((comp => comp.output.rejected_prediction));
- }
-
- const AgenticaTokenUsageAggregator = {
- aggregate
- };
-
- function __map_take(dict, key, generator) {
- const oldbie = dict.get(key);
- if (oldbie !== undefined) {
- return oldbie;
- }
- const value = generator();
- dict.set(key, value);
- return value;
- }
-
- function compose(props) {
- const unique = props.controllers.length === 1 || (() => {
- const names = props.controllers.map((controller => controller.application.functions.map((func => func.name)))).flat();
- return new Set(names).size === names.length;
- })();
- const naming = (func, ci) => unique ? func : `_${ci}_${func}`;
- const array = props.controllers.map(((controller, ci) => controller.protocol === "http" ? controller.application.functions.map((func => ({
- protocol: "http",
- controller,
- function: func,
- name: naming(func.name, ci),
- toJSON: () => ({
- protocol: "http",
- controller: controller.name,
- function: func.name,
- name: naming(func.name, ci)
- })
- }))) : controller.application.functions.map((func => ({
- protocol: "class",
- controller,
- function: func,
- name: naming(func.name, ci),
- toJSON: () => ({
- protocol: "class",
- controller: controller.name,
- function: func.name,
- name: naming(func.name, ci)
- })
- }))))).flat();
- const divided = props.config?.capacity !== undefined && array.length > props.config.capacity ? divide({
- array,
- capacity: props.config.capacity
- }) : undefined;
- const flat = new Map;
- const group = new Map;
- for (const item of array) {
- flat.set(item.name, item);
- __map_take(group, item.controller.name, (() => new Map)).set(item.name, item);
+ path: _path + "",
+ expected: "__IChatFunctionReference.IProps",
+ value: input
+ }))(input, "$input", true);
+ const success = 0 === errors.length;
+ return success ? {
+ success,
+ data: input
+ } : {
+ success,
+ errors,
+ data: input
+ };
+ }
+ return {
+ success: true,
+ data: input
+ };
+ };
+ })()(input);
+ if (validation.success === false) {
+ failures.push({
+ id: tc.id,
+ name: tc.function.name,
+ validation
+ });
+ }
+ }
+ }
+ if (failures.length > 0) {
+ return step(ctx, operations, retry, failures);
+ }
  }
- return {
- array,
- divided,
- flat,
- group
- };
- }
-
- function divide(props) {
- const size = Math.ceil(props.array.length / props.capacity);
- const capacity = Math.ceil(props.array.length / size);
- const replica = props.array.slice();
- return Array.from({
- length: size
- }, (() => replica.splice(0, capacity)));
- }
-
- const AgenticaOperationComposer = {
- compose
- };
-
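// Illustrative sketch (not from the package sources): divide() above balances chunk sizes rather
// than greedily filling config.capacity. A standalone version with the same arithmetic:
function divideEvenly(array, capacity) {
  const size = Math.ceil(array.length / capacity); // number of chunks
  const width = Math.ceil(array.length / size);    // balanced width per chunk
  const replica = array.slice();
  return Array.from({ length: size }, () => replica.splice(0, width));
}

// 250 operations with capacity 100 become chunks of 84, 84 and 82 instead of 100, 100 and 50.
console.log(divideEvenly(Array.from({ length: 250 }, (_, i) => i), 100).map((c) => c.length));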
- function transform(props) {
- if (props.prompt.type === "text") {
- return transformText({
- prompt: props.prompt
- });
- } else if (props.prompt.type === "select") {
- return transformSelect({
- operations: props.operations,
- prompt: props.prompt
- });
- } else if (props.prompt.type === "cancel") {
- return transformCancel({
- operations: props.operations,
- prompt: props.prompt
- });
- } else if (props.prompt.type === "execute") {
- return transformExecute({
- operations: props.operations,
- prompt: props.prompt
- });
- } else if (props.prompt.type === "describe") {
- return transformDescribe({
- operations: props.operations,
- prompt: props.prompt
- });
+ const prompts = [];
+ for (const choice of completion.choices) {
+ if (choice.message.tool_calls != null) {
+ for (const tc of choice.message.tool_calls) {
+ if (tc.type !== "function") {
+ continue;
+ }
+ if (tc.function.name !== "selectFunctions") {
+ continue;
+ }
+ const input = (() => {
+ const _io0 = input => Array.isArray(input.functions) && input.functions.every((elem => "object" === typeof elem && null !== elem && _io1(elem)));
+ const _io1 = input => "string" === typeof input.reason && "string" === typeof input.name;
+ const __is = input => "object" === typeof input && null !== input && _io0(input);
+ return input => {
+ input = JSON.parse(input);
+ return __is(input) ? input : null;
+ };
+ })()(tc.function.arguments);
+ if (input === null) {
+ continue;
+ }
+ const collection = createSelectPrompt({
+ id: tc.id,
+ selections: []
+ });
+ for (const reference of input.functions) {
+ const operation = await selectFunction(ctx, reference);
+ if (operation === null) {
+ continue;
+ }
+ collection.selections.push(createOperationSelection({
+ operation,
+ reason: reference.reason
+ }));
+ }
+ if (collection.selections.length !== 0) {
+ prompts.push(collection);
+ }
+ }
+ }
+ if (choice.message.role === "assistant" && choice.message.content != null) {
+ const text = createTextPrompt({
+ role: "assistant",
+ text: choice.message.content
+ });
+ prompts.push(text);
+ await ctx.dispatch(createTextEvent({
+ role: "assistant",
+ stream: StreamUtil.to(text.text),
+ join: async () => Promise.resolve(text.text),
+ done: () => true,
+ get: () => text.text
+ }));
+ }
  }
- throw new Error("Invalid prompt type.");
- }
-
- function transformText(props) {
- return createTextPrompt(props.prompt);
- }
-
- function transformSelect(props) {
- return createSelectPrompt({
- id: props.prompt.id,
- selections: props.prompt.selections.map((select => createOperationSelection({
- operation: findOperation({
- operations: props.operations,
- input: select.operation
- }),
- reason: select.reason
- })))
- });
- }
-
- function transformCancel(props) {
- return createCancelPrompt({
- id: props.prompt.id,
- selections: props.prompt.selections.map((select => createOperationSelection({
- operation: findOperation({
- operations: props.operations,
- input: select.operation
- }),
- reason: select.reason
- })))
- });
- }
-
- function transformExecute(props) {
- return createExecutePrompt({
- id: props.prompt.id,
- operation: findOperation({
- operations: props.operations,
- input: props.prompt.operation
- }),
- arguments: props.prompt.arguments,
- value: props.prompt.value
- });
+ return prompts;
  }

- function transformDescribe(props) {
- return createDescribePrompt({
- text: props.prompt.text,
- executes: props.prompt.executes.map((next => transformExecute({
- operations: props.operations,
- prompt: next
- })))
- });
+ function emendMessages(failures) {
+ return failures.map((f => [ {
+ role: "assistant",
+ tool_calls: [ {
+ type: "function",
+ id: f.id,
+ function: {
+ name: f.name,
+ arguments: JSON.stringify(f.validation.data)
+ }
+ } ]
+ }, {
+ role: "tool",
+ content: JSON.stringify(f.validation.errors),
+ tool_call_id: f.id
+ }, {
+ role: "system",
+ content: [ "You A.I. assistant has composed wrong typed arguments.", "", "Correct it at the next function calling." ].join("\n")
+ } ])).flat();
  }

- function findOperation(props) {
- const found = props.operations.get(props.input.controller)?.get(props.input.function);
- if (found === undefined) {
- throw new Error(`No operation found: (controller: ${props.input.controller}, function: ${props.input.function})`);
- }
- return found;
+ function execute(executor) {
+ return async ctx => {
+ const histories = [];
+ if (ctx.ready() === false) {
+ if (executor?.initialize === null) {
+ await ctx.initialize();
+ } else {
+ histories.push(...await (executor?.initialize ?? initialize)(ctx));
+ if (ctx.ready() === false) {
+ return histories;
+ }
+ }
+ }
+ if (ctx.stack.length !== 0) {
+ histories.push(...await (executor?.cancel ?? cancel)(ctx));
+ }
+ histories.push(...await (executor?.select ?? select)(ctx));
+ if (ctx.stack.length === 0) {
+ return histories;
+ }
+ while (true) {
+ const prompts = await (executor?.call ?? call)(ctx);
+ histories.push(...prompts);
+ const executes = prompts.filter((prompt => prompt.type === "execute"));
+ for (const e of executes) {
+ await cancelFunction(ctx, {
+ reason: "completed",
+ name: e.operation.name
+ });
+ }
+ histories.push(...await (executor?.describe ?? describe)(ctx, executes));
+ if (executes.length === 0 || ctx.stack.length === 0) {
+ break;
+ }
+ }
+ return histories;
+ };
  }

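// Illustrative sketch (not from the package sources): how the executor pipeline above is wired.
// Per the Agentica constructor change below, config.executor may be a complete pipeline function
// or a partial { initialize, select, cancel, call, describe } object whose missing phases fall
// back to the built-in agents in this file. All other constructor values here are placeholders.
//
// const agent = new Agentica({
//   // ...model, vendor and controllers as documented by the package...
//   config: {
//     executor: {
//       initialize: null,                      // null skips the initializer agent (ctx.initialize() is called directly)
//       describe: async (ctx, executes) => [], // suppress post-execution descriptions
//     },
//   },
// });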
- const AgenticaPromptTransformer = {
- transform,
- transformText,
- transformSelect,
- transformCancel,
- transformExecute,
- transformDescribe
- };
-
  class Agentica {
  constructor(props) {
  this.props = props;
@@ -4249,7 +4226,7 @@ class Agentica {
  })));
  this.token_usage_ = AgenticaTokenUsage.zero();
  this.ready_ = false;
- this.executor_ = typeof props.config?.executor === "function" ? props.config.executor : ChatGptAgent.execute(props.config?.executor ?? null);
+ this.executor_ = typeof props.config?.executor === "function" ? props.config.executor : execute(props.config?.executor ?? null);
  }
  clone() {
  return new Agentica({
@@ -4383,7 +4360,7 @@ class Agentica {
  }
  }

- var index$1 = Object.freeze({
+ var index$2 = Object.freeze({
  __proto__: null,
  createCallEvent,
  createCancelEvent,
@@ -4400,7 +4377,8 @@ var index$1 = Object.freeze({
  createSelectPrompt,
  createTextEvent,
  createTextPrompt,
- createValidateEvent
+ createValidateEvent,
+ decodePrompt
  });

  function assertHttpLlmApplication(props) {
@@ -21119,17 +21097,24 @@ function validateHttpLlmApplication(props) {
  };
  }

+ var index$1 = Object.freeze({
+ __proto__: null,
+ ChatGptDescribeFunctionAgent,
+ call,
+ cancel,
+ describe,
+ execute,
+ initialize,
+ select
+ });
+
  var index = Object.freeze({
  __proto__: null,
- ChatGptAgent,
- ChatGptCallFunctionAgent,
- ChatGptCancelFunctionAgent,
+ AsyncQueue,
  ChatGptCompletionMessageUtil,
- ChatGptDescribeFunctionAgent,
- ChatGptHistoryDecoder,
- ChatGptInitializeFunctionAgent,
- ChatGptSelectFunctionAgent
+ MPSC,
+ StreamUtil
  });

- export { Agentica, AgenticaTokenUsage, assertHttpLlmApplication, index$1 as factory, index as orchestrate, validateHttpLlmApplication };
+ export { Agentica, AgenticaTokenUsage, assertHttpLlmApplication, index$2 as factory, index$1 as orchestrate, index as utils, validateHttpLlmApplication };
  //# sourceMappingURL=index.mjs.map
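// Illustrative sketch (not from the package sources): the export change above reshapes the
// entrypoint. 0.14.2 exposed ChatGpt*FunctionAgent objects under `orchestrate`; 0.15.0 exposes
// plain functions there and adds a `utils` namespace, while `factory` gains decodePrompt
// (the renamed ChatGptHistoryDecoder.decode).
import { factory, orchestrate, utils } from "@agentica/core";

console.log(typeof orchestrate.select);   // "function" (was ChatGptSelectFunctionAgent.execute)
console.log(typeof orchestrate.call);     // "function" (was ChatGptCallFunctionAgent.execute)
console.log(typeof utils.StreamUtil);     // AsyncQueue, MPSC and StreamUtil moved out of orchestrate
console.log(typeof factory.decodePrompt); // newly exported prompt/history decoder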