@agentica/core 0.14.3 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. package/lib/Agentica.js +6 -6
  2. package/lib/Agentica.js.map +1 -1
  3. package/lib/constants/AgenticaConstant.js.map +1 -0
  4. package/lib/{internal → constants}/AgenticaDefaultPrompt.js +1 -1
  5. package/lib/constants/AgenticaDefaultPrompt.js.map +1 -0
  6. package/lib/constants/AgenticaSystemPrompt.js.map +1 -0
  7. package/lib/{internal → context/internal}/AgenticaOperationComposer.d.ts +3 -3
  8. package/lib/{internal → context/internal}/AgenticaOperationComposer.js +1 -1
  9. package/lib/context/internal/AgenticaOperationComposer.js.map +1 -0
  10. package/lib/factory/events.js +6 -6
  11. package/lib/factory/events.js.map +1 -1
  12. package/lib/factory/prompts.d.ts +3 -0
  13. package/lib/factory/prompts.js +78 -4
  14. package/lib/factory/prompts.js.map +1 -1
  15. package/lib/index.d.ts +1 -0
  16. package/lib/index.js +6 -1
  17. package/lib/index.js.map +1 -1
  18. package/lib/index.mjs +996 -1011
  19. package/lib/index.mjs.map +1 -1
  20. package/lib/orchestrate/call.d.ts +4 -0
  21. package/lib/orchestrate/{ChatGptCallFunctionAgent.js → call.js} +12 -16
  22. package/lib/orchestrate/call.js.map +1 -0
  23. package/lib/orchestrate/cancel.d.ts +4 -0
  24. package/lib/orchestrate/{ChatGptCancelFunctionAgent.js → cancel.js} +12 -35
  25. package/lib/orchestrate/cancel.js.map +1 -0
  26. package/lib/orchestrate/{ChatGptDescribeFunctionAgent.d.ts → describe.d.ts} +2 -3
  27. package/lib/orchestrate/{ChatGptDescribeFunctionAgent.js → describe.js} +10 -10
  28. package/lib/orchestrate/describe.js.map +1 -0
  29. package/lib/orchestrate/{ChatGptAgent.d.ts → execute.d.ts} +1 -3
  30. package/lib/orchestrate/{ChatGptAgent.js → execute.js} +18 -17
  31. package/lib/orchestrate/execute.js.map +1 -0
  32. package/lib/orchestrate/index.d.ts +6 -8
  33. package/lib/orchestrate/index.js +6 -8
  34. package/lib/orchestrate/index.js.map +1 -1
  35. package/lib/orchestrate/initialize.d.ts +4 -0
  36. package/lib/orchestrate/{ChatGptInitializeFunctionAgent.js → initialize.js} +9 -13
  37. package/lib/orchestrate/initialize.js.map +1 -0
  38. package/lib/orchestrate/internal/cancelFunction.js +35 -0
  39. package/lib/orchestrate/internal/cancelFunction.js.map +1 -0
  40. package/lib/orchestrate/internal/selectFunction.d.ts +5 -0
  41. package/lib/orchestrate/internal/selectFunction.js +32 -0
  42. package/lib/orchestrate/internal/selectFunction.js.map +1 -0
  43. package/lib/orchestrate/select.d.ts +4 -0
  44. package/lib/orchestrate/{ChatGptSelectFunctionAgent.js → select.js} +12 -34
  45. package/lib/orchestrate/select.js.map +1 -0
  46. package/lib/transformers/AgenticaEventTransformer.js +1 -1
  47. package/lib/transformers/AgenticaEventTransformer.js.map +1 -1
  48. package/lib/utils/AsyncQueue.js.map +1 -0
  49. package/lib/utils/ByteArrayUtil.js.map +1 -0
  50. package/lib/{orchestrate → utils}/ChatGptCompletionMessageUtil.js +3 -3
  51. package/lib/utils/ChatGptCompletionMessageUtil.js.map +1 -0
  52. package/lib/{orchestrate/ChatGptUsageAggregator.d.ts → utils/ChatGptTokenUsageAggregator.d.ts} +1 -1
  53. package/lib/{orchestrate/ChatGptUsageAggregator.js → utils/ChatGptTokenUsageAggregator.js} +3 -3
  54. package/lib/utils/ChatGptTokenUsageAggregator.js.map +1 -0
  55. package/lib/utils/MPSC.js.map +1 -0
  56. package/lib/utils/MathUtil.js.map +1 -0
  57. package/lib/utils/Singleton.js.map +1 -0
  58. package/lib/utils/StreamUtil.js.map +1 -0
  59. package/lib/utils/__map_take.d.ts +1 -0
  60. package/lib/utils/__map_take.js.map +1 -0
  61. package/lib/utils/index.d.ts +4 -0
  62. package/lib/utils/index.js +21 -0
  63. package/lib/utils/index.js.map +1 -0
  64. package/package.json +1 -1
  65. package/src/Agentica.ts +6 -6
  66. package/src/{internal → constants}/AgenticaDefaultPrompt.ts +2 -2
  67. package/src/{internal → context/internal}/AgenticaOperationComposer.ts +5 -5
  68. package/src/factory/events.ts +6 -6
  69. package/src/factory/prompts.ts +91 -5
  70. package/src/index.ts +9 -0
  71. package/src/orchestrate/{ChatGptCallFunctionAgent.ts → call.ts} +11 -16
  72. package/src/orchestrate/{ChatGptCancelFunctionAgent.ts → cancel.ts} +9 -37
  73. package/src/orchestrate/{ChatGptDescribeFunctionAgent.ts → describe.ts} +9 -10
  74. package/src/orchestrate/{ChatGptAgent.ts → execute.ts} +16 -15
  75. package/src/orchestrate/index.ts +6 -8
  76. package/src/orchestrate/{ChatGptInitializeFunctionAgent.ts → initialize.ts} +8 -13
  77. package/src/orchestrate/internal/cancelFunction.ts +34 -0
  78. package/src/orchestrate/internal/selectFunction.ts +29 -0
  79. package/src/orchestrate/{ChatGptSelectFunctionAgent.ts → select.ts} +10 -37
  80. package/src/transformers/AgenticaEventTransformer.ts +1 -1
  81. package/src/{orchestrate → utils}/ChatGptCompletionMessageUtil.ts +3 -3
  82. package/src/{orchestrate/ChatGptUsageAggregator.ts → utils/ChatGptTokenUsageAggregator.ts} +1 -1
  83. package/src/utils/index.ts +4 -0
  84. package/lib/internal/AgenticaConstant.js.map +0 -1
  85. package/lib/internal/AgenticaDefaultPrompt.js.map +0 -1
  86. package/lib/internal/AgenticaOperationComposer.js.map +0 -1
  87. package/lib/internal/AgenticaSystemPrompt.js.map +0 -1
  88. package/lib/internal/AsyncQueue.js.map +0 -1
  89. package/lib/internal/ByteArrayUtil.js.map +0 -1
  90. package/lib/internal/MPSC.js.map +0 -1
  91. package/lib/internal/MathUtil.js.map +0 -1
  92. package/lib/internal/Singleton.js.map +0 -1
  93. package/lib/internal/StreamUtil.js.map +0 -1
  94. package/lib/internal/__map_take.js.map +0 -1
  95. package/lib/orchestrate/ChatGptAgent.js.map +0 -1
  96. package/lib/orchestrate/ChatGptCallFunctionAgent.d.ts +0 -8
  97. package/lib/orchestrate/ChatGptCallFunctionAgent.js.map +0 -1
  98. package/lib/orchestrate/ChatGptCancelFunctionAgent.d.ts +0 -12
  99. package/lib/orchestrate/ChatGptCancelFunctionAgent.js.map +0 -1
  100. package/lib/orchestrate/ChatGptCompletionMessageUtil.js.map +0 -1
  101. package/lib/orchestrate/ChatGptDescribeFunctionAgent.js.map +0 -1
  102. package/lib/orchestrate/ChatGptHistoryDecoder.d.ts +0 -8
  103. package/lib/orchestrate/ChatGptHistoryDecoder.js +0 -80
  104. package/lib/orchestrate/ChatGptHistoryDecoder.js.map +0 -1
  105. package/lib/orchestrate/ChatGptInitializeFunctionAgent.d.ts +0 -8
  106. package/lib/orchestrate/ChatGptInitializeFunctionAgent.js.map +0 -1
  107. package/lib/orchestrate/ChatGptSelectFunctionAgent.d.ts +0 -21
  108. package/lib/orchestrate/ChatGptSelectFunctionAgent.js.map +0 -1
  109. package/lib/orchestrate/ChatGptUsageAggregator.js.map +0 -1
  110. package/src/orchestrate/ChatGptHistoryDecoder.ts +0 -91
  111. /package/lib/{internal → constants}/AgenticaConstant.d.ts +0 -0
  112. /package/lib/{internal → constants}/AgenticaConstant.js +0 -0
  113. /package/lib/{internal → constants}/AgenticaDefaultPrompt.d.ts +0 -0
  114. /package/lib/{internal → constants}/AgenticaSystemPrompt.d.ts +0 -0
  115. /package/lib/{internal → constants}/AgenticaSystemPrompt.js +0 -0
  116. /package/lib/{internal/Singleton.d.ts → orchestrate/internal/cancelFunction.d.ts} +0 -0
  117. /package/lib/{internal → utils}/AsyncQueue.d.ts +0 -0
  118. /package/lib/{internal → utils}/AsyncQueue.js +0 -0
  119. /package/lib/{internal → utils}/ByteArrayUtil.d.ts +0 -0
  120. /package/lib/{internal → utils}/ByteArrayUtil.js +0 -0
  121. /package/lib/{orchestrate → utils}/ChatGptCompletionMessageUtil.d.ts +0 -0
  122. /package/lib/{internal → utils}/MPSC.d.ts +0 -0
  123. /package/lib/{internal → utils}/MPSC.js +0 -0
  124. /package/lib/{internal → utils}/MathUtil.d.ts +0 -0
  125. /package/lib/{internal → utils}/MathUtil.js +0 -0
  126. /package/lib/{internal/__map_take.d.ts → utils/Singleton.d.ts} +0 -0
  127. /package/lib/{internal → utils}/Singleton.js +0 -0
  128. /package/lib/{internal → utils}/StreamUtil.d.ts +0 -0
  129. /package/lib/{internal → utils}/StreamUtil.js +0 -0
  130. /package/lib/{internal → utils}/__map_take.js +0 -0
  131. /package/src/{internal → constants}/AgenticaConstant.ts +0 -0
  132. /package/src/{internal → constants}/AgenticaSystemPrompt.ts +0 -0
  133. /package/src/{internal → utils}/AsyncQueue.ts +0 -0
  134. /package/src/{internal → utils}/ByteArrayUtil.ts +0 -0
  135. /package/src/{internal → utils}/MPSC.ts +0 -0
  136. /package/src/{internal → utils}/MathUtil.ts +0 -0
  137. /package/src/{internal → utils}/Singleton.ts +0 -0
  138. /package/src/{internal → utils}/StreamUtil.ts +0 -0
  139. /package/src/{internal → utils}/__map_take.ts +0 -0
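The file list above boils down to three moves: the old internal/ helpers are split into constants/ (AgenticaConstant, AgenticaDefaultPrompt, AgenticaSystemPrompt) and utils/ (AsyncQueue, MPSC, StreamUtil, Singleton, __map_take, and the ChatGpt* utilities); the ChatGpt*FunctionAgent modules become plain functions under orchestrate/ (call, cancel, describe, execute, initialize, select); and ChatGptHistoryDecoder is folded into factory/prompts as decodePrompt. A minimal sketch of what the renamed orchestrate API looks like from the public entry point, based on the src/index.ts and orchestrate/* diffs below; the "chatgpt" type argument and the select override are illustrative assumptions, not part of the diff:

    import { orchestrate } from "@agentica/core";

    // 0.14.x built the default pipeline through ChatGptAgent.execute(null);
    // 0.15.0 exposes the equivalent factory as orchestrate.execute(null),
    // which wires initialize → cancel → select → call → describe.
    const executor = orchestrate.execute<"chatgpt">(null);

    // Stages can still be overridden, since execute() takes a
    // Partial<IAgenticaExecutor>; this override simply delegates back to the
    // built-in select stage (purely illustrative).
    const customized = orchestrate.execute<"chatgpt">({
      select: ctx => orchestrate.select(ctx),
    });

The Agentica.ts diff below makes the same substitution internally, replacing ChatGptAgent.execute(props.config?.executor ?? null) with execute(props.config?.executor ?? null).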
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1 @@
+ {"version":3,"file":"__map_take.js","sourceRoot":"","sources":["../../src/utils/__map_take.ts"],"names":[],"mappings":";;AAGA,gCASC;AAZD;;GAEG;AACH,SAAgB,UAAU,CAAS,IAAiB,EAAE,GAAQ,EAAE,SAAkB;IAChF,MAAM,MAAM,GAAkB,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IAC5C,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACzB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,MAAM,KAAK,GAAM,SAAS,EAAE,CAAC;IAC7B,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IACrB,OAAO,KAAK,CAAC;AACf,CAAC"}
@@ -0,0 +1,4 @@
+ export * from "./AsyncQueue";
+ export * from "./ChatGptCompletionMessageUtil";
+ export * from "./MPSC";
+ export * from "./StreamUtil";
@@ -0,0 +1,21 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ __exportStar(require("./AsyncQueue"), exports);
+ __exportStar(require("./ChatGptCompletionMessageUtil"), exports);
+ __exportStar(require("./MPSC"), exports);
+ __exportStar(require("./StreamUtil"), exports);
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/utils/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,+CAA6B;AAC7B,iEAA+C;AAC/C,yCAAuB;AACvB,+CAA6B"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@agentica/core",
- "version": "0.14.3",
+ "version": "0.15.0",
  "description": "Agentic AI Library specialized in LLM Function Calling",
  "author": "Wrtn Technologies",
  "license": "MIT",
package/src/Agentica.ts CHANGED
@@ -12,16 +12,16 @@ import type { IAgenticaVendor } from "./structures/IAgenticaVendor";
  import type { AgenticaTextPrompt } from "./prompts/AgenticaTextPrompt";
  import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";

- import { ChatGptAgent } from "./orchestrate/ChatGptAgent";
- import { ChatGptCompletionMessageUtil } from "./orchestrate/ChatGptCompletionMessageUtil";
+ import { ChatGptCompletionMessageUtil } from "./utils/ChatGptCompletionMessageUtil";
  import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
  import { AgenticaTokenUsageAggregator } from "./context/internal/AgenticaTokenUsageAggregator";
- import { __map_take } from "./internal/__map_take";
- import { AgenticaOperationComposer } from "./internal/AgenticaOperationComposer";
- import { StreamUtil } from "./internal/StreamUtil";
+ import { __map_take } from "./utils/__map_take";
+ import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
+ import { StreamUtil } from "./utils/StreamUtil";
  import { AgenticaPromptTransformer } from "./transformers/AgenticaPromptTransformer";
  import { createTextPrompt } from "./factory/prompts";
  import { createInitializeEvent, createRequestEvent, createTextEvent } from "./factory/events";
+ import { execute } from "./orchestrate/execute";

  /**
  * Nestia A.I. chatbot agent.
@@ -97,7 +97,7 @@ export class Agentica<Model extends ILlmSchema.Model> {
  this.executor_
  = typeof props.config?.executor === "function"
  ? props.config.executor
- : ChatGptAgent.execute(props.config?.executor ?? null);
+ : execute(props.config?.executor ?? null);
  }

  /**
@@ -1,8 +1,8 @@
  import type { ILlmSchema } from "@samchon/openapi";
-
  import type { IAgenticaConfig } from "../structures/IAgenticaConfig";
+
  import { AgenticaSystemPrompt } from "./AgenticaSystemPrompt";
- import { Singleton } from "./Singleton";
+ import { Singleton } from "../utils/Singleton";

  /**
  * @TODO maybe this code will rewrite
@@ -1,10 +1,10 @@
  import type { ILlmSchema } from "@samchon/openapi";
+ import type { AgenticaOperation } from "../AgenticaOperation";
+ import type { AgenticaOperationCollection } from "../AgenticaOperationCollection";
+ import type { IAgenticaConfig } from "../../structures/IAgenticaConfig";
+ import type { IAgenticaController } from "../../structures/IAgenticaController";

- import type { AgenticaOperation } from "../context/AgenticaOperation";
- import type { AgenticaOperationCollection } from "../context/AgenticaOperationCollection";
- import type { IAgenticaConfig } from "../structures/IAgenticaConfig";
- import type { IAgenticaController } from "../structures/IAgenticaController";
- import { __map_take } from "./__map_take";
+ import { __map_take } from "../../utils/__map_take";

  export function compose<Model extends ILlmSchema.Model>(props: {
  controllers: IAgenticaController<Model>[];
@@ -21,8 +21,8 @@ import type { AgenticaValidateEvent } from "../events/AgenticaValidateEvent";
  import { createExecutePrompt, createSelectPrompt } from "./prompts";

  /* -----------------------------------------------------------
- FUNCTION SELECTS
- ----------------------------------------------------------- */
+ FUNCTION SELECTS
+ ----------------------------------------------------------- */
  export function createInitializeEvent(): AgenticaInitializeEvent {
  const event: IAgenticaEventJson.IInitialize = {
  type: "initialize",
@@ -64,8 +64,8 @@ export function createCancelEvent<Model extends ILlmSchema.Model>(props: {
  }

  /* -----------------------------------------------------------
- FUNCTION CALLS
- ----------------------------------------------------------- */
+ FUNCTION CALLS
+ ----------------------------------------------------------- */
  export function createCallEvent<Model extends ILlmSchema.Model>(props: {
  id: string;
  operation: AgenticaOperation<Model>;
@@ -131,8 +131,8 @@ export function createExecuteEvent<Model extends ILlmSchema.Model>(props: {
  }

  /* -----------------------------------------------------------
- TEXT STEAMING
- ----------------------------------------------------------- */
+ TEXT STEAMING
+ ----------------------------------------------------------- */
  export function createTextEvent<Role extends "user" | "assistant">(props: {
  role: Role;
  stream: ReadableStream<string>;
@@ -1,4 +1,5 @@
- import type { ILlmSchema } from "@samchon/openapi";
+ import type OpenAI from "openai";
+ import type { IHttpResponse, ILlmSchema } from "@samchon/openapi";
  import type { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
  import type { IAgenticaPromptJson } from "../json/IAgenticaPromptJson";
  import type { AgenticaDescribePrompt } from "../prompts/AgenticaDescribePrompt";
@@ -7,10 +8,95 @@ import type { AgenticaOperationSelection } from "../context/AgenticaOperationSel
  import type { AgenticaSelectPrompt } from "../prompts/AgenticaSelectPrompt";
  import type { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
  import type { AgenticaOperation } from "../context/AgenticaOperation";
+ import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";
+
+ export function decodePrompt<Model extends ILlmSchema.Model>(history: AgenticaPrompt<Model>): OpenAI.ChatCompletionMessageParam[] {
+ // NO NEED TO DECODE DESCRIBE
+ if (history.type === "describe") {
+ return [];
+ }
+ else if (history.type === "text") {
+ return [
+ {
+ role: history.role,
+ content: history.text,
+ },
+ ];
+ }
+ else if (history.type === "select" || history.type === "cancel") {
+ return [
+ {
+ role: "assistant",
+ tool_calls: [
+ {
+ type: "function",
+ id: history.id,
+ function: {
+ name: `${history.type}Functions`,
+ arguments: JSON.stringify({
+ functions: history.selections.map(s => ({
+ name: s.operation.function.name,
+ reason: s.reason,
+ })),
+ }),
+ },
+ },
+ ],
+ },
+ {
+ role: "tool",
+ tool_call_id: history.id,
+ content: "",
+ },
+ ];
+ }
+
+ return [
+ {
+ role: "assistant",
+ tool_calls: [
+ {
+ type: "function",
+ id: history.id,
+ function: {
+ name: history.operation.name,
+ arguments: JSON.stringify(history.arguments),
+ },
+ },
+ ],
+ },
+ {
+ role: "tool",
+ tool_call_id: history.id,
+ content: JSON.stringify({
+ function: {
+ protocol: history.operation.protocol,
+ description: history.operation.function.description,
+ parameters: history.operation.function.parameters,
+ output: history.operation.function.output,
+ ...(history.operation.protocol === "http"
+ ? {
+ method: history.operation.function.method,
+ path: history.operation.function.path,
+ }
+ : {}),
+ },
+ ...(history.operation.protocol === "http"
+ ? {
+ status: (history.value as IHttpResponse).status,
+ data: (history.value as IHttpResponse).body,
+ }
+ : {
+ value: history.value,
+ }),
+ }),
+ },
+ ];
+ }

  /* -----------------------------------------------------------
- TEXT PROMPTS
- ----------------------------------------------------------- */
+ TEXT PROMPTS
+ ----------------------------------------------------------- */
  export function createTextPrompt<Role extends "assistant" | "user" = "assistant" | "user">(props: {
  role: Role;
  text: string;
@@ -43,8 +129,8 @@ export function createDescribePrompt<Model extends ILlmSchema.Model>(props: {
  }

  /* -----------------------------------------------------------
- FUNCTION CALLING PROMPTS
- ----------------------------------------------------------- */
+ FUNCTION CALLING PROMPTS
+ ----------------------------------------------------------- */
  export function createSelectPrompt<Model extends ILlmSchema.Model>(props: {
  id: string;
  selections: AgenticaOperationSelection<Model>[];
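The decodePrompt function added above takes over for the deleted ChatGptHistoryDecoder: a describe history decodes to no messages, a text history to a single chat message, and select/cancel/execute histories to an assistant tool_call paired with its tool result. A hedged sketch of how the orchestrators further down consume it; the decodeHistories helper is hypothetical, while the mapping expression is taken from the call.ts diff:

    import type OpenAI from "openai";
    import type { ILlmSchema } from "@samchon/openapi";
    import type { AgenticaContext } from "../context/AgenticaContext";

    import { decodePrompt } from "../factory/prompts";

    // Hypothetical helper: flatten the conversation histories kept on the
    // orchestration context into the message array sent to chat.completions.
    function decodeHistories<Model extends ILlmSchema.Model>(
      ctx: AgenticaContext<Model>,
    ): OpenAI.ChatCompletionMessageParam[] {
      // Each stored prompt becomes zero, one, or two chat messages,
      // exactly as implemented above.
      return ctx.histories.map(decodePrompt).flat();
    }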
package/src/index.ts CHANGED
@@ -16,6 +16,8 @@ export * from "./events/AgenticaRequestEvent";
  export * from "./events/AgenticaResponseEvent";
  export * from "./events/AgenticaSelectEvent";
  export * from "./events/AgenticaTextEvent";
+ export * from "./events/AgenticaValidateEvent";
+
  export * as factory from "./factory";

  export * from "./functional/assertHttpLlmApplication";
@@ -26,7 +28,9 @@ export * from "./json/IAgenticaOperationSelectionJson";
  export * from "./json/IAgenticaPromptJson";

  export * from "./json/IAgenticaTokenUsageJson";
+
  export * as orchestrate from "./orchestrate";
+
  export * from "./prompts/AgenticaCancelPrompt";
  export * from "./prompts/AgenticaDescribePrompt";
  export * from "./prompts/AgenticaExecutePrompt";
@@ -41,3 +45,8 @@ export * from "./structures/IAgenticaProps";
  export * from "./structures/IAgenticaSystemPrompt";

  export * from "./structures/IAgenticaVendor";
+
+ /**
+ * @internal
+ */
+ export * as utils from "./utils";
@@ -19,18 +19,17 @@ import type { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
  import type { AgenticaExecutePrompt } from "../prompts/AgenticaExecutePrompt";
  import type { AgenticaCallEvent } from "../events/AgenticaCallEvent";

- import { AgenticaConstant } from "../internal/AgenticaConstant";
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { StreamUtil } from "../internal/StreamUtil";
- import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
- import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { AgenticaConstant } from "../constants/AgenticaConstant";
+ import { AgenticaDefaultPrompt } from "../constants/AgenticaDefaultPrompt";
+ import { AgenticaSystemPrompt } from "../constants/AgenticaSystemPrompt";
+ import { StreamUtil } from "../utils/StreamUtil";
+ import { ChatGptCompletionMessageUtil } from "../utils/ChatGptCompletionMessageUtil";
  import { createCallEvent, createCancelEvent, createExecuteEvent, createTextEvent, createValidateEvent } from "../factory/events";
  import { createOperationSelection } from "../factory/operations";
- import { createCancelPrompt, createExecutePrompt, createTextPrompt } from "../factory/prompts";
+ import { createCancelPrompt, createExecutePrompt, createTextPrompt, decodePrompt } from "../factory/prompts";
+ import { cancelFunction } from "./internal/cancelFunction";

- async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
+ export async function call<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
  // ----
  // EXECUTE CHATGPT API
  // ----
@@ -42,7 +41,7 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  content: AgenticaDefaultPrompt.write(ctx.config),
  } satisfies OpenAI.ChatCompletionSystemMessageParam,
  // PREVIOUS HISTORIES
- ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+ ...ctx.histories.map(decodePrompt).flat(),
  // USER INPUT
  {
  role: "user",
@@ -137,7 +136,7 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  }),
  );

- await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
+ await cancelFunction(ctx, {
  name: call.operation.name,
  reason: "completed",
  });
@@ -369,7 +368,7 @@ async function correct<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  content: AgenticaDefaultPrompt.write(ctx.config),
  } satisfies OpenAI.ChatCompletionSystemMessageParam,
  // PREVIOUS HISTORIES
- ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+ ...ctx.histories.map(decodePrompt).flat(),
  // USER INPUT
  {
  role: "user",
@@ -496,7 +495,3 @@ function isObject($defs: Record<string, IChatGptSchema>, schema: IChatGptSchema)
  && schema.anyOf.every(schema => isObject($defs, schema)))
  );
  }
-
- export const ChatGptCallFunctionAgent = {
- execute,
- };
@@ -12,15 +12,13 @@ import type { AgenticaEvent } from "../events/AgenticaEvent";
  import type { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
  import type { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";

- import { AgenticaConstant } from "../internal/AgenticaConstant";
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { StreamUtil } from "../internal/StreamUtil";
- import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
- import { createCancelPrompt } from "../factory/prompts";
- import { createCancelEvent } from "../factory/events";
- import { createOperationSelection } from "../factory/operations";
+ import { AgenticaConstant } from "../constants/AgenticaConstant";
+ import { AgenticaDefaultPrompt } from "../constants/AgenticaDefaultPrompt";
+ import { AgenticaSystemPrompt } from "../constants/AgenticaSystemPrompt";
+ import { StreamUtil } from "../utils/StreamUtil";
+ import { ChatGptCompletionMessageUtil } from "../utils/ChatGptCompletionMessageUtil";
+ import { createCancelPrompt, decodePrompt } from "../factory/prompts";
+ import { cancelFunction } from "./internal/cancelFunction";

  const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
  __IChatCancelFunctionsApplication,
@@ -33,7 +31,7 @@ interface IFailure {
  validation: IValidation.IFailure;
  }

- async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaCancelPrompt<Model>[]> {
+ export async function cancel<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaCancelPrompt<Model>[]> {
  if (ctx.operations.divided === undefined) {
  return step(ctx, ctx.operations.array, 0);
  }
@@ -94,27 +92,6 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  return [collection];
  }

- async function cancelFunction<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, reference: __IChatFunctionReference): Promise<AgenticaOperationSelection<Model> | null> {
- const index: number = ctx.stack.findIndex(
- item => item.operation.name === reference.name,
- );
- if (index === -1) {
- return null;
- }
-
- const item: AgenticaOperationSelection<Model> = ctx.stack[index]!;
- ctx.stack.splice(index, 1);
- await ctx.dispatch(
- createCancelEvent({
- selection: createOperationSelection({
- operation: item.operation,
- reason: reference.reason,
- }),
- }),
- );
- return item;
- }
-
  async function step<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, operations: AgenticaOperation<Model>[], retry: number, failures?: IFailure[]): Promise<AgenticaCancelPrompt<Model>[]> {
  // ----
@@ -158,7 +135,7 @@ async function step<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>,
  ),
  },
  // PREVIOUS HISTORIES
- ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+ ...ctx.histories.map(decodePrompt).flat(),
  // USER INPUT
  {
  role: "user",
@@ -299,8 +276,3 @@ function emendMessages(failures: IFailure[]): OpenAI.ChatCompletionMessageParam[
  ])
  .flat();
  }
-
- export const ChatGptCancelFunctionAgent = {
- execute,
- cancelFunction,
- };
@@ -4,16 +4,15 @@ import type { AgenticaContext } from "../context/AgenticaContext";
  import type { AgenticaExecutePrompt } from "../prompts/AgenticaExecutePrompt";
  import type { AgenticaDescribePrompt } from "../prompts/AgenticaDescribePrompt";

- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { MPSC } from "../internal/MPSC";
- import { StreamUtil } from "../internal/StreamUtil";
- import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { AgenticaDefaultPrompt } from "../constants/AgenticaDefaultPrompt";
+ import { AgenticaSystemPrompt } from "../constants/AgenticaSystemPrompt";
+ import { MPSC } from "../utils/MPSC";
+ import { StreamUtil } from "../utils/StreamUtil";
+ import { ChatGptCompletionMessageUtil } from "../utils/ChatGptCompletionMessageUtil";
  import { createDescribeEvent } from "../factory/events";
- import { createDescribePrompt } from "../factory/prompts";
+ import { createDescribePrompt, decodePrompt } from "../factory/prompts";

- async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, histories: AgenticaExecutePrompt<Model>[]): Promise<AgenticaDescribePrompt<Model>[]> {
+ export async function describe<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, histories: AgenticaExecutePrompt<Model>[]): Promise<AgenticaDescribePrompt<Model>[]> {
  if (histories.length === 0) {
  return [];
  }
@@ -26,7 +25,7 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  content: AgenticaDefaultPrompt.write(ctx.config),
  } satisfies OpenAI.ChatCompletionSystemMessageParam,
  // FUNCTION CALLING HISTORIES
- ...histories.map(ChatGptHistoryDecoder.decode).flat(),
+ ...histories.map(decodePrompt).flat(),
  // SYSTEM PROMPT
  {
  role: "system",
@@ -123,5 +122,5 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  }

  export const ChatGptDescribeFunctionAgent = {
- execute,
+ execute: describe,
  };
@@ -4,14 +4,15 @@ import type { AgenticaExecutePrompt } from "../prompts/AgenticaExecutePrompt";
  import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";
  import type { IAgenticaExecutor } from "../structures/IAgenticaExecutor";

- import { ChatGptCallFunctionAgent } from "./ChatGptCallFunctionAgent";
- import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
- import { ChatGptDescribeFunctionAgent } from "./ChatGptDescribeFunctionAgent";
- import { ChatGptInitializeFunctionAgent } from "./ChatGptInitializeFunctionAgent";
- import { ChatGptSelectFunctionAgent } from "./ChatGptSelectFunctionAgent";
+ import { describe } from "./describe";
+ import { cancel } from "./cancel";
+ import { call } from "./call";
+ import { cancelFunction } from "./internal/cancelFunction";
+ import { initialize } from "./initialize";
+ import { select } from "./select";

- export const ChatGptAgent = {
- execute: <Model extends ILlmSchema.Model>(executor: Partial<IAgenticaExecutor<Model>> | null) => async (ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> => {
+ export function execute<Model extends ILlmSchema.Model>(executor: Partial<IAgenticaExecutor<Model>> | null) {
+ return async (ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> => {
  const histories: AgenticaPrompt<Model>[] = [];

  // FUNCTIONS ARE NOT LISTED YET
@@ -22,7 +23,7 @@ export const ChatGptAgent = {
  else {
  histories.push(
  ...(await (
- executor?.initialize ?? ChatGptInitializeFunctionAgent.execute
+ executor?.initialize ?? initialize
  )(ctx)),
  );
  if (ctx.ready() === false) {
@@ -34,7 +35,7 @@
  // CANCEL CANDIDATE FUNCTIONS
  if (ctx.stack.length !== 0) {
  histories.push(
- ...(await (executor?.cancel ?? ChatGptCancelFunctionAgent.execute)(
+ ...(await (executor?.cancel ?? cancel)(
  ctx,
  )),
  );
@@ -42,7 +43,7 @@

  // SELECT CANDIDATE FUNCTIONS
  histories.push(
- ...(await (executor?.select ?? ChatGptSelectFunctionAgent.execute)(
+ ...(await (executor?.select ?? select)(
  ctx,
  )),
  );
@@ -54,7 +55,7 @@
  while (true) {
  // EXECUTE FUNCTIONS
  const prompts: AgenticaPrompt<Model>[] = await (
- executor?.call ?? ChatGptCallFunctionAgent.execute
+ executor?.call ?? call
  )(ctx);
  histories.push(...prompts);

@@ -63,14 +64,14 @@
  prompt => prompt.type === "execute",
  );
  for (const e of executes) {
- await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
+ await cancelFunction(ctx, {
  reason: "completed",
  name: e.operation.name,
  });
  }
  histories.push(
  ...(await (
- executor?.describe ?? ChatGptDescribeFunctionAgent.execute
+ executor?.describe ?? describe
  )(ctx, executes)),
  );
  if (executes.length === 0 || ctx.stack.length === 0) {
@@ -78,5 +79,5 @@
  }
  }
  return histories;
- },
- };
+ };
+ }
@@ -1,8 +1,6 @@
- export * from "./ChatGptAgent";
- export * from "./ChatGptCallFunctionAgent";
- export * from "./ChatGptCancelFunctionAgent";
- export * from "./ChatGptCompletionMessageUtil";
- export * from "./ChatGptDescribeFunctionAgent";
- export * from "./ChatGptHistoryDecoder";
- export * from "./ChatGptInitializeFunctionAgent";
- export * from "./ChatGptSelectFunctionAgent";
+ export * from "./call";
+ export * from "./cancel";
+ export * from "./describe";
+ export * from "./execute";
+ export * from "./initialize";
+ export * from "./select";
@@ -6,21 +6,20 @@ import type { AgenticaContext } from "../context/AgenticaContext";
  import type { __IChatInitialApplication } from "../context/internal/__IChatInitialApplication";
  import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";

- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { MPSC } from "../internal/MPSC";
- import { StreamUtil } from "../internal/StreamUtil";
- import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { AgenticaDefaultPrompt } from "../constants/AgenticaDefaultPrompt";
+ import { AgenticaSystemPrompt } from "../constants/AgenticaSystemPrompt";
+ import { MPSC } from "../utils/MPSC";
+ import { StreamUtil } from "../utils/StreamUtil";
+ import { ChatGptCompletionMessageUtil } from "../utils/ChatGptCompletionMessageUtil";
  import { createTextEvent } from "../factory/events";
- import { createTextPrompt } from "../factory/prompts";
+ import { createTextPrompt, decodePrompt } from "../factory/prompts";

  const FUNCTION: ILlmFunction<"chatgpt"> = typia.llm.application<
  __IChatInitialApplication,
  "chatgpt"
  >().functions[0]!;

- async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
+ export async function initialize<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
  // ----
  // EXECUTE CHATGPT API
  // ----
@@ -32,7 +31,7 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  content: AgenticaDefaultPrompt.write(ctx.config),
  } satisfies OpenAI.ChatCompletionSystemMessageParam,
  // PREVIOUS HISTORIES
- ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+ ...ctx.histories.map(decodePrompt).flat(),
  // USER INPUT
  {
  role: "user",
@@ -164,7 +163,3 @@ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode

  return prompts;
  }
-
- export const ChatGptInitializeFunctionAgent = {
- execute,
- };
@@ -0,0 +1,34 @@
+ import type { ILlmSchema } from "@samchon/openapi";
+ import type { AgenticaOperationSelection } from "../../context/AgenticaOperationSelection";
+ import type { AgenticaContext } from "../../context/AgenticaContext";
+ import type { __IChatFunctionReference } from "../../context/internal/__IChatFunctionReference";
+
+ import { createCancelEvent } from "../../factory/events";
+ import { createOperationSelection } from "../../factory/operations";
+
+ /**
+ * @internal
+ */
+ export async function cancelFunction<Model extends ILlmSchema.Model>(
+ ctx: AgenticaContext<Model>,
+ reference: __IChatFunctionReference,
+ ): Promise<AgenticaOperationSelection<Model> | null> {
+ const index: number = ctx.stack.findIndex(
+ item => item.operation.name === reference.name,
+ );
+ if (index === -1) {
+ return null;
+ }
+
+ const item: AgenticaOperationSelection<Model> = ctx.stack[index]!;
+ ctx.stack.splice(index, 1);
+ await ctx.dispatch(
+ createCancelEvent({
+ selection: createOperationSelection({
+ operation: item.operation,
+ reason: reference.reason,
+ }),
+ }),
+ );
+ return item;
+ }
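Both call.ts and execute.ts above invoke this extracted helper the same way once a function call has completed. A small illustrative sketch; the markCompleted wrapper is an assumption rather than package code:

    import type { ILlmSchema } from "@samchon/openapi";
    import type { AgenticaContext } from "../../context/AgenticaContext";
    import type { AgenticaOperation } from "../../context/AgenticaOperation";

    import { cancelFunction } from "./cancelFunction";

    // Hypothetical wrapper: drop a completed operation from the selection
    // stack and emit the corresponding cancel event, mirroring the call
    // sites in call.ts and execute.ts.
    async function markCompleted<Model extends ILlmSchema.Model>(
      ctx: AgenticaContext<Model>,
      operation: AgenticaOperation<Model>,
    ): Promise<void> {
      await cancelFunction(ctx, {
        name: operation.name,
        reason: "completed",
      });
    }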