@agentica/core 0.13.5 → 0.14.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (189)
  1. package/lib/Agentica.js +8 -10
  2. package/lib/Agentica.js.map +1 -1
  3. package/lib/context/AgenticaCancelPrompt.d.ts +4 -12
  4. package/lib/context/AgenticaCancelPrompt.js +0 -17
  5. package/lib/context/AgenticaCancelPrompt.js.map +1 -1
  6. package/lib/context/AgenticaOperationSelection.d.ts +4 -11
  7. package/lib/context/AgenticaOperationSelection.js +0 -14
  8. package/lib/context/AgenticaOperationSelection.js.map +1 -1
  9. package/lib/context/AgenticaTokenUsage.js.map +1 -1
  10. package/lib/context/internal/AgenticaTokenUsageAggregator.js.map +1 -1
  11. package/lib/events/AgenticaCallEvent.d.ts +23 -13
  12. package/lib/events/AgenticaCallEvent.js +0 -19
  13. package/lib/events/AgenticaCallEvent.js.map +1 -1
  14. package/lib/events/AgenticaCancelEvent.d.ts +4 -10
  15. package/lib/events/AgenticaCancelEvent.js +0 -15
  16. package/lib/events/AgenticaCancelEvent.js.map +1 -1
  17. package/lib/events/AgenticaDescribeEvent.d.ts +8 -27
  18. package/lib/events/AgenticaDescribeEvent.js +0 -52
  19. package/lib/events/AgenticaDescribeEvent.js.map +1 -1
  20. package/lib/events/AgenticaEvent.d.ts +13 -1
  21. package/lib/events/AgenticaEventBase.d.ts +5 -3
  22. package/lib/events/AgenticaEventBase.js +0 -7
  23. package/lib/events/AgenticaEventBase.js.map +1 -1
  24. package/lib/events/AgenticaExecuteEvent.d.ts +15 -15
  25. package/lib/events/AgenticaExecuteEvent.js +0 -30
  26. package/lib/events/AgenticaExecuteEvent.js.map +1 -1
  27. package/lib/events/AgenticaInitializeEvent.d.ts +3 -4
  28. package/lib/events/AgenticaInitializeEvent.js +0 -13
  29. package/lib/events/AgenticaInitializeEvent.js.map +1 -1
  30. package/lib/events/AgenticaRequestEvent.d.ts +5 -24
  31. package/lib/events/AgenticaRequestEvent.js +0 -19
  32. package/lib/events/AgenticaRequestEvent.js.map +1 -1
  33. package/lib/events/AgenticaResponseEvent.d.ts +22 -32
  34. package/lib/events/AgenticaResponseEvent.js +0 -13
  35. package/lib/events/AgenticaResponseEvent.js.map +1 -1
  36. package/lib/events/AgenticaSelectEvent.d.ts +6 -12
  37. package/lib/events/AgenticaSelectEvent.js +0 -23
  38. package/lib/events/AgenticaSelectEvent.js.map +1 -1
  39. package/lib/events/AgenticaTextEvent.d.ts +8 -27
  40. package/lib/events/AgenticaTextEvent.js +0 -52
  41. package/lib/events/AgenticaTextEvent.js.map +1 -1
  42. package/lib/events/AgenticaValidateEvent.d.ts +25 -0
  43. package/lib/events/AgenticaValidateEvent.js +3 -0
  44. package/lib/events/AgenticaValidateEvent.js.map +1 -0
  45. package/lib/factory/events.d.ts +65 -0
  46. package/lib/factory/events.js +173 -0
  47. package/lib/factory/events.js.map +1 -0
  48. package/lib/factory/index.d.ts +3 -0
  49. package/lib/factory/index.js +20 -0
  50. package/lib/factory/index.js.map +1 -0
  51. package/lib/factory/operations.d.ts +7 -0
  52. package/lib/factory/operations.js +14 -0
  53. package/lib/factory/operations.js.map +1 -0
  54. package/lib/factory/prompts.d.ts +30 -0
  55. package/lib/factory/prompts.js +76 -0
  56. package/lib/factory/prompts.js.map +1 -0
  57. package/lib/index.d.ts +2 -0
  58. package/lib/index.js +25 -4
  59. package/lib/index.js.map +1 -1
  60. package/lib/index.mjs +328 -347
  61. package/lib/index.mjs.map +1 -1
  62. package/lib/json/IAgenticaEventJson.d.ts +4 -2
  63. package/lib/json/IAgenticaPromptJson.d.ts +1 -1
  64. package/lib/orchestrate/ChatGptAgent.js.map +1 -0
  65. package/lib/{chatgpt → orchestrate}/ChatGptCallFunctionAgent.d.ts +2 -1
  66. package/lib/{chatgpt → orchestrate}/ChatGptCallFunctionAgent.js +33 -28
  67. package/lib/orchestrate/ChatGptCallFunctionAgent.js.map +1 -0
  68. package/lib/orchestrate/ChatGptCancelFunctionAgent.d.ts +12 -0
  69. package/lib/{chatgpt → orchestrate}/ChatGptCancelFunctionAgent.js +7 -9
  70. package/lib/orchestrate/ChatGptCancelFunctionAgent.js.map +1 -0
  71. package/lib/orchestrate/ChatGptCompletionMessageUtil.d.ts +14 -0
  72. package/lib/{chatgpt → orchestrate}/ChatGptCompletionMessageUtil.js +0 -5
  73. package/lib/orchestrate/ChatGptCompletionMessageUtil.js.map +1 -0
  74. package/lib/{chatgpt → orchestrate}/ChatGptDescribeFunctionAgent.d.ts +3 -2
  75. package/lib/{chatgpt → orchestrate}/ChatGptDescribeFunctionAgent.js +4 -5
  76. package/lib/orchestrate/ChatGptDescribeFunctionAgent.js.map +1 -0
  77. package/lib/{chatgpt → orchestrate}/ChatGptHistoryDecoder.d.ts +2 -1
  78. package/lib/{chatgpt → orchestrate}/ChatGptHistoryDecoder.js +0 -1
  79. package/lib/orchestrate/ChatGptHistoryDecoder.js.map +1 -0
  80. package/lib/{chatgpt → orchestrate}/ChatGptInitializeFunctionAgent.d.ts +2 -1
  81. package/lib/{chatgpt → orchestrate}/ChatGptInitializeFunctionAgent.js +4 -5
  82. package/lib/orchestrate/ChatGptInitializeFunctionAgent.js.map +1 -0
  83. package/lib/{chatgpt → orchestrate}/ChatGptSelectFunctionAgent.d.ts +1 -1
  84. package/lib/{chatgpt → orchestrate}/ChatGptSelectFunctionAgent.js +10 -13
  85. package/lib/orchestrate/ChatGptSelectFunctionAgent.js.map +1 -0
  86. package/lib/orchestrate/ChatGptUsageAggregator.d.ts +10 -0
  87. package/lib/{chatgpt → orchestrate}/ChatGptUsageAggregator.js +0 -3
  88. package/lib/orchestrate/ChatGptUsageAggregator.js.map +1 -0
  89. package/lib/orchestrate/index.d.ts +8 -0
  90. package/lib/orchestrate/index.js +25 -0
  91. package/lib/orchestrate/index.js.map +1 -0
  92. package/lib/prompts/AgenticaCancelPrompt.d.ts +4 -12
  93. package/lib/prompts/AgenticaCancelPrompt.js +0 -17
  94. package/lib/prompts/AgenticaCancelPrompt.js.map +1 -1
  95. package/lib/prompts/AgenticaDescribePrompt.d.ts +4 -12
  96. package/lib/prompts/AgenticaDescribePrompt.js +0 -17
  97. package/lib/prompts/AgenticaDescribePrompt.js.map +1 -1
  98. package/lib/prompts/AgenticaExecutePrompt.d.ts +42 -13
  99. package/lib/prompts/AgenticaExecutePrompt.js +0 -21
  100. package/lib/prompts/AgenticaExecutePrompt.js.map +1 -1
  101. package/lib/prompts/AgenticaPrompt.d.ts +10 -0
  102. package/lib/prompts/AgenticaPromptBase.d.ts +5 -6
  103. package/lib/prompts/AgenticaPromptBase.js +0 -19
  104. package/lib/prompts/AgenticaPromptBase.js.map +1 -1
  105. package/lib/prompts/AgenticaSelectPrompt.d.ts +4 -12
  106. package/lib/prompts/AgenticaSelectPrompt.js +0 -17
  107. package/lib/prompts/AgenticaSelectPrompt.js.map +1 -1
  108. package/lib/prompts/AgenticaTextPrompt.d.ts +4 -12
  109. package/lib/prompts/AgenticaTextPrompt.js +0 -17
  110. package/lib/prompts/AgenticaTextPrompt.js.map +1 -1
  111. package/lib/transformers/AgenticaEventTransformer.d.ts +16 -16
  112. package/lib/transformers/AgenticaEventTransformer.js +12 -30
  113. package/lib/transformers/AgenticaEventTransformer.js.map +1 -1
  114. package/lib/transformers/AgenticaPromptTransformer.d.ts +12 -11
  115. package/lib/transformers/AgenticaPromptTransformer.js +9 -19
  116. package/lib/transformers/AgenticaPromptTransformer.js.map +1 -1
  117. package/package.json +3 -3
  118. package/src/Agentica.ts +11 -11
  119. package/src/context/AgenticaCancelPrompt.ts +4 -25
  120. package/src/context/AgenticaContext.ts +0 -1
  121. package/src/context/AgenticaOperation.ts +0 -1
  122. package/src/context/AgenticaOperationCollection.ts +0 -1
  123. package/src/context/AgenticaOperationSelection.ts +4 -22
  124. package/src/context/AgenticaTokenUsage.ts +2 -2
  125. package/src/context/internal/AgenticaTokenUsageAggregator.ts +0 -1
  126. package/src/events/AgenticaCallEvent.ts +26 -29
  127. package/src/events/AgenticaCancelEvent.ts +4 -21
  128. package/src/events/AgenticaDescribeEvent.ts +8 -57
  129. package/src/events/AgenticaEvent.ts +14 -2
  130. package/src/events/AgenticaEventBase.ts +5 -6
  131. package/src/events/AgenticaExecuteEvent.ts +33 -41
  132. package/src/events/AgenticaInitializeEvent.ts +3 -11
  133. package/src/events/AgenticaRequestEvent.ts +5 -40
  134. package/src/events/AgenticaResponseEvent.ts +22 -39
  135. package/src/events/AgenticaSelectEvent.ts +6 -30
  136. package/src/events/AgenticaTextEvent.ts +11 -60
  137. package/src/events/AgenticaValidateEvent.ts +31 -0
  138. package/src/factory/events.ts +229 -0
  139. package/src/factory/index.ts +3 -0
  140. package/src/factory/operations.ts +17 -0
  141. package/src/factory/prompts.ts +104 -0
  142. package/src/index.ts +5 -6
  143. package/src/json/IAgenticaEventJson.ts +5 -3
  144. package/src/json/IAgenticaPromptJson.ts +1 -1
  145. package/src/{chatgpt → orchestrate}/ChatGptCallFunctionAgent.ts +62 -50
  146. package/src/{chatgpt → orchestrate}/ChatGptCancelFunctionAgent.ts +18 -16
  147. package/src/{chatgpt → orchestrate}/ChatGptCompletionMessageUtil.ts +7 -6
  148. package/src/{chatgpt → orchestrate}/ChatGptDescribeFunctionAgent.ts +7 -6
  149. package/src/{chatgpt → orchestrate}/ChatGptHistoryDecoder.ts +1 -2
  150. package/src/{chatgpt → orchestrate}/ChatGptInitializeFunctionAgent.ts +8 -7
  151. package/src/{chatgpt → orchestrate}/ChatGptSelectFunctionAgent.ts +18 -16
  152. package/src/{chatgpt → orchestrate}/ChatGptUsageAggregator.ts +3 -3
  153. package/src/orchestrate/index.ts +8 -0
  154. package/src/prompts/AgenticaCancelPrompt.ts +4 -25
  155. package/src/prompts/AgenticaDescribePrompt.ts +4 -25
  156. package/src/prompts/AgenticaExecutePrompt.ts +52 -44
  157. package/src/prompts/AgenticaPrompt.ts +10 -1
  158. package/src/prompts/AgenticaPromptBase.ts +5 -9
  159. package/src/prompts/AgenticaSelectPrompt.ts +4 -25
  160. package/src/prompts/AgenticaTextPrompt.ts +4 -26
  161. package/src/transformers/AgenticaEventTransformer.ts +30 -32
  162. package/src/transformers/AgenticaPromptTransformer.ts +22 -22
  163. package/lib/chatgpt/ChatGptAgent.js.map +0 -1
  164. package/lib/chatgpt/ChatGptCallFunctionAgent.js.map +0 -1
  165. package/lib/chatgpt/ChatGptCancelFunctionAgent.d.ts +0 -11
  166. package/lib/chatgpt/ChatGptCancelFunctionAgent.js.map +0 -1
  167. package/lib/chatgpt/ChatGptCompletionMessageUtil.d.ts +0 -13
  168. package/lib/chatgpt/ChatGptCompletionMessageUtil.js.map +0 -1
  169. package/lib/chatgpt/ChatGptDescribeFunctionAgent.js.map +0 -1
  170. package/lib/chatgpt/ChatGptHistoryDecoder.js.map +0 -1
  171. package/lib/chatgpt/ChatGptInitializeFunctionAgent.js.map +0 -1
  172. package/lib/chatgpt/ChatGptSelectFunctionAgent.js.map +0 -1
  173. package/lib/chatgpt/ChatGptUsageAggregator.d.ts +0 -9
  174. package/lib/chatgpt/ChatGptUsageAggregator.js.map +0 -1
  175. package/lib/context/AgenticaClassOperation.d.ts +0 -8
  176. package/lib/context/AgenticaClassOperation.js +0 -11
  177. package/lib/context/AgenticaClassOperation.js.map +0 -1
  178. package/lib/context/AgenticaHttpOperation.d.ts +0 -8
  179. package/lib/context/AgenticaHttpOperation.js +0 -11
  180. package/lib/context/AgenticaHttpOperation.js.map +0 -1
  181. package/lib/context/AgenticaOperationBase.d.ts +0 -29
  182. package/lib/context/AgenticaOperationBase.js +0 -21
  183. package/lib/context/AgenticaOperationBase.js.map +0 -1
  184. package/src/context/AgenticaClassOperation.ts +0 -23
  185. package/src/context/AgenticaHttpOperation.ts +0 -27
  186. package/src/context/AgenticaOperationBase.ts +0 -57
  187. package/lib/{chatgpt → orchestrate}/ChatGptAgent.d.ts +0 -0
  188. package/lib/{chatgpt → orchestrate}/ChatGptAgent.js +0 -0
  189. package/src/{chatgpt → orchestrate}/ChatGptAgent.ts +1 -1
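Read together with the hunks that follow, the new factory/ modules (files 45–56 above) replace the event and prompt class constructors of 0.13.5 with plain factory functions, and the chatgpt/ orchestration sources move to orchestrate/. A minimal before/after sketch of the calling convention, assuming the factories are re-exported from the package root (the changes to package/src/index.ts and package/src/factory/index.ts hint at this, but the public export surface is not shown in this diff):

```typescript
// Sketch only: property names come from the hunks below; the import path is an
// assumption, since this diff does not show what package/src/index.ts re-exports.
import { createTextPrompt } from "@agentica/core";

// 0.13.5 style (removed):
//   const prompt = new AgenticaTextPrompt({ role: "assistant", text: "..." });
// 0.14.1 style (added):
const prompt = createTextPrompt({
  role: "assistant",
  text: "Hello from the new factory function.",
});
console.log(prompt.text);
```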
@@ -1,3 +1,8 @@
+ import {
+ ChatGptTypeChecker,
+ HttpLlm,
+ } from "@samchon/openapi";
+
  import type {
  IChatGptSchema,
  IHttpMigrateRoute,
@@ -7,30 +12,25 @@ import type {
  import type OpenAI from "openai";
  import type { IValidation } from "typia";
  import type { AgenticaContext } from "../context/AgenticaContext";
-
  import type { AgenticaOperation } from "../context/AgenticaOperation";
  import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";
- import {
- ChatGptTypeChecker,
- HttpLlm,
- } from "@samchon/openapi";
- import { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
- import { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
- import { AgenticaCallEvent } from "../events/AgenticaCallEvent";
- import { AgenticaCancelEvent } from "../events/AgenticaCancelEvent";
- import { AgenticaExecuteEvent } from "../events/AgenticaExecuteEvent";
- import { AgenticaTextEvent } from "../events/AgenticaTextEvent";
+ import type { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
+ import type { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
+ import type { AgenticaExecutePrompt } from "../prompts/AgenticaExecutePrompt";
+ import type { AgenticaCallEvent } from "../events/AgenticaCallEvent";
+
  import { AgenticaConstant } from "../internal/AgenticaConstant";
  import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
  import { StreamUtil } from "../internal/StreamUtil";
- import { AgenticaExecutePrompt } from "../prompts/AgenticaExecutePrompt";
- import { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
  import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
  import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
  import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { createCallEvent, createCancelEvent, createExecuteEvent, createTextEvent, createValidateEvent } from "../factory/events";
+ import { createOperationSelection } from "../factory/operations";
+ import { createCancelPrompt, createExecutePrompt, createTextPrompt } from "../factory/prompts";

- export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
+ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
  // ----
  // EXECUTE CHATGPT API
  // ----
@@ -109,7 +109,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  async (): Promise<
  [AgenticaExecutePrompt<Model>, AgenticaCancelPrompt<Model>]
  > => {
- const call: AgenticaCallEvent<Model> = new AgenticaCallEvent({
+ const call: AgenticaCallEvent<Model> = createCallEvent({
  id: tc.id,
  operation,
  // @TODO add type assertion!
@@ -128,8 +128,8 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  call,
  0,
  );
- await ctx.dispatch(
- new AgenticaExecuteEvent({
+ void ctx.dispatch(
+ createExecuteEvent({
  id: call.id,
  operation: call.operation,
  arguments: execute.arguments,
@@ -141,9 +141,9 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  name: call.operation.name,
  reason: "completed",
  });
- await ctx.dispatch(
- new AgenticaCancelEvent({
- selection: new AgenticaOperationSelection({
+ void ctx.dispatch(
+ createCancelEvent({
+ selection: createOperationSelection({
  operation: call.operation,
  reason: "complete",
  }),
@@ -151,10 +151,10 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  );
  return [
  execute,
- new AgenticaCancelPrompt({
+ createCancelPrompt({
  id: call.id,
  selections: [
- new AgenticaOperationSelection({
+ createOperationSelection({
  operation: call.operation,
  reason: "complete",
  }),
@@ -171,12 +171,12 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  && choice.message.content.length > 0
  ) {
  closures.push(async () => {
- const value: AgenticaTextPrompt = new AgenticaTextPrompt({
+ const value: AgenticaTextPrompt = createTextPrompt({
  role: "assistant",
  text: choice.message.content!,
  });
- await ctx.dispatch(
- new AgenticaTextEvent({
+ void ctx.dispatch(
+ createTextEvent({
  role: "assistant",
  get: () => value.text,
  done: () => true,
@@ -200,18 +200,24 @@ async function propagate<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mo
  const check: IValidation<unknown> = call.operation.function.validate(
  call.arguments,
  );
- if (
- check.success === false
- && retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
- ) {
- const trial: AgenticaExecutePrompt<Model> | null = await correct(
- ctx,
- call,
- retry,
- check.errors,
+ if (check.success === false) {
+ void ctx.dispatch(
+ createValidateEvent({
+ id: call.id,
+ operation: call.operation,
+ result: check,
+ }),
  );
- if (trial !== null) {
- return trial;
+ if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
+ const trial: AgenticaExecutePrompt<Model> | null = await correct(
+ ctx,
+ call,
+ retry,
+ check.errors,
+ );
+ if (trial !== null) {
+ return trial;
+ }
  }
  }
  try {
@@ -229,7 +235,7 @@ async function propagate<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mo
  (success === false
  ? await correct(ctx, call, retry, response.body)
  : null)
- ?? new AgenticaExecutePrompt({
+ ?? createExecutePrompt({
  operation: call.operation,
  id: call.id,
  arguments: call.arguments,
@@ -239,7 +245,7 @@ async function propagate<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mo
  }
  catch (error) {
  // DISPATCH ERROR
- return new AgenticaExecutePrompt({
+ return createExecutePrompt({
  operation: call.operation,
  id: call.id,
  arguments: call.arguments,
@@ -267,11 +273,18 @@ async function propagate<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mo
  call.arguments,
  );
  if (check.success === false) {
+ void ctx.dispatch(
+ createValidateEvent({
+ id: call.id,
+ operation: call.operation,
+ result: check,
+ }),
+ );
  return (
  (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
  ? await correct(ctx, call, retry, check.errors)
  : null)
- ?? new AgenticaExecutePrompt({
+ ?? createExecutePrompt({
  id: call.id,
  operation: call.operation,
  arguments: call.arguments,
@@ -286,7 +299,7 @@ async function propagate<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mo
  // EXECUTE FUNCTION
  try {
  const value = await executeClassOperation(call.operation, call.arguments);
- return new AgenticaExecutePrompt({
+ return createExecutePrompt({
  id: call.id,
  operation: call.operation,
  arguments: call.arguments,
@@ -294,18 +307,18 @@ async function propagate<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mo
  });
  }
  catch (error) {
- return new AgenticaExecutePrompt({
+ return createExecutePrompt({
  id: call.id,
  operation: call.operation,
  arguments: call.arguments,
  value:
- error instanceof Error
- ? {
- ...error,
- name: error.name,
- message: error.message,
- }
- : error,
+ error instanceof Error
+ ? {
+ ...error,
+ name: error.name,
+ message: error.message,
+ }
+ : error,
  });
  }
  }
@@ -317,7 +330,6 @@ async function executeHttpOperation<Model extends ILlmSchema.Model>(operation: A
  application: operation.controller.application,
  function: operation.function,
  };
-
  return operation.controller.execute !== undefined
  ? operation.controller.execute({ ...controllerBaseArguments, arguments: operationArguments })
  : HttpLlm.propagate({ ...controllerBaseArguments, input: operationArguments });
@@ -441,7 +453,7 @@ async function correct<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Mode
  }
  return propagate(
  ctx,
- new AgenticaCallEvent({
+ createCallEvent({
  id: toolCall.id,
  operation: call.operation,
  arguments: JSON.parse(toolCall.function.arguments) as Record<string, unknown>,
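The propagate() changes above dispatch a new validate event (AgenticaValidateEvent, files 42–44) whenever typia validation of the LLM-generated arguments fails, before any retry through correct(). A hedged listener sketch follows; the "validate" event key and the wiring through Agentica's on() API are assumptions read off this diff, not documented here:

```typescript
import type { Agentica } from "@agentica/core";

// Assumed shape: the hunk above dispatches createValidateEvent({ id, operation, result }),
// where `result` is the typia IValidation returned by operation.function.validate().
declare const agent: Agentica<"chatgpt">; // hypothetical, already-configured agent

agent.on("validate", (event) => {
  if (event.result.success === false) {
    console.warn(
      `Argument validation failed for ${event.operation.name}:`,
      event.result.errors,
    );
  }
});
```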
@@ -1,23 +1,26 @@
+ import typia from "typia";
+ import { v4 } from "uuid";
+
  import type { ILlmApplication, ILlmSchema } from "@samchon/openapi";
  import type OpenAI from "openai";
  import type { IValidation } from "typia";
  import type { AgenticaContext } from "../context/AgenticaContext";
  import type { AgenticaOperation } from "../context/AgenticaOperation";
-
  import type { __IChatCancelFunctionsApplication } from "../context/internal/__IChatCancelFunctionsApplication";
  import type { __IChatFunctionReference } from "../context/internal/__IChatFunctionReference";
  import type { AgenticaEvent } from "../events/AgenticaEvent";
- import typia from "typia";
- import { v4 } from "uuid";
- import { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
- import { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
- import { AgenticaCancelEvent } from "../events/AgenticaCancelEvent";
+ import type { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
+ import type { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
+
  import { AgenticaConstant } from "../internal/AgenticaConstant";
  import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
  import { StreamUtil } from "../internal/StreamUtil";
  import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
  import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { createCancelPrompt } from "../factory/prompts";
+ import { createCancelEvent } from "../factory/events";
+ import { createOperationSelection } from "../factory/operations";

  const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
  __IChatCancelFunctionsApplication,
@@ -30,7 +33,7 @@ interface IFailure {
  validation: IValidation.IFailure;
  }

- export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaCancelPrompt<Model>[]> {
+ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaCancelPrompt<Model>[]> {
  if (ctx.operations.divided === undefined) {
  return step(ctx, ctx.operations.array, 0);
  }
@@ -75,7 +78,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  }

  // RE-COLLECT SELECT FUNCTION EVENTS
- const collection: AgenticaCancelPrompt<Model> = new AgenticaCancelPrompt({
+ const collection: AgenticaCancelPrompt<Model> = createCancelPrompt({
  id: v4(),
  selections: [],
  });
@@ -91,7 +94,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  return [collection];
  }

- export async function cancelFunction<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, reference: __IChatFunctionReference): Promise<AgenticaOperationSelection<Model> | null> {
+ async function cancelFunction<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, reference: __IChatFunctionReference): Promise<AgenticaOperationSelection<Model> | null> {
  const index: number = ctx.stack.findIndex(
  item => item.operation.name === reference.name,
  );
@@ -102,8 +105,8 @@ export async function cancelFunction<Model extends ILlmSchema.Model>(ctx: Agenti
  const item: AgenticaOperationSelection<Model> = ctx.stack[index]!;
  ctx.stack.splice(index, 1);
  await ctx.dispatch(
- new AgenticaCancelEvent({
- selection: new AgenticaOperationSelection({
+ createCancelEvent({
+ selection: createOperationSelection({
  operation: item.operation,
  reason: reference.reason,
  }),
@@ -243,11 +246,10 @@ async function step<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>,
  continue;
  }

- const collection: AgenticaCancelPrompt<Model>
- = new AgenticaCancelPrompt({
- id: tc.id,
- selections: [],
- });
+ const collection: AgenticaCancelPrompt<Model> = createCancelPrompt({
+ id: tc.id,
+ selections: [],
+ });

  for (const reference of input.functions) {
  const operation = await cancelFunction(ctx, reference);
@@ -1,3 +1,5 @@
+ import { json } from "typia";
+
  import type {
  ChatCompletion,
  ChatCompletionChunk,
@@ -5,12 +7,11 @@ import type {
  ChatCompletionMessageToolCall,
  CompletionUsage,
  } from "openai/resources";
- import { json } from "typia";

  import { ByteArrayUtil } from "../internal/ByteArrayUtil";
  import { ChatGptUsageAggregator } from "./ChatGptUsageAggregator";

- export function transformCompletionChunk(source: string | Uint8Array): ChatCompletionChunk {
+ function transformCompletionChunk(source: string | Uint8Array): ChatCompletionChunk {
  const str
  = source instanceof Uint8Array ? ByteArrayUtil.toUtf8(source) : source;
  return json.assertParse<
@@ -18,7 +19,7 @@ export function transformCompletionChunk(source: string | Uint8Array): ChatCompl
  >(str);
  }

- export function accumulate(origin: ChatCompletion, chunk: ChatCompletionChunk): ChatCompletion {
+ function accumulate(origin: ChatCompletion, chunk: ChatCompletionChunk): ChatCompletion {
  const choices = origin.choices;
  chunk.choices.forEach((choice) => {
  const accChoice = choices[choice.index];
@@ -75,7 +76,7 @@ export function accumulate(origin: ChatCompletion, chunk: ChatCompletionChunk):
  };
  }

- export function merge(chunks: ChatCompletionChunk[]): ChatCompletion {
+ function merge(chunks: ChatCompletionChunk[]): ChatCompletion {
  const firstChunk = chunks[0];
  if (firstChunk === undefined) {
  throw new Error("No chunks received");
@@ -93,7 +94,7 @@ export function merge(chunks: ChatCompletionChunk[]): ChatCompletion {
  } as ChatCompletion);
  }

- export function mergeChoice(acc: ChatCompletion.Choice, cur: ChatCompletionChunk.Choice): ChatCompletion.Choice {
+ function mergeChoice(acc: ChatCompletion.Choice, cur: ChatCompletionChunk.Choice): ChatCompletion.Choice {
  if (acc.finish_reason == null && cur.finish_reason != null) {
  acc.finish_reason = cur.finish_reason;
  }
@@ -148,7 +149,7 @@ export function mergeChoice(acc: ChatCompletion.Choice, cur: ChatCompletionChunk
  return acc;
  }

- export function mergeToolCalls(acc: ChatCompletionMessageToolCall, cur: ChatCompletionChunk.Choice.Delta.ToolCall): ChatCompletionMessageToolCall {
+ function mergeToolCalls(acc: ChatCompletionMessageToolCall, cur: ChatCompletionChunk.Choice.Delta.ToolCall): ChatCompletionMessageToolCall {
  if (cur.function != null) {
  acc.function.arguments += cur.function.arguments ?? "";
  acc.function.name += cur.function.name ?? "";
@@ -1,18 +1,19 @@
  import type { ILlmSchema } from "@samchon/openapi";
  import type OpenAI from "openai";
-
  import type { AgenticaContext } from "../context/AgenticaContext";
  import type { AgenticaExecutePrompt } from "../prompts/AgenticaExecutePrompt";
- import { AgenticaDescribeEvent } from "../events/AgenticaDescribeEvent";
+ import type { AgenticaDescribePrompt } from "../prompts/AgenticaDescribePrompt";
+
  import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
  import { MPSC } from "../internal/MPSC";
  import { StreamUtil } from "../internal/StreamUtil";
- import { AgenticaDescribePrompt } from "../prompts/AgenticaDescribePrompt";
  import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
  import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { createDescribeEvent } from "../factory/events";
+ import { createDescribePrompt } from "../factory/prompts";

- export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, histories: AgenticaExecutePrompt<Model>[]): Promise<AgenticaDescribePrompt<Model>[]> {
+ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>, histories: AgenticaExecutePrompt<Model>[]): Promise<AgenticaDescribePrompt<Model>[]> {
  if (histories.length === 0) {
  return [];
  }
@@ -78,7 +79,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  mpsc.produce(choice.delta.content);

  void ctx.dispatch(
- new AgenticaDescribeEvent({
+ createDescribeEvent({
  executes: histories,
  stream: mpsc.consumer,
  done: () => mpsc.done(),
@@ -113,7 +114,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  .filter(str => str !== null)
  .map(
  content =>
- new AgenticaDescribePrompt({
+ createDescribePrompt({
  executes: histories,
  text: content,
  }),
@@ -1,9 +1,8 @@
  import type { IHttpResponse, ILlmSchema } from "@samchon/openapi";
  import type OpenAI from "openai";
-
  import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";

- export function decode<Model extends ILlmSchema.Model>(history: AgenticaPrompt<Model>): OpenAI.ChatCompletionMessageParam[] {
+ function decode<Model extends ILlmSchema.Model>(history: AgenticaPrompt<Model>): OpenAI.ChatCompletionMessageParam[] {
  // NO NEED TO DECODE DESCRIBE
  if (history.type === "describe") {
  return [];
@@ -1,25 +1,26 @@
+ import typia from "typia";
+
  import type { ILlmFunction, ILlmSchema } from "@samchon/openapi";
  import type OpenAI from "openai";
  import type { AgenticaContext } from "../context/AgenticaContext";
-
  import type { __IChatInitialApplication } from "../context/internal/__IChatInitialApplication";
  import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";
- import typia from "typia";
- import { AgenticaTextEvent } from "../events/AgenticaTextEvent";
+
  import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
  import { MPSC } from "../internal/MPSC";
  import { StreamUtil } from "../internal/StreamUtil";
- import { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
  import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
  import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { createTextEvent } from "../factory/events";
+ import { createTextPrompt } from "../factory/prompts";

  const FUNCTION: ILlmFunction<"chatgpt"> = typia.llm.application<
  __IChatInitialApplication,
  "chatgpt"
  >().functions[0]!;

- export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
+ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
  // ----
  // EXECUTE CHATGPT API
  // ----
@@ -106,7 +107,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  mpsc.produce(choice.delta.content);

  void ctx.dispatch(
- new AgenticaTextEvent({
+ createTextEvent({
  role: "assistant",
  stream: mpsc.consumer,
  done: () => mpsc.done(),
@@ -143,7 +144,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  && choice.message.content != null
  ) {
  prompts.push(
- new AgenticaTextPrompt({
+ createTextPrompt({
  role: "assistant",
  text: choice.message.content,
  }),
@@ -1,26 +1,28 @@
+ import typia from "typia";
+ import { v4 } from "uuid";
+
  import type { ILlmApplication, ILlmSchema } from "@samchon/openapi";
  import type OpenAI from "openai";
  import type { IValidation } from "typia";
  import type { AgenticaContext } from "../context/AgenticaContext";
  import type { AgenticaOperation } from "../context/AgenticaOperation";
-
  import type { __IChatFunctionReference } from "../context/internal/__IChatFunctionReference";
  import type { __IChatSelectFunctionsApplication } from "../context/internal/__IChatSelectFunctionsApplication";
  import type { AgenticaEvent } from "../events/AgenticaEvent";
  import type { AgenticaPrompt } from "../prompts/AgenticaPrompt";
- import typia from "typia";
- import { v4 } from "uuid";
- import { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
- import { AgenticaSelectEvent } from "../events/AgenticaSelectEvent";
- import { AgenticaTextEvent } from "../events/AgenticaTextEvent";
+ import type { AgenticaSelectPrompt } from "../prompts/AgenticaSelectPrompt";
+ import type { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
+ import type { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
+
  import { AgenticaConstant } from "../internal/AgenticaConstant";
  import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
  import { StreamUtil } from "../internal/StreamUtil";
- import { AgenticaSelectPrompt } from "../prompts/AgenticaSelectPrompt";
- import { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
  import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
  import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+ import { createSelectPrompt, createTextPrompt } from "../factory/prompts";
+ import { createOperationSelection } from "../factory/operations";
+ import { createSelectEvent, createTextEvent } from "../factory/events";

  const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
  __IChatSelectFunctionsApplication,
@@ -33,7 +35,7 @@ interface IFailure {
  validation: IValidation.IFailure;
  }

- export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
+ async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>): Promise<AgenticaPrompt<Model>[]> {
  if (ctx.operations.divided === undefined) {
  return step(ctx, ctx.operations.array, 0);
  }
@@ -78,7 +80,7 @@ export async function execute<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  }

  // RE-COLLECT SELECT FUNCTION EVENTS
- const collection: AgenticaSelectPrompt<Model> = new AgenticaSelectPrompt({
+ const collection: AgenticaSelectPrompt<Model> = createSelectPrompt({
  id: v4(),
  selections: [],
  });
@@ -224,7 +226,7 @@ async function step<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>,
  }

  const collection: AgenticaSelectPrompt<Model>
- = new AgenticaSelectPrompt({
+ = createSelectPrompt({
  id: tc.id,
  selections: [],
  });
@@ -237,7 +239,7 @@ async function step<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>,
  }

  collection.selections.push(
- new AgenticaOperationSelection({
+ createOperationSelection({
  operation,
  reason: reference.reason,
  }),
@@ -255,14 +257,14 @@ async function step<Model extends ILlmSchema.Model>(ctx: AgenticaContext<Model>,
  choice.message.role === "assistant"
  && choice.message.content != null
  ) {
- const text: AgenticaTextPrompt = new AgenticaTextPrompt({
+ const text: AgenticaTextPrompt = createTextPrompt({
  role: "assistant",
  text: choice.message.content,
  });
  prompts.push(text);

  await ctx.dispatch(
- new AgenticaTextEvent({
+ createTextEvent({
  role: "assistant",
  stream: StreamUtil.to(text.text),
  join: async () => Promise.resolve(text.text),
@@ -284,13 +286,13 @@ async function selectFunction<Model extends ILlmSchema.Model>(ctx: AgenticaConte
  }

  const selection: AgenticaOperationSelection<Model>
- = new AgenticaOperationSelection({
+ = createOperationSelection({
  operation,
  reason: reference.reason,
  });
  ctx.stack.push(selection);
  void ctx.dispatch(
- new AgenticaSelectEvent({
+ createSelectEvent({
  selection,
  }),
  );
@@ -1,6 +1,6 @@
  import type { CompletionUsage } from "openai/resources";

- export function sumCompletionTokenDetail(x: CompletionUsage.CompletionTokensDetails, y: CompletionUsage.CompletionTokensDetails): CompletionUsage.CompletionTokensDetails {
+ function sumCompletionTokenDetail(x: CompletionUsage.CompletionTokensDetails, y: CompletionUsage.CompletionTokensDetails): CompletionUsage.CompletionTokensDetails {
  return {
  accepted_prediction_tokens:
  (x.accepted_prediction_tokens ?? 0)
@@ -12,14 +12,14 @@ export function sumCompletionTokenDetail(x: CompletionUsage.CompletionTokensDeta
  };
  }

- export function sumPromptTokenDetail(x: CompletionUsage.PromptTokensDetails, y: CompletionUsage.PromptTokensDetails): CompletionUsage.PromptTokensDetails {
+ function sumPromptTokenDetail(x: CompletionUsage.PromptTokensDetails, y: CompletionUsage.PromptTokensDetails): CompletionUsage.PromptTokensDetails {
  return {
  audio_tokens: (x.audio_tokens ?? 0) + (y.audio_tokens ?? 0),
  cached_tokens: (x.cached_tokens ?? 0) + (y.cached_tokens ?? 0),
  };
  }

- export function sum(x: CompletionUsage, y: CompletionUsage): CompletionUsage {
+ function sum(x: CompletionUsage, y: CompletionUsage): CompletionUsage {
  return {
  prompt_tokens: (x.prompt_tokens ?? 0) + (y.prompt_tokens ?? 0),
  completion_tokens:
@@ -0,0 +1,8 @@
+ export * from "./ChatGptAgent";
+ export * from "./ChatGptCallFunctionAgent";
+ export * from "./ChatGptCancelFunctionAgent";
+ export * from "./ChatGptCompletionMessageUtil";
+ export * from "./ChatGptDescribeFunctionAgent";
+ export * from "./ChatGptHistoryDecoder";
+ export * from "./ChatGptInitializeFunctionAgent";
+ export * from "./ChatGptSelectFunctionAgent";
@@ -1,32 +1,11 @@
  import type { ILlmSchema } from "@samchon/openapi";
-
  import type { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
  import type { IAgenticaPromptJson } from "../json/IAgenticaPromptJson";
- import { AgenticaPromptBase } from "./AgenticaPromptBase";
+ import type { AgenticaPromptBase } from "./AgenticaPromptBase";

- export class AgenticaCancelPrompt<
+ export interface AgenticaCancelPrompt<
  Model extends ILlmSchema.Model,
  > extends AgenticaPromptBase<"cancel", IAgenticaPromptJson.ICancel> {
- public readonly id: string;
- public readonly selections: AgenticaOperationSelection<Model>[];
-
- public constructor(props: AgenticaCancelPrompt.IProps<Model>) {
- super("cancel");
- this.id = props.id;
- this.selections = props.selections;
- }
-
- public toJSON(): IAgenticaPromptJson.ICancel {
- return {
- type: this.type,
- id: this.id,
- selections: this.selections.map(s => s.toJSON()),
- };
- }
- }
- export namespace AgenticaCancelPrompt {
- export interface IProps<Model extends ILlmSchema.Model> {
- id: string;
- selections: AgenticaOperationSelection<Model>[];
- }
+ id: string;
+ selections: AgenticaOperationSelection<Model>[];
  }
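With the class converted to an interface above, cancel prompts can no longer be instantiated with new AgenticaCancelPrompt(...); they come from the createCancelPrompt and createOperationSelection factories used throughout the orchestrate hunks. A hedged construction sketch, assuming the factories and the AgenticaOperation type are re-exported from the package root (this diff does not show the final export surface):

```typescript
import { v4 } from "uuid";
import { createCancelPrompt, createOperationSelection } from "@agentica/core"; // import path assumed
import type { AgenticaOperation } from "@agentica/core";

// Hypothetical placeholder for an operation registered on the agent.
declare const operation: AgenticaOperation<"chatgpt">;

// 0.13.5: new AgenticaCancelPrompt({ id, selections: [new AgenticaOperationSelection({ ... })] })
// 0.14.1:
const cancel = createCancelPrompt({
  id: v4(),
  selections: [
    createOperationSelection({ operation, reason: "complete" }),
  ],
});
console.log(cancel.id, cancel.selections.length);
```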