@agentica/core 0.27.3 → 0.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/README.md +17 -2
  2. package/lib/Agentica.d.ts +2 -1
  3. package/lib/Agentica.js +92 -60
  4. package/lib/Agentica.js.map +1 -1
  5. package/lib/MicroAgentica.d.ts +1 -0
  6. package/lib/MicroAgentica.js +88 -60
  7. package/lib/MicroAgentica.js.map +1 -1
  8. package/lib/constants/AgenticaDefaultPrompt.js +2 -18
  9. package/lib/constants/AgenticaDefaultPrompt.js.map +1 -1
  10. package/lib/context/AgenticaContext.d.ts +1 -1
  11. package/lib/context/MicroAgenticaContext.d.ts +2 -2
  12. package/lib/context/internal/AgenticaOperationComposer.spec.js +0 -2
  13. package/lib/context/internal/AgenticaOperationComposer.spec.js.map +1 -1
  14. package/lib/events/AgenticaEventBase.d.ts +9 -0
  15. package/lib/factory/events.d.ts +1 -2
  16. package/lib/factory/events.js +71 -7
  17. package/lib/factory/events.js.map +1 -1
  18. package/lib/factory/histories.js +27 -9
  19. package/lib/factory/histories.js.map +1 -1
  20. package/lib/histories/AgenticaCancelHistory.d.ts +1 -2
  21. package/lib/histories/AgenticaExecuteHistory.d.ts +0 -4
  22. package/lib/histories/AgenticaHistoryBase.d.ts +9 -0
  23. package/lib/histories/AgenticaSelectHistory.d.ts +1 -2
  24. package/lib/index.mjs +415 -731
  25. package/lib/index.mjs.map +1 -1
  26. package/lib/json/IAgenticaEventJson.d.ts +9 -1
  27. package/lib/json/IAgenticaHistoryJson.d.ts +11 -14
  28. package/lib/orchestrate/call.d.ts +2 -2
  29. package/lib/orchestrate/call.js +43 -72
  30. package/lib/orchestrate/call.js.map +1 -1
  31. package/lib/orchestrate/cancel.d.ts +1 -2
  32. package/lib/orchestrate/cancel.js +14 -39
  33. package/lib/orchestrate/cancel.js.map +1 -1
  34. package/lib/orchestrate/describe.d.ts +1 -2
  35. package/lib/orchestrate/describe.js +5 -17
  36. package/lib/orchestrate/describe.js.map +1 -1
  37. package/lib/orchestrate/execute.d.ts +1 -2
  38. package/lib/orchestrate/execute.js +9 -13
  39. package/lib/orchestrate/execute.js.map +1 -1
  40. package/lib/orchestrate/initialize.d.ts +1 -2
  41. package/lib/orchestrate/initialize.js +4 -12
  42. package/lib/orchestrate/initialize.js.map +1 -1
  43. package/lib/orchestrate/internal/{cancelFunction.js → cancelFunctionFromContext.js} +7 -7
  44. package/lib/orchestrate/internal/cancelFunctionFromContext.js.map +1 -0
  45. package/lib/orchestrate/internal/selectFunctionFromContext.js +24 -0
  46. package/lib/orchestrate/internal/selectFunctionFromContext.js.map +1 -0
  47. package/lib/orchestrate/select.d.ts +1 -2
  48. package/lib/orchestrate/select.js +21 -52
  49. package/lib/orchestrate/select.js.map +1 -1
  50. package/lib/structures/IAgenticaConfig.d.ts +1 -3
  51. package/lib/structures/IAgenticaExecutor.d.ts +6 -7
  52. package/lib/structures/IAgenticaVendor.d.ts +14 -0
  53. package/lib/structures/IMicroAgenticaExecutor.d.ts +2 -3
  54. package/lib/transformers/transformHistory.js +13 -8
  55. package/lib/transformers/transformHistory.js.map +1 -1
  56. package/lib/utils/ChatGptCompletionMessageUtil.js +5 -353
  57. package/lib/utils/ChatGptCompletionMessageUtil.js.map +1 -1
  58. package/package.json +9 -7
  59. package/src/Agentica.ts +127 -87
  60. package/src/MicroAgentica.ts +118 -81
  61. package/src/constants/AgenticaDefaultPrompt.ts +3 -20
  62. package/src/context/AgenticaContext.ts +1 -1
  63. package/src/context/MicroAgenticaContext.ts +2 -2
  64. package/src/context/internal/AgenticaOperationComposer.spec.ts +1 -2
  65. package/src/events/AgenticaEventBase.ts +12 -0
  66. package/src/factory/events.ts +78 -8
  67. package/src/factory/histories.ts +41 -11
  68. package/src/histories/AgenticaCancelHistory.ts +1 -2
  69. package/src/histories/AgenticaExecuteHistory.ts +0 -5
  70. package/src/histories/AgenticaHistoryBase.ts +12 -0
  71. package/src/histories/AgenticaSelectHistory.ts +1 -2
  72. package/src/json/IAgenticaEventJson.ts +11 -1
  73. package/src/json/IAgenticaHistoryJson.ts +14 -17
  74. package/src/orchestrate/call.ts +59 -109
  75. package/src/orchestrate/cancel.ts +77 -100
  76. package/src/orchestrate/describe.ts +16 -36
  77. package/src/orchestrate/execute.ts +17 -37
  78. package/src/orchestrate/initialize.ts +37 -50
  79. package/src/orchestrate/internal/{cancelFunction.ts → cancelFunctionFromContext.ts} +11 -11
  80. package/src/orchestrate/internal/{selectFunction.ts → selectFunctionFromContext.ts} +18 -13
  81. package/src/orchestrate/select.ts +113 -152
  82. package/src/structures/IAgenticaConfig.ts +1 -3
  83. package/src/structures/IAgenticaExecutor.ts +10 -8
  84. package/src/structures/IAgenticaVendor.ts +15 -0
  85. package/src/structures/IMicroAgenticaExecutor.ts +2 -3
  86. package/src/transformers/transformHistory.ts +19 -20
  87. package/src/utils/ChatGptCompletionMessageUtil.ts +5 -6
  88. package/lib/orchestrate/internal/cancelFunction.js.map +0 -1
  89. package/lib/orchestrate/internal/selectFunction.js +0 -35
  90. package/lib/orchestrate/internal/selectFunction.js.map +0 -1
  91. /package/lib/orchestrate/internal/{cancelFunction.d.ts → cancelFunctionFromContext.d.ts} +0 -0
  92. /package/lib/orchestrate/internal/{selectFunction.d.ts → selectFunctionFromContext.d.ts} +0 -0
package/src/orchestrate/call.ts

@@ -16,29 +16,25 @@ import {
  import type { AgenticaContext } from "../context/AgenticaContext";
  import type { AgenticaOperation } from "../context/AgenticaOperation";
  import type { MicroAgenticaContext } from "../context/MicroAgenticaContext";
+ import type { AgenticaAssistantMessageEvent, AgenticaExecuteEvent } from "../events";
  import type { AgenticaCallEvent } from "../events/AgenticaCallEvent";
- import type { AgenticaAssistantMessageHistory } from "../histories/AgenticaAssistantMessageHistory";
- import type { AgenticaCancelHistory } from "../histories/AgenticaCancelHistory";
- import type { AgenticaExecuteHistory } from "../histories/AgenticaExecuteHistory";
- import type { AgenticaHistory } from "../histories/AgenticaHistory";
  import type { MicroAgenticaHistory } from "../histories/MicroAgenticaHistory";

  import { AgenticaConstant } from "../constants/AgenticaConstant";
  import { AgenticaDefaultPrompt } from "../constants/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../constants/AgenticaSystemPrompt";
  import { isAgenticaContext } from "../context/internal/isAgenticaContext";
- import { creatAssistantEvent, createCallEvent, createExecuteEvent, createValidateEvent } from "../factory/events";
- import { createAssistantMessageHistory, createCancelHistory, createExecuteHistory, decodeHistory, decodeUserMessageContent } from "../factory/histories";
- import { createOperationSelection } from "../factory/operations";
+ import { creatAssistantMessageEvent, createCallEvent, createExecuteEvent, createValidateEvent } from "../factory/events";
+ import { decodeHistory, decodeUserMessageContent } from "../factory/histories";
  import { ChatGptCompletionMessageUtil } from "../utils/ChatGptCompletionMessageUtil";
  import { StreamUtil, toAsyncGenerator } from "../utils/StreamUtil";

- import { cancelFunction } from "./internal/cancelFunction";
+ import { cancelFunctionFromContext } from "./internal/cancelFunctionFromContext";

  export async function call<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model> | MicroAgenticaContext<Model>,
  operations: AgenticaOperation<Model>[],
- ): Promise<AgenticaHistory<Model>[]> {
+ ): Promise<AgenticaExecuteEvent<Model>[]> {
  // ----
  // EXECUTE CHATGPT API
  // ----
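The headline change in call.ts: the orchestrator no longer resolves to history records, but to the execute events it dispatches. A minimal caller sketch under that assumption; the runToolCalls wrapper is illustrative, not part of the package, and the event is assumed to expose the same operation / arguments / value fields its factory receives.

import type { ILlmSchema } from "@samchon/openapi";

import type { AgenticaContext } from "../context/AgenticaContext";
import type { AgenticaOperation } from "../context/AgenticaOperation";
import type { AgenticaExecuteEvent } from "../events";

import { call } from "./call";

// Hypothetical caller: `call` used to resolve to AgenticaHistory<Model>[], and now
// resolves to the execute events it dispatched, one per completed tool call.
async function runToolCalls<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model>,
  operations: AgenticaOperation<Model>[],
): Promise<void> {
  const events: AgenticaExecuteEvent<Model>[] = await call(ctx, operations);
  for (const event of events) {
    // assumed accessors, mirroring the props passed to createExecuteEvent below
    console.log(event.operation.name, event.arguments, event.value);
  }
}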
@@ -93,24 +89,15 @@ export async function call<Model extends ILlmSchema.Model>(
  }) as OpenAI.ChatCompletionTool,
  ),
  tool_choice: "auto",
- parallel_tool_calls: false,
+ // parallel_tool_calls: false,
  });

  // ----
  // PROCESS COMPLETION
  // ----
- const closures: Array<
- () => Promise<
- Array<
- | AgenticaExecuteHistory<Model>
- | AgenticaCancelHistory<Model>
- | AgenticaAssistantMessageHistory
- >
- >
- > = [];
-
  const chunks = await StreamUtil.readAll(completionStream);
  const completion = ChatGptCompletionMessageUtil.merge(chunks);
+ const executes: AgenticaExecuteEvent<Model>[] = [];

  for (const choice of completion.choices) {
  for (const tc of choice.message.tool_calls ?? []) {
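Two smaller shifts in this hunk: the deferred closures accumulator is replaced by a flat executes array, and parallel_tool_calls is commented out, so the request falls back to the provider default instead of pinning sequential tool calls. A rough sketch of the resulting request shape, using openai-node types and an illustrative model name; everything except the flags shown above is abbreviated.

import type OpenAI from "openai";

// Abbreviated request body after this hunk: leaving `parallel_tool_calls` unset
// falls back to the OpenAI default instead of forcing one tool call at a time.
// The model name is illustrative; the real one comes from the vendor config.
const body: OpenAI.ChatCompletionCreateParamsStreaming = {
  model: "gpt-4o-mini",
  stream: true,
  messages: [],
  tools: [], // built from the candidate operations, as above
  tool_choice: "auto",
  // parallel_tool_calls: false,
};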
@@ -120,59 +107,34 @@ export async function call<Model extends ILlmSchema.Model>(
  if (operation === undefined) {
  continue;
  }
- closures.push(
- async (): Promise<
- Array<AgenticaExecuteHistory<Model> | AgenticaCancelHistory<Model>>
- > => {
- const call: AgenticaCallEvent<Model> = createCallEvent({
- id: tc.id,
- operation,
- // @TODO add type assertion!
- arguments: JSON.parse(tc.function.arguments) as Record<string, unknown>,
- });
- if (call.operation.protocol === "http") {
- fillHttpArguments({
- operation: call.operation,
- arguments: call.arguments,
- });
- }
- await ctx.dispatch(call);
-
- const execute: AgenticaExecuteHistory<Model> = await propagate(
- ctx,
- call,
- 0,
- );
- ctx.dispatch(
- createExecuteEvent({
- id: call.id,
- operation: call.operation,
- arguments: execute.arguments,
- value: execute.value,
- }),
- ).catch(() => {});
-
- if (isAgenticaContext(ctx)) {
- cancelFunction(ctx, {
- name: call.operation.name,
- reason: "completed",
- });
- return [
- execute,
- createCancelHistory({
- id: call.id,
- selections: [
- createOperationSelection({
- operation: call.operation,
- reason: "complete",
- }),
- ],
- }),
- ];
- }
- return [execute];
- },
+ const call: AgenticaCallEvent<Model> = createCallEvent({
+ id: tc.id,
+ operation,
+ // @TODO add type assertion!
+ arguments: JSON.parse(tc.function.arguments) as Record<string, unknown>,
+ });
+ if (call.operation.protocol === "http") {
+ fillHttpArguments({
+ operation: call.operation,
+ arguments: call.arguments,
+ });
+ }
+ ctx.dispatch(call);
+
+ const exec: AgenticaExecuteEvent<Model> = await propagate(
+ ctx,
+ call,
+ 0,
  );
+ ctx.dispatch(exec);
+ executes.push(exec);
+
+ if (isAgenticaContext(ctx)) {
+ cancelFunctionFromContext(ctx, {
+ name: call.operation.name,
+ reason: "completed",
+ });
+ }
  }
  }
  if (
@@ -180,30 +142,24 @@ export async function call<Model extends ILlmSchema.Model>(
  && choice.message.content != null
  && choice.message.content.length !== 0
  ) {
- closures.push(async () => {
- const value: AgenticaAssistantMessageHistory = createAssistantMessageHistory(
- { text: choice.message.content! },
- );
- ctx.dispatch(
- creatAssistantEvent({
- get: () => value.text,
- done: () => true,
- stream: toAsyncGenerator(value.text),
- join: async () => Promise.resolve(value.text),
- }),
- ).catch(() => {});
- return [value];
+ const text: string = choice.message.content;
+ const event: AgenticaAssistantMessageEvent = creatAssistantMessageEvent({
+ get: () => text,
+ done: () => true,
+ stream: toAsyncGenerator(text),
+ join: async () => Promise.resolve(text),
  });
+ ctx.dispatch(event);
  }
  }
- return (await Promise.all(closures.map(async fn => fn()))).flat();
+ return executes;
  }

  async function propagate<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model> | MicroAgenticaContext<Model>,
  call: AgenticaCallEvent<Model>,
  retry: number,
- ): Promise<AgenticaExecuteHistory<Model>> {
+ ): Promise<AgenticaExecuteEvent<Model>> {
  switch (call.operation.protocol) {
  case "http": {
  return propagateHttp({ ctx, operation: call.operation, call, retry });
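With the closures gone, each tool call is handled inline: dispatch the call event, run propagate, dispatch and collect the execute event, then move on. From the outside that yields a strict call-then-execute ordering per tool call. A hedged observer sketch, assuming the Agentica class's on() listener surface (not shown in this diff) and that the event type strings follow the lowercase class names.

import { Agentica } from "@agentica/core";

// Hypothetical observer: each tool call now yields a "call" event followed by
// its "execute" event, dispatched inline rather than after a Promise.all batch.
declare const agent: Agentica<"chatgpt">;

agent.on("call", (event) => {
  console.log("calling", event.operation.name, event.arguments);
});
agent.on("execute", (event) => {
  console.log("executed", event.operation.name, event.value);
});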
@@ -228,7 +184,7 @@ async function propagateHttp<Model extends ILlmSchema.Model>(
  call: AgenticaCallEvent<Model>;
  retry: number;
  },
- ): Promise<AgenticaExecuteHistory<Model>> {
+ ): Promise<AgenticaExecuteEvent<Model>> {
  // ----
  // HTTP PROTOCOL
  // ----
@@ -243,10 +199,10 @@ async function propagateHttp<Model extends ILlmSchema.Model>(
  operation: props.operation,
  result: check,
  }),
- ).catch(() => {});
+ );

  if (props.retry++ < (props.ctx.config?.retry ?? AgenticaConstant.RETRY)) {
- const trial: AgenticaExecuteHistory<Model> | null = await correct(
+ const trial: AgenticaExecuteEvent<Model> | null = await correct(
  props.ctx,
  props.call,
  props.retry,
@@ -273,9 +229,8 @@ async function propagateHttp<Model extends ILlmSchema.Model>(
  (success === false
  ? await correct(props.ctx, props.call, props.retry, response.body)
  : null)
- ?? createExecuteHistory({
+ ?? createExecuteEvent({
  operation: props.call.operation,
- id: props.call.id,
  arguments: props.call.arguments,
  value: response,
  })
@@ -283,9 +238,8 @@ async function propagateHttp<Model extends ILlmSchema.Model>(
  }
  catch (error) {
  // DISPATCH ERROR
- return createExecuteHistory({
+ return createExecuteEvent({
  operation: props.call.operation,
- id: props.call.id,
  arguments: props.call.arguments,
  value: {
  status: 500,
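Every createExecuteHistory({ id: props.call.id, ... }) becomes createExecuteEvent({ ... }) without an id; presumably the event factory now stamps its own identifier (the dropped uuid import in cancel.ts below points the same way). A small sketch of the new factory shape as used in this error path; the toErrorEvent helper is hypothetical.

import type { ILlmSchema } from "@samchon/openapi";

import type { AgenticaCallEvent } from "../events/AgenticaCallEvent";

import { createExecuteEvent } from "../factory/events";

// Hypothetical helper mirroring the error path above: the factory no longer
// receives an `id`, so the event is presumably stamped with its own identifier.
function toErrorEvent<Model extends ILlmSchema.Model>(call: AgenticaCallEvent<Model>) {
  return createExecuteEvent({
    operation: call.operation,
    arguments: call.arguments,
    value: { status: 500, headers: {}, body: "..." }, // illustrative failure payload
  });
}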
@@ -308,7 +262,7 @@ async function propagateClass<Model extends ILlmSchema.Model>(props: {
  operation: AgenticaOperation.Class<Model>;
  call: AgenticaCallEvent<Model>;
  retry: number;
- }): Promise<AgenticaExecuteHistory<Model>> {
+ }): Promise<AgenticaExecuteEvent<Model>> {
  // ----
  // CLASS FUNCTION
  // ----
@@ -323,13 +277,12 @@ async function propagateClass<Model extends ILlmSchema.Model>(props: {
  operation: props.call.operation,
  result: check,
  }),
- ).catch(() => {});
+ );
  return (
  (props.retry++ < (props.ctx.config?.retry ?? AgenticaConstant.RETRY)
  ? await correct(props.ctx, props.call, props.retry, check.errors)
  : null)
- ?? createExecuteHistory({
- id: props.call.id,
+ ?? createExecuteEvent({
  operation: props.call.operation,
  arguments: props.call.arguments,
  value: {
@@ -343,16 +296,14 @@ async function propagateClass<Model extends ILlmSchema.Model>(props: {
  // EXECUTE FUNCTION
  try {
  const value = await executeClassOperation(props.operation, props.call.arguments);
- return createExecuteHistory({
- id: props.call.id,
+ return createExecuteEvent({
  operation: props.call.operation,
  arguments: props.call.arguments,
  value,
  });
  }
  catch (error) {
- return createExecuteHistory({
- id: props.call.id,
+ return createExecuteEvent({
  operation: props.call.operation,
  arguments: props.call.arguments,
  value:
@@ -372,23 +323,21 @@ async function propagateMcp<Model extends ILlmSchema.Model>(props: {
  operation: AgenticaOperation.Mcp<Model>;
  call: AgenticaCallEvent<Model>;
  retry: number;
- }): Promise<AgenticaExecuteHistory<Model>> {
+ }): Promise<AgenticaExecuteEvent<Model>> {
  // ----
  // MCP PROTOCOL
  // ----
  // @TODO: implement argument validation logic
  try {
  const value = await executeMcpOperation(props.operation, props.call.arguments);
- return createExecuteHistory({
- id: props.call.id,
+ return createExecuteEvent({
  operation: props.call.operation,
  arguments: props.call.arguments,
  value,
  });
  }
  catch (error) {
- return createExecuteHistory({
- id: props.call.id,
+ return createExecuteEvent({
  operation: props.call.operation,
  arguments: props.call.arguments,
  value:
@@ -454,7 +403,7 @@ async function correct<Model extends ILlmSchema.Model>(
  call: AgenticaCallEvent<Model>,
  retry: number,
  error: unknown,
- ): Promise<AgenticaExecuteHistory<Model> | null> {
+ ): Promise<AgenticaExecuteEvent<Model> | null> {
  // ----
  // EXECUTE CHATGPT API
  // ----
@@ -544,11 +493,12 @@ async function correct<Model extends ILlmSchema.Model>(
  name: call.operation.name,
  },
  },
- parallel_tool_calls: false,
+ // parallel_tool_calls: false,
  });

  const chunks = await StreamUtil.readAll(completionStream);
  const completion = ChatGptCompletionMessageUtil.merge(chunks);
+
  // ----
  // PROCESS COMPLETION
  // ----
package/src/orchestrate/cancel.ts

@@ -3,24 +3,23 @@ import type OpenAI from "openai";
  import type { IValidation } from "typia";

  import typia from "typia";
- import { v4 } from "uuid";

  import type { AgenticaContext } from "../context/AgenticaContext";
  import type { AgenticaOperation } from "../context/AgenticaOperation";
  import type { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
  import type { __IChatCancelFunctionsApplication } from "../context/internal/__IChatCancelFunctionsApplication";
  import type { __IChatFunctionReference } from "../context/internal/__IChatFunctionReference";
+ import type { AgenticaCancelEvent } from "../events/AgenticaCancelEvent";
  import type { AgenticaEvent } from "../events/AgenticaEvent";
- import type { AgenticaCancelHistory } from "../histories/AgenticaCancelHistory";

  import { AgenticaConstant } from "../constants/AgenticaConstant";
  import { AgenticaDefaultPrompt } from "../constants/AgenticaDefaultPrompt";
  import { AgenticaSystemPrompt } from "../constants/AgenticaSystemPrompt";
- import { createCancelHistory, decodeHistory, decodeUserMessageContent } from "../factory/histories";
+ import { decodeHistory, decodeUserMessageContent } from "../factory/histories";
  import { ChatGptCompletionMessageUtil } from "../utils/ChatGptCompletionMessageUtil";
  import { StreamUtil } from "../utils/StreamUtil";

- import { cancelFunction } from "./internal/cancelFunction";
+ import { cancelFunctionFromContext } from "./internal/cancelFunctionFromContext";

  const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
  __IChatCancelFunctionsApplication,
@@ -35,7 +34,7 @@ interface IFailure {

  export async function cancel<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model>,
- ): Promise<AgenticaCancelHistory<Model>[]> {
+ ): Promise<void> {
  if (ctx.operations.divided === undefined) {
  return step(ctx, ctx.operations.array, 0);
  }
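cancel() no longer resolves to AgenticaCancelHistory[]: cancellations are surfaced only through dispatched events. A minimal consumer sketch under the assumption that the Agentica class's on() listener surface (outside this file) is the place to observe them; the "cancel" type string comes from the e.type === "cancel" check further down.

import { Agentica } from "@agentica/core";

declare const agent: Agentica<"chatgpt">;

// Cancellations now arrive only as events; there is no return value to inspect.
agent.on("cancel", (event) => {
  // the payload carries which operation was dropped from the stack and why
  console.log(event);
});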
@@ -43,14 +42,15 @@ export async function cancel<Model extends ILlmSchema.Model>(
  const stacks: AgenticaOperationSelection<Model>[][]
  = ctx.operations.divided.map(() => []);
  const events: AgenticaEvent<Model>[] = [];
- const prompts: AgenticaCancelHistory<Model>[][] = await Promise.all(
+ await Promise.all(
  ctx.operations.divided.map(async (operations, i) =>
  step(
  {
  ...ctx,
  stack: stacks[i]!,
- dispatch: async (e) => {
+ dispatch: (e) => {
  events.push(e);
+ return e;
  },
  },
  operations,
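The per-divide shim also shows the dispatch signature change: it is synchronous now and expected to hand the event back, hence the added return e. A hedged sketch of the same buffering pattern as a standalone helper; withBufferedDispatch is hypothetical.

import type { ILlmSchema } from "@samchon/openapi";

import type { AgenticaContext } from "../context/AgenticaContext";
import type { AgenticaEvent } from "../events/AgenticaEvent";

// Sketch of the buffering pattern above: events from a divided operation set are
// captured locally first, then selectively forwarded to the real dispatcher.
function withBufferedDispatch<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model>,
  events: AgenticaEvent<Model>[],
): AgenticaContext<Model> {
  return {
    ...ctx,
    dispatch: (e) => {
      events.push(e);
      return e; // dispatch is synchronous now and hands the event back
    },
  };
}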
@@ -59,12 +59,11 @@ export async function cancel<Model extends ILlmSchema.Model>(
  ),
  );

- // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
- if (stacks.every(s => s.length === 0)) {
- return prompts[0]!;
- }
  // ELITICISM
- else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true) {
+ if (
+ (ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true
+ && stacks.some(s => s.length !== 0)
+ ) {
  return step(
  ctx,
  stacks
@@ -78,22 +77,12 @@ export async function cancel<Model extends ILlmSchema.Model>(
  0,
  );
  }
-
- // RE-COLLECT SELECT FUNCTION EVENTS
- const collection: AgenticaCancelHistory<Model> = createCancelHistory({
- id: v4(),
- selections: [],
- });
- for (const e of events) {
- if (e.type === "select") {
- collection.selections.push(e.selection);
- cancelFunction(ctx, {
- name: e.selection.operation.name,
- reason: e.selection.reason,
- });
- }
+ else {
+ const cancelled: AgenticaCancelEvent<Model>[]
+ = events.filter(e => e.type === "cancel");
+ (cancelled.length !== 0 ? cancelled : events)
+ .forEach(ctx.dispatch);
  }
- return [collection];
  }

  async function step<Model extends ILlmSchema.Model>(
@@ -101,64 +90,64 @@ async function step<Model extends ILlmSchema.Model>(
  operations: AgenticaOperation<Model>[],
  retry: number,
  failures?: IFailure[],
- ): Promise<AgenticaCancelHistory<Model>[]> {
+ ): Promise<void> {
  // ----
  // EXECUTE CHATGPT API
  // ----
  const completionStream = await ctx.request("cancel", {
  messages: [
- // COMMON SYSTEM PROMPT
- {
- role: "system",
- content: AgenticaDefaultPrompt.write(ctx.config),
- } satisfies OpenAI.ChatCompletionSystemMessageParam,
- // CANDIDATE FUNCTIONS
- {
- role: "assistant",
- tool_calls: [
- {
- type: "function",
- id: "getApiFunctions",
- function: {
- name: "getApiFunctions",
- arguments: JSON.stringify({}),
- },
+ // COMMON SYSTEM PROMPT
+ {
+ role: "system",
+ content: AgenticaDefaultPrompt.write(ctx.config),
+ } satisfies OpenAI.ChatCompletionSystemMessageParam,
+ // CANDIDATE FUNCTIONS
+ {
+ role: "assistant",
+ tool_calls: [
+ {
+ type: "function",
+ id: "getApiFunctions",
+ function: {
+ name: "getApiFunctions",
+ arguments: JSON.stringify({}),
  },
- ],
- },
- {
- role: "tool",
- tool_call_id: "getApiFunctions",
- content: JSON.stringify(
- operations.map(op => ({
- name: op.name,
- description: op.function.description,
- ...(op.protocol === "http"
- ? {
- method: op.function.method,
- path: op.function.path,
- tags: op.function.tags,
- }
- : {}),
- })),
- ),
- },
- // PREVIOUS HISTORIES
- ...ctx.histories.map(decodeHistory).flat(),
- // USER INPUT
- {
- role: "user",
- content: ctx.prompt.contents.map(decodeUserMessageContent),
- },
- // SYSTEM PROMPT
- {
- role: "system",
- content:
- ctx.config?.systemPrompt?.cancel?.(ctx.histories)
- ?? AgenticaSystemPrompt.CANCEL,
- },
- // TYPE CORRECTIONS
- ...emendMessages(failures ?? []),
+ },
+ ],
+ },
+ {
+ role: "tool",
+ tool_call_id: "getApiFunctions",
+ content: JSON.stringify(
+ operations.map(op => ({
+ name: op.name,
+ description: op.function.description,
+ ...(op.protocol === "http"
+ ? {
+ method: op.function.method,
+ path: op.function.path,
+ tags: op.function.tags,
+ }
+ : {}),
+ })),
+ ),
+ },
+ // PREVIOUS HISTORIES
+ ...ctx.histories.map(decodeHistory).flat(),
+ // USER INPUT
+ {
+ role: "user",
+ content: ctx.prompt.contents.map(decodeUserMessageContent),
+ },
+ // SYSTEM PROMPT
+ {
+ role: "system",
+ content:
+ ctx.config?.systemPrompt?.cancel?.(ctx.histories)
+ ?? AgenticaSystemPrompt.CANCEL,
+ },
+ // TYPE CORRECTIONS
+ ...emendMessages(failures ?? []),
  ],
  // STACK FUNCTIONS
  tools: [{
@@ -183,7 +172,7 @@ async function step<Model extends ILlmSchema.Model>(
  name: CONTAINER.functions[0]!.name,
  },
  },
- parallel_tool_calls: true,
+ // parallel_tool_calls: true,
  });

  const chunks = await StreamUtil.readAll(completionStream);
@@ -200,9 +189,9 @@ async function step<Model extends ILlmSchema.Model>(
  continue;
  }

- const input = JSON.parse(tc.function.arguments) as object;
+ const input: object = JSON.parse(tc.function.arguments) as object;
  const validation: IValidation<__IChatFunctionReference.IProps>
- = typia.validate<__IChatFunctionReference.IProps>(input);
+ = typia.validate<__IChatFunctionReference.IProps>(input);
  if (validation.success === false) {
  failures.push({
  id: tc.id,
@@ -220,7 +209,6 @@ async function step<Model extends ILlmSchema.Model>(
  // ----
  // PROCESS COMPLETION
  // ----
- const prompts: AgenticaCancelHistory<Model>[] = [];
  for (const choice of completion.choices) {
  // TOOL CALLING HANDLER
  if (choice.message.tool_calls != null) {
@@ -228,35 +216,24 @@ async function step<Model extends ILlmSchema.Model>(
  if (tc.type !== "function") {
  continue;
  }
-
- if (tc.function.name !== "cancelFunctions") {
+ else if (tc.function.name !== "cancelFunctions") {
  continue;
  }

- const input = typia.json.isParse<__IChatFunctionReference.IProps>(tc.function.arguments);
+ const input: __IChatFunctionReference.IProps | null
+ = typia.json.isParse<
+ __IChatFunctionReference.IProps
+ >(tc.function.arguments);
  if (input === null) {
  continue;
  }

- const collection: AgenticaCancelHistory<Model> = createCancelHistory({
- id: tc.id,
- selections: [],
- });
-
  for (const reference of input.functions) {
- const operation = cancelFunction(ctx, reference);
- if (operation !== null) {
- collection.selections.push(operation);
- }
- }
-
- if (collection.selections.length !== 0) {
- prompts.push(collection);
+ cancelFunctionFromContext(ctx, reference);
  }
  }
  }
  }
- return prompts;
  }

  function emendMessages(failures: IFailure[]): OpenAI.ChatCompletionMessageParam[] {
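The selection bookkeeping that used to live here now sits behind cancelFunctionFromContext (the renamed internal/cancelFunction): judging from the removed code, the helper resolves the referenced function, drops it from the context's stack, and dispatches the matching cancel event itself, which is why step() no longer builds or returns AgenticaCancelHistory records. A rough statement of that contract as an assumption, not the helper's actual source, which is outside these hunks.

import type { ILlmSchema } from "@samchon/openapi";

import type { AgenticaContext } from "../context/AgenticaContext";
import type { __IChatFunctionReference } from "../context/internal/__IChatFunctionReference";

// Assumed contract of the renamed helper: resolve the referenced function on the
// context's selection stack, drop it, and dispatch the corresponding "cancel" event.
// The return value is ignored at both call sites in this diff.
declare function cancelFunctionFromContext<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model>,
  reference: __IChatFunctionReference,
): unknown;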