@agentica/core 0.9.0 → 0.10.0-dev.20250302

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/LICENSE +21 -21
  2. package/README.md +419 -419
  3. package/lib/Agentica.d.ts +4 -4
  4. package/lib/Agentica.js +7 -7
  5. package/lib/Agentica.js.map +1 -1
  6. package/lib/chatgpt/ChatGptCallFunctionAgent.js +14 -6
  7. package/lib/chatgpt/ChatGptCallFunctionAgent.js.map +1 -1
  8. package/lib/index.d.ts +1 -1
  9. package/lib/index.js +1 -1
  10. package/lib/index.js.map +1 -1
  11. package/lib/index.mjs +10 -5
  12. package/lib/index.mjs.map +1 -1
  13. package/lib/structures/IAgenticaContext.d.ts +1 -1
  14. package/lib/structures/IAgenticaController.d.ts +4 -5
  15. package/lib/structures/IAgenticaEvent.d.ts +3 -3
  16. package/lib/structures/IAgenticaOperation.d.ts +2 -3
  17. package/lib/structures/IAgenticaOperationSelection.d.ts +2 -3
  18. package/lib/structures/IAgenticaPrompt.d.ts +2 -3
  19. package/lib/structures/IAgenticaProps.d.ts +4 -4
  20. package/lib/structures/IAgenticaTokenUsage.d.ts +3 -3
  21. package/lib/structures/{IAgenticaProvider.d.ts → IAgenticaVendor.d.ts} +6 -6
  22. package/lib/structures/{IAgenticaProvider.js → IAgenticaVendor.js} +1 -1
  23. package/lib/structures/IAgenticaVendor.js.map +1 -0
  24. package/package.json +1 -1
  25. package/prompts/cancel.md +4 -4
  26. package/prompts/common.md +2 -2
  27. package/prompts/describe.md +6 -6
  28. package/prompts/execute.md +6 -6
  29. package/prompts/initialize.md +2 -2
  30. package/prompts/select.md +6 -6
  31. package/src/Agentica.ts +323 -323
  32. package/src/chatgpt/ChatGptAgent.ts +75 -75
  33. package/src/chatgpt/ChatGptCallFunctionAgent.ts +464 -448
  34. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +287 -287
  35. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +52 -52
  36. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  37. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  38. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +319 -319
  39. package/src/functional/createHttpLlmApplication.ts +63 -63
  40. package/src/index.ts +19 -19
  41. package/src/internal/AgenticaConstant.ts +4 -4
  42. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  43. package/src/internal/AgenticaOperationComposer.ts +87 -87
  44. package/src/internal/AgenticaPromptFactory.ts +32 -32
  45. package/src/internal/AgenticaPromptTransformer.ts +86 -86
  46. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -115
  47. package/src/internal/MathUtil.ts +3 -3
  48. package/src/internal/Singleton.ts +22 -22
  49. package/src/internal/__map_take.ts +15 -15
  50. package/src/structures/IAgenticaConfig.ts +123 -123
  51. package/src/structures/IAgenticaContext.ts +129 -129
  52. package/src/structures/IAgenticaController.ts +133 -132
  53. package/src/structures/IAgenticaEvent.ts +229 -229
  54. package/src/structures/IAgenticaExecutor.ts +156 -156
  55. package/src/structures/IAgenticaOperation.ts +63 -64
  56. package/src/structures/IAgenticaOperationCollection.ts +52 -52
  57. package/src/structures/IAgenticaOperationSelection.ts +68 -69
  58. package/src/structures/IAgenticaPrompt.ts +182 -178
  59. package/src/structures/IAgenticaProps.ts +70 -70
  60. package/src/structures/IAgenticaSystemPrompt.ts +124 -124
  61. package/src/structures/IAgenticaTokenUsage.ts +107 -107
  62. package/src/structures/{IAgenticaProvider.ts → IAgenticaVendor.ts} +39 -39
  63. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  64. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  65. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  66. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  67. package/src/typings/AgenticaSource.ts +6 -6
  68. package/lib/structures/IAgenticaProvider.js.map +0 -1
package/src/chatgpt/ChatGptSelectFunctionAgent.ts
@@ -1,319 +1,319 @@
- import {
-   IHttpLlmFunction,
-   ILlmApplication,
-   ILlmSchema,
- } from "@samchon/openapi";
- import OpenAI from "openai";
- import typia, { IValidation } from "typia";
- import { v4 } from "uuid";
-
- import { AgenticaConstant } from "../internal/AgenticaConstant";
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { IAgenticaContext } from "../structures/IAgenticaContext";
- import { IAgenticaController } from "../structures/IAgenticaController";
- import { IAgenticaEvent } from "../structures/IAgenticaEvent";
- import { IAgenticaOperation } from "../structures/IAgenticaOperation";
- import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
- import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
- import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
- import { __IChatSelectFunctionsApplication } from "../structures/internal/__IChatSelectFunctionsApplication";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
-
- export namespace ChatGptSelectFunctionAgent {
-   export const execute = async <Model extends ILlmSchema.Model>(
-     ctx: IAgenticaContext<Model>,
-   ): Promise<IAgenticaPrompt<Model>[]> => {
-     if (ctx.operations.divided === undefined)
-       return step(ctx, ctx.operations.array, 0);
-
-     const stacks: IAgenticaOperationSelection<Model>[][] =
-       ctx.operations.divided.map(() => []);
-     const events: IAgenticaEvent<Model>[] = [];
-     const prompts: IAgenticaPrompt<Model>[][] = await Promise.all(
-       ctx.operations.divided.map((operations, i) =>
-         step(
-           {
-             ...ctx,
-             stack: stacks[i]!,
-             dispatch: async (e) => {
-               events.push(e);
-             },
-           },
-           operations,
-           0,
-         ),
-       ),
-     );
-
-     // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
-     if (stacks.every((s) => s.length === 0)) return prompts[0]!;
-     // ELITICISM
-     else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
-       return step(
-         ctx,
-         stacks
-           .flat()
-           .map(
-             (s) =>
-               ctx.operations.group
-                 .get(s.controller.name)!
-                 .get(s.function.name)!,
-           ),
-         0,
-       );
-
-     // RE-COLLECT SELECT FUNCTION EVENTS
-     const collection: IAgenticaPrompt.ISelect<Model> = {
-       id: v4(),
-       type: "select",
-       operations: [],
-     };
-     for (const e of events)
-       if (e.type === "select") {
-         collection.operations.push(
-           AgenticaPromptFactory.selection({
-             protocol: e.operation.protocol as "http",
-             controller: e.operation
-               .controller as IAgenticaController.IHttp<Model>,
-             function: e.operation.function as IHttpLlmFunction<Model>,
-             reason: e.reason,
-             name: e.operation.name,
-           }),
-         );
-         await selectFunction(ctx, {
-           name: e.operation.name,
-           reason: e.reason,
-         });
-       }
-     return [collection];
-   };
-
-   const step = async <Model extends ILlmSchema.Model>(
-     ctx: IAgenticaContext<Model>,
-     operations: IAgenticaOperation<Model>[],
-     retry: number,
-     failures?: IFailure[],
-   ): Promise<IAgenticaPrompt<Model>[]> => {
-     //----
-     // EXECUTE CHATGPT API
-     //----
-     const completion: OpenAI.ChatCompletion = await ctx.request("select", {
-       messages: [
-         // COMMON SYSTEM PROMPT
-         {
-           role: "system",
-           content: AgenticaDefaultPrompt.write(ctx.config),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-         // CANDIDATE FUNCTIONS
-         {
-           role: "assistant",
-           tool_calls: [
-             {
-               type: "function",
-               id: "getApiFunctions",
-               function: {
-                 name: "getApiFunctions",
-                 arguments: JSON.stringify({}),
-               },
-             },
-           ],
-         },
-         {
-           role: "tool",
-           tool_call_id: "getApiFunctions",
-           content: JSON.stringify(
-             operations.map((op) => ({
-               name: op.name,
-               description: op.function.description,
-               ...(op.protocol === "http"
-                 ? {
-                     method: op.function.method,
-                     path: op.function.path,
-                     tags: op.function.tags,
-                   }
-                 : {}),
-             })),
-           ),
-         },
-         // PREVIOUS HISTORIES
-         ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
-         // USER INPUT
-         {
-           role: "user",
-           content: ctx.prompt.text,
-         },
-         // SYSTEM PROMPT
-         {
-           role: "system",
-           content:
-             ctx.config?.systemPrompt?.select?.(ctx.histories) ??
-             AgenticaSystemPrompt.SELECT,
-         },
-         // TYPE CORRECTIONS
-         ...emendMessages(failures ?? []),
-       ],
-       // STACK FUNCTIONS
-       tools: CONTAINER.functions.map(
-         (func) =>
-           ({
-             type: "function",
-             function: {
-               name: func.name,
-               description: func.description,
-               parameters: func.parameters as any,
-             },
-           }) satisfies OpenAI.ChatCompletionTool,
-       ),
-       tool_choice: "auto",
-       parallel_tool_calls: false,
-     });
-
-     //----
-     // VALIDATION
-     //----
-     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
-       const failures: IFailure[] = [];
-       for (const choice of completion.choices)
-         for (const tc of choice.message.tool_calls ?? []) {
-           if (tc.function.name !== "selectFunctions") continue;
-           const input: object = JSON.parse(tc.function.arguments);
-           const validation: IValidation<__IChatFunctionReference.IProps> =
-             typia.validate<__IChatFunctionReference.IProps>(input);
-           if (validation.success === false)
-             failures.push({
-               id: tc.id,
-               name: tc.function.name,
-               validation,
-             });
-         }
-       if (failures.length > 0) return step(ctx, operations, retry, failures);
-     }
-
-     //----
-     // PROCESS COMPLETION
-     //----
-     const prompts: IAgenticaPrompt<Model>[] = [];
-     for (const choice of completion.choices) {
-       // TOOL CALLING HANDLER
-       if (choice.message.tool_calls)
-         for (const tc of choice.message.tool_calls) {
-           if (tc.type !== "function") continue;
-
-           const input: __IChatFunctionReference.IProps = JSON.parse(
-             tc.function.arguments,
-           );
-           if (typia.is(input) === false) continue;
-           else if (tc.function.name === "selectFunctions") {
-             const collection: IAgenticaPrompt.ISelect<Model> = {
-               id: tc.id,
-               type: "select",
-               operations: [],
-             };
-             for (const reference of input.functions) {
-               const operation: IAgenticaOperation<Model> | null =
-                 await selectFunction(ctx, reference);
-               if (operation !== null)
-                 collection.operations.push(
-                   AgenticaPromptFactory.selection({
-                     protocol: operation.protocol as "http",
-                     controller:
-                       operation.controller as IAgenticaController.IHttp<Model>,
-                     function: operation.function as IHttpLlmFunction<Model>,
-                     name: operation.name,
-                     reason: reference.reason,
-                   }),
-                 );
-             }
-             if (collection.operations.length !== 0) prompts.push(collection);
-           }
-         }
-
-       // ASSISTANT MESSAGE
-       if (
-         choice.message.role === "assistant" &&
-         !!choice.message.content?.length
-       ) {
-         const text: IAgenticaPrompt.IText = {
-           type: "text",
-           role: "assistant",
-           text: choice.message.content,
-         };
-         prompts.push(text);
-         await ctx.dispatch(text);
-       }
-     }
-     return prompts;
-   };
-
-   const selectFunction = async <Model extends ILlmSchema.Model>(
-     ctx: IAgenticaContext<Model>,
-     reference: __IChatFunctionReference,
-   ): Promise<IAgenticaOperation<Model> | null> => {
-     const operation: IAgenticaOperation<Model> | undefined =
-       ctx.operations.flat.get(reference.name);
-     if (operation === undefined) return null;
-
-     ctx.stack.push(
-       AgenticaPromptFactory.selection({
-         protocol: operation.protocol as "http",
-         controller: operation.controller as IAgenticaController.IHttp<Model>,
-         function: operation.function as IHttpLlmFunction<Model>,
-         name: reference.name,
-         reason: reference.reason,
-       }),
-     );
-     await ctx.dispatch({
-       type: "select",
-       reason: reference.reason,
-       operation,
-     });
-     return operation;
-   };
-
-   const emendMessages = (
-     failures: IFailure[],
-   ): OpenAI.ChatCompletionMessageParam[] =>
-     failures
-       .map((f) => [
-         {
-           role: "assistant",
-           tool_calls: [
-             {
-               type: "function",
-               id: f.id,
-               function: {
-                 name: f.name,
-                 arguments: JSON.stringify(f.validation.data),
-               },
-             },
-           ],
-         } satisfies OpenAI.ChatCompletionAssistantMessageParam,
-         {
-           role: "tool",
-           content: JSON.stringify(f.validation.errors),
-           tool_call_id: f.id,
-         } satisfies OpenAI.ChatCompletionToolMessageParam,
-         {
-           role: "system",
-           content: [
-             "You A.I. assistant has composed wrong typed arguments.",
-             "",
-             "Correct it at the next function calling.",
-           ].join("\n"),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-       ])
-       .flat();
- }
-
- const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
-   __IChatSelectFunctionsApplication,
-   "chatgpt"
- >();
-
- interface IFailure {
-   id: string;
-   name: string;
-   validation: IValidation.IFailure;
- }
+ import {
+   IHttpLlmFunction,
+   ILlmApplication,
+   ILlmSchema,
+ } from "@samchon/openapi";
+ import OpenAI from "openai";
+ import typia, { IValidation } from "typia";
+ import { v4 } from "uuid";
+
+ import { AgenticaConstant } from "../internal/AgenticaConstant";
+ import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
+ import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
+ import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
+ import { IAgenticaContext } from "../structures/IAgenticaContext";
+ import { IAgenticaController } from "../structures/IAgenticaController";
+ import { IAgenticaEvent } from "../structures/IAgenticaEvent";
+ import { IAgenticaOperation } from "../structures/IAgenticaOperation";
+ import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
+ import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
+ import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
+ import { __IChatSelectFunctionsApplication } from "../structures/internal/__IChatSelectFunctionsApplication";
+ import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+
+ export namespace ChatGptSelectFunctionAgent {
+   export const execute = async <Model extends ILlmSchema.Model>(
+     ctx: IAgenticaContext<Model>,
+   ): Promise<IAgenticaPrompt<Model>[]> => {
+     if (ctx.operations.divided === undefined)
+       return step(ctx, ctx.operations.array, 0);
+
+     const stacks: IAgenticaOperationSelection<Model>[][] =
+       ctx.operations.divided.map(() => []);
+     const events: IAgenticaEvent<Model>[] = [];
+     const prompts: IAgenticaPrompt<Model>[][] = await Promise.all(
+       ctx.operations.divided.map((operations, i) =>
+         step(
+           {
+             ...ctx,
+             stack: stacks[i]!,
+             dispatch: async (e) => {
+               events.push(e);
+             },
+           },
+           operations,
+           0,
+         ),
+       ),
+     );
+
+     // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
+     if (stacks.every((s) => s.length === 0)) return prompts[0]!;
+     // ELITICISM
+     else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
+       return step(
+         ctx,
+         stacks
+           .flat()
+           .map(
+             (s) =>
+               ctx.operations.group
+                 .get(s.controller.name)!
+                 .get(s.function.name)!,
+           ),
+         0,
+       );
+
+     // RE-COLLECT SELECT FUNCTION EVENTS
+     const collection: IAgenticaPrompt.ISelect<Model> = {
+       id: v4(),
+       type: "select",
+       operations: [],
+     };
+     for (const e of events)
+       if (e.type === "select") {
+         collection.operations.push(
+           AgenticaPromptFactory.selection({
+             protocol: e.operation.protocol as "http",
+             controller: e.operation
+               .controller as IAgenticaController.IHttp<Model>,
+             function: e.operation.function as IHttpLlmFunction<Model>,
+             reason: e.reason,
+             name: e.operation.name,
+           }),
+         );
+         await selectFunction(ctx, {
+           name: e.operation.name,
+           reason: e.reason,
+         });
+       }
+     return [collection];
+   };
+
+   const step = async <Model extends ILlmSchema.Model>(
+     ctx: IAgenticaContext<Model>,
+     operations: IAgenticaOperation<Model>[],
+     retry: number,
+     failures?: IFailure[],
+   ): Promise<IAgenticaPrompt<Model>[]> => {
+     //----
+     // EXECUTE CHATGPT API
+     //----
+     const completion: OpenAI.ChatCompletion = await ctx.request("select", {
+       messages: [
+         // COMMON SYSTEM PROMPT
+         {
+           role: "system",
+           content: AgenticaDefaultPrompt.write(ctx.config),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+         // CANDIDATE FUNCTIONS
+         {
+           role: "assistant",
+           tool_calls: [
+             {
+               type: "function",
+               id: "getApiFunctions",
+               function: {
+                 name: "getApiFunctions",
+                 arguments: JSON.stringify({}),
+               },
+             },
+           ],
+         },
+         {
+           role: "tool",
+           tool_call_id: "getApiFunctions",
+           content: JSON.stringify(
+             operations.map((op) => ({
+               name: op.name,
+               description: op.function.description,
+               ...(op.protocol === "http"
+                 ? {
+                     method: op.function.method,
+                     path: op.function.path,
+                     tags: op.function.tags,
+                   }
+                 : {}),
+             })),
+           ),
+         },
+         // PREVIOUS HISTORIES
+         ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+         // USER INPUT
+         {
+           role: "user",
+           content: ctx.prompt.text,
+         },
+         // SYSTEM PROMPT
+         {
+           role: "system",
+           content:
+             ctx.config?.systemPrompt?.select?.(ctx.histories) ??
+             AgenticaSystemPrompt.SELECT,
+         },
+         // TYPE CORRECTIONS
+         ...emendMessages(failures ?? []),
+       ],
+       // STACK FUNCTIONS
+       tools: CONTAINER.functions.map(
+         (func) =>
+           ({
+             type: "function",
+             function: {
+               name: func.name,
+               description: func.description,
+               parameters: func.parameters as any,
+             },
+           }) satisfies OpenAI.ChatCompletionTool,
+       ),
+       tool_choice: "auto",
+       parallel_tool_calls: false,
+     });
+
+     //----
+     // VALIDATION
+     //----
+     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
+       const failures: IFailure[] = [];
+       for (const choice of completion.choices)
+         for (const tc of choice.message.tool_calls ?? []) {
+           if (tc.function.name !== "selectFunctions") continue;
+           const input: object = JSON.parse(tc.function.arguments);
+           const validation: IValidation<__IChatFunctionReference.IProps> =
+             typia.validate<__IChatFunctionReference.IProps>(input);
+           if (validation.success === false)
+             failures.push({
+               id: tc.id,
+               name: tc.function.name,
+               validation,
+             });
+         }
+       if (failures.length > 0) return step(ctx, operations, retry, failures);
+     }
+
+     //----
+     // PROCESS COMPLETION
+     //----
+     const prompts: IAgenticaPrompt<Model>[] = [];
+     for (const choice of completion.choices) {
+       // TOOL CALLING HANDLER
+       if (choice.message.tool_calls)
+         for (const tc of choice.message.tool_calls) {
+           if (tc.type !== "function") continue;
+
+           const input: __IChatFunctionReference.IProps = JSON.parse(
+             tc.function.arguments,
+           );
+           if (typia.is(input) === false) continue;
+           else if (tc.function.name === "selectFunctions") {
+             const collection: IAgenticaPrompt.ISelect<Model> = {
+               id: tc.id,
+               type: "select",
+               operations: [],
+             };
+             for (const reference of input.functions) {
+               const operation: IAgenticaOperation<Model> | null =
+                 await selectFunction(ctx, reference);
+               if (operation !== null)
+                 collection.operations.push(
+                   AgenticaPromptFactory.selection({
+                     protocol: operation.protocol as "http",
+                     controller:
+                       operation.controller as IAgenticaController.IHttp<Model>,
+                     function: operation.function as IHttpLlmFunction<Model>,
+                     name: operation.name,
+                     reason: reference.reason,
+                   }),
+                 );
+             }
+             if (collection.operations.length !== 0) prompts.push(collection);
+           }
+         }
+
+       // ASSISTANT MESSAGE
+       if (
+         choice.message.role === "assistant" &&
+         !!choice.message.content?.length
+       ) {
+         const text: IAgenticaPrompt.IText = {
+           type: "text",
+           role: "assistant",
+           text: choice.message.content,
+         };
+         prompts.push(text);
+         await ctx.dispatch(text);
+       }
+     }
+     return prompts;
+   };
+
+   const selectFunction = async <Model extends ILlmSchema.Model>(
+     ctx: IAgenticaContext<Model>,
+     reference: __IChatFunctionReference,
+   ): Promise<IAgenticaOperation<Model> | null> => {
+     const operation: IAgenticaOperation<Model> | undefined =
+       ctx.operations.flat.get(reference.name);
+     if (operation === undefined) return null;
+
+     ctx.stack.push(
+       AgenticaPromptFactory.selection({
+         protocol: operation.protocol as "http",
+         controller: operation.controller as IAgenticaController.IHttp<Model>,
+         function: operation.function as IHttpLlmFunction<Model>,
+         name: reference.name,
+         reason: reference.reason,
+       }),
+     );
+     await ctx.dispatch({
+       type: "select",
+       reason: reference.reason,
+       operation,
+     });
+     return operation;
+   };
+
+   const emendMessages = (
+     failures: IFailure[],
+   ): OpenAI.ChatCompletionMessageParam[] =>
+     failures
+       .map((f) => [
+         {
+           role: "assistant",
+           tool_calls: [
+             {
+               type: "function",
+               id: f.id,
+               function: {
+                 name: f.name,
+                 arguments: JSON.stringify(f.validation.data),
+               },
+             },
+           ],
+         } satisfies OpenAI.ChatCompletionAssistantMessageParam,
+         {
+           role: "tool",
+           content: JSON.stringify(f.validation.errors),
+           tool_call_id: f.id,
+         } satisfies OpenAI.ChatCompletionToolMessageParam,
+         {
+           role: "system",
+           content: [
+             "You A.I. assistant has composed wrong typed arguments.",
+             "",
+             "Correct it at the next function calling.",
+           ].join("\n"),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+       ])
+       .flat();
+ }
+
+ const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
+   __IChatSelectFunctionsApplication,
+   "chatgpt"
+ >();
+
+ interface IFailure {
+   id: string;
+   name: string;
+   validation: IValidation.IFailure;
+ }