@agentica/core 0.8.2 → 0.8.3-dev.20250227

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/LICENSE +21 -21
  2. package/README.md +404 -404
  3. package/lib/Agentica.js +1 -4
  4. package/lib/Agentica.js.map +1 -1
  5. package/lib/index.mjs +8 -13
  6. package/lib/index.mjs.map +1 -1
  7. package/lib/internal/AgenticaTokenUsageAggregator.js +7 -9
  8. package/lib/internal/AgenticaTokenUsageAggregator.js.map +1 -1
  9. package/lib/structures/IAgenticaTokenUsage.d.ts +7 -11
  10. package/package.json +1 -1
  11. package/prompts/cancel.md +4 -4
  12. package/prompts/common.md +2 -2
  13. package/prompts/describe.md +6 -6
  14. package/prompts/execute.md +6 -6
  15. package/prompts/initialize.md +2 -2
  16. package/prompts/select.md +6 -6
  17. package/src/Agentica.ts +318 -322
  18. package/src/chatgpt/ChatGptAgent.ts +71 -71
  19. package/src/chatgpt/ChatGptCallFunctionAgent.ts +445 -445
  20. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +283 -283
  21. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +51 -51
  22. package/src/chatgpt/ChatGptHistoryDecoder.ts +87 -87
  23. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  24. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +318 -318
  25. package/src/functional/createHttpLlmApplication.ts +63 -63
  26. package/src/index.ts +19 -19
  27. package/src/internal/AgenticaConstant.ts +4 -4
  28. package/src/internal/AgenticaDefaultPrompt.ts +39 -39
  29. package/src/internal/AgenticaOperationComposer.ts +82 -82
  30. package/src/internal/AgenticaPromptFactory.ts +30 -30
  31. package/src/internal/AgenticaPromptTransformer.ts +83 -83
  32. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -123
  33. package/src/internal/MathUtil.ts +3 -3
  34. package/src/internal/Singleton.ts +22 -22
  35. package/src/internal/__map_take.ts +15 -15
  36. package/src/structures/IAgenticaConfig.ts +121 -121
  37. package/src/structures/IAgenticaContext.ts +128 -128
  38. package/src/structures/IAgenticaController.ts +130 -130
  39. package/src/structures/IAgenticaEvent.ts +224 -224
  40. package/src/structures/IAgenticaExecutor.ts +152 -152
  41. package/src/structures/IAgenticaOperation.ts +64 -64
  42. package/src/structures/IAgenticaOperationCollection.ts +50 -50
  43. package/src/structures/IAgenticaOperationSelection.ts +69 -69
  44. package/src/structures/IAgenticaPrompt.ts +173 -173
  45. package/src/structures/IAgenticaProps.ts +64 -64
  46. package/src/structures/IAgenticaProvider.ts +45 -45
  47. package/src/structures/IAgenticaSystemPrompt.ts +122 -122
  48. package/src/structures/IAgenticaTokenUsage.ts +107 -112
  49. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  50. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  51. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  52. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  53. package/src/typings/AgenticaSource.ts +6 -6
@@ -1,318 +1,318 @@
- import { IHttpLlmFunction, ILlmApplication } from "@samchon/openapi";
- import OpenAI from "openai";
- import typia, { IValidation } from "typia";
- import { v4 } from "uuid";
-
- import { AgenticaConstant } from "../internal/AgenticaConstant";
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { IAgenticaContext } from "../structures/IAgenticaContext";
- import { IAgenticaController } from "../structures/IAgenticaController";
- import { IAgenticaEvent } from "../structures/IAgenticaEvent";
- import { IAgenticaOperation } from "../structures/IAgenticaOperation";
- import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
- import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
- import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
- import { __IChatSelectFunctionsApplication } from "../structures/internal/__IChatSelectFunctionsApplication";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
-
- export namespace ChatGptSelectFunctionAgent {
-   export const execute = async (
-     ctx: IAgenticaContext,
-   ): Promise<IAgenticaPrompt[]> => {
-     if (ctx.operations.divided === undefined)
-       return step(ctx, ctx.operations.array, 0);
-
-     const stacks: IAgenticaOperationSelection[][] = ctx.operations.divided.map(
-       () => [],
-     );
-     const events: IAgenticaEvent[] = [];
-     const prompts: IAgenticaPrompt[][] = await Promise.all(
-       ctx.operations.divided.map((operations, i) =>
-         step(
-           {
-             ...ctx,
-             stack: stacks[i]!,
-             dispatch: async (e) => {
-               events.push(e);
-             },
-           },
-           operations,
-           0,
-         ),
-       ),
-     );
-
-     // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
-     if (stacks.every((s) => s.length === 0)) return prompts[0]!;
-     // ELITICISM
-     else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
-       return step(
-         ctx,
-         stacks
-           .flat()
-           .map(
-             (s) =>
-               ctx.operations.group
-                 .get(s.controller.name)!
-                 .get(s.function.name)!,
-           ),
-         0,
-       );
-
-     // RE-COLLECT SELECT FUNCTION EVENTS
-     const collection: IAgenticaPrompt.ISelect = {
-       id: v4(),
-       type: "select",
-       operations: [],
-     };
-     for (const e of events)
-       if (e.type === "select") {
-         collection.operations.push(
-           AgenticaPromptFactory.selection({
-             protocol: e.operation.protocol as "http",
-             controller: e.operation.controller as IAgenticaController.IHttp,
-             function: e.operation.function as IHttpLlmFunction<"chatgpt">,
-             reason: e.reason,
-             name: e.operation.name,
-           }),
-         );
-         await selectFunction(ctx, {
-           name: e.operation.name,
-           reason: e.reason,
-         });
-       }
-     return [collection];
-   };
-
-   const step = async (
-     ctx: IAgenticaContext,
-     operations: IAgenticaOperation[],
-     retry: number,
-     failures?: IFailure[],
-   ): Promise<IAgenticaPrompt[]> => {
-     //----
-     // EXECUTE CHATGPT API
-     //----
-     const completion: OpenAI.ChatCompletion = await ctx.request("select", {
-       messages: [
-         // COMMON SYSTEM PROMPT
-         {
-           role: "system",
-           content: AgenticaDefaultPrompt.write(ctx.config),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-         // CANDIDATE FUNCTIONS
-         {
-           role: "assistant",
-           tool_calls: [
-             {
-               type: "function",
-               id: "getApiFunctions",
-               function: {
-                 name: "getApiFunctions",
-                 arguments: JSON.stringify({}),
-               },
-             },
-           ],
-         },
-         {
-           role: "tool",
-           tool_call_id: "getApiFunctions",
-           content: JSON.stringify(
-             operations.map((op) => ({
-               name: op.name,
-               description: op.function.description,
-               ...(op.protocol === "http"
-                 ? {
-                     method: op.function.method,
-                     path: op.function.path,
-                     tags: op.function.tags,
-                   }
-                 : {}),
-             })),
-           ),
-         },
-         // PREVIOUS HISTORIES
-         ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
-         // USER INPUT
-         {
-           role: "user",
-           content: ctx.prompt.text,
-         },
-         // SYSTEM PROMPT
-         {
-           role: "system",
-           content:
-             ctx.config?.systemPrompt?.select?.(ctx.histories) ??
-             AgenticaSystemPrompt.SELECT,
-         },
-         // TYPE CORRECTIONS
-         ...emendMessages(failures ?? []),
-       ],
-       // STACK FUNCTIONS
-       tools: CONTAINER.functions.map(
-         (func) =>
-           ({
-             type: "function",
-             function: {
-               name: func.name,
-               description: func.description,
-               parameters: func.parameters as any,
-             },
-           }) satisfies OpenAI.ChatCompletionTool,
-       ),
-       tool_choice: "auto",
-       parallel_tool_calls: false,
-     });
-
-     //----
-     // VALIDATION
-     //----
-     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
-       const failures: IFailure[] = [];
-       for (const choice of completion.choices)
-         for (const tc of choice.message.tool_calls ?? []) {
-           if (tc.function.name !== "selectFunctions") continue;
-           const input: object = JSON.parse(tc.function.arguments);
-           const validation: IValidation<__IChatFunctionReference.IProps> =
-             typia.validate<__IChatFunctionReference.IProps>(input);
-           if (validation.success === false)
-             failures.push({
-               id: tc.id,
-               name: tc.function.name,
-               validation,
-             });
-         }
-       if (failures.length > 0) return step(ctx, operations, retry, failures);
-     }
-
-     //----
-     // PROCESS COMPLETION
-     //----
-     const prompts: IAgenticaPrompt[] = [];
-     for (const choice of completion.choices) {
-       // TOOL CALLING HANDLER
-       if (choice.message.tool_calls)
-         for (const tc of choice.message.tool_calls) {
-           if (tc.type !== "function") continue;
-
-           const input: __IChatFunctionReference.IProps = JSON.parse(
-             tc.function.arguments,
-           );
-           if (typia.is(input) === false) continue;
-           else if (tc.function.name === "selectFunctions") {
-             const collection: IAgenticaPrompt.ISelect = {
-               id: tc.id,
-               type: "select",
-               operations: [],
-             };
-             for (const reference of input.functions) {
-               const operation: IAgenticaOperation | null = await selectFunction(
-                 ctx,
-                 reference,
-               );
-               if (operation !== null)
-                 collection.operations.push(
-                   AgenticaPromptFactory.selection({
-                     protocol: operation.protocol as "http",
-                     controller:
-                       operation.controller as IAgenticaController.IHttp,
-                     function: operation.function as IHttpLlmFunction<"chatgpt">,
-                     name: operation.name,
-                     reason: reference.reason,
-                   }),
-                 );
-             }
-             if (collection.operations.length !== 0) prompts.push(collection);
-           }
-         }
-
-       // ASSISTANT MESSAGE
-       if (
-         choice.message.role === "assistant" &&
-         !!choice.message.content?.length
-       ) {
-         const text: IAgenticaPrompt.IText = {
-           type: "text",
-           role: "assistant",
-           text: choice.message.content,
-         };
-         prompts.push(text);
-         await ctx.dispatch(text);
-       }
-     }
-     return prompts;
-   };
-
-   const selectFunction = async (
-     ctx: IAgenticaContext,
-     reference: __IChatFunctionReference,
-   ): Promise<IAgenticaOperation | null> => {
-     const operation: IAgenticaOperation | undefined = ctx.operations.flat.get(
-       reference.name,
-     );
-     if (operation === undefined) return null;
-
-     ctx.stack.push(
-       AgenticaPromptFactory.selection({
-         protocol: operation.protocol as "http",
-         controller: operation.controller as IAgenticaController.IHttp,
-         function: operation.function as IHttpLlmFunction<"chatgpt">,
-         name: reference.name,
-         reason: reference.reason,
-       }),
-     );
-     await ctx.dispatch({
-       type: "select",
-       reason: reference.reason,
-       operation,
-     });
-     return operation;
-   };
-
-   const emendMessages = (
-     failures: IFailure[],
-   ): OpenAI.ChatCompletionMessageParam[] =>
-     failures
-       .map((f) => [
-         {
-           role: "assistant",
-           tool_calls: [
-             {
-               type: "function",
-               id: f.id,
-               function: {
-                 name: f.name,
-                 arguments: JSON.stringify(f.validation.data),
-               },
-             },
-           ],
-         } satisfies OpenAI.ChatCompletionAssistantMessageParam,
-         {
-           role: "tool",
-           content: JSON.stringify(f.validation.errors),
-           tool_call_id: f.id,
-         } satisfies OpenAI.ChatCompletionToolMessageParam,
-         {
-           role: "system",
-           content: [
-             "You A.I. assistant has composed wrong typed arguments.",
-             "",
-             "Correct it at the next function calling.",
-           ].join("\n"),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-       ])
-       .flat();
- }
-
- const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
-   __IChatSelectFunctionsApplication,
-   "chatgpt"
- >();
-
- interface IFailure {
-   id: string;
-   name: string;
-   validation: IValidation.IFailure;
- }
+ import { IHttpLlmFunction, ILlmApplication } from "@samchon/openapi";
+ import OpenAI from "openai";
+ import typia, { IValidation } from "typia";
+ import { v4 } from "uuid";
+
+ import { AgenticaConstant } from "../internal/AgenticaConstant";
+ import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
+ import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
+ import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
+ import { IAgenticaContext } from "../structures/IAgenticaContext";
+ import { IAgenticaController } from "../structures/IAgenticaController";
+ import { IAgenticaEvent } from "../structures/IAgenticaEvent";
+ import { IAgenticaOperation } from "../structures/IAgenticaOperation";
+ import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
+ import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
+ import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
+ import { __IChatSelectFunctionsApplication } from "../structures/internal/__IChatSelectFunctionsApplication";
+ import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+
+ export namespace ChatGptSelectFunctionAgent {
+   export const execute = async (
+     ctx: IAgenticaContext,
+   ): Promise<IAgenticaPrompt[]> => {
+     if (ctx.operations.divided === undefined)
+       return step(ctx, ctx.operations.array, 0);
+
+     const stacks: IAgenticaOperationSelection[][] = ctx.operations.divided.map(
+       () => [],
+     );
+     const events: IAgenticaEvent[] = [];
+     const prompts: IAgenticaPrompt[][] = await Promise.all(
+       ctx.operations.divided.map((operations, i) =>
+         step(
+           {
+             ...ctx,
+             stack: stacks[i]!,
+             dispatch: async (e) => {
+               events.push(e);
+             },
+           },
+           operations,
+           0,
+         ),
+       ),
+     );
+
+     // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
+     if (stacks.every((s) => s.length === 0)) return prompts[0]!;
+     // ELITICISM
+     else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
+       return step(
+         ctx,
+         stacks
+           .flat()
+           .map(
+             (s) =>
+               ctx.operations.group
+                 .get(s.controller.name)!
+                 .get(s.function.name)!,
+           ),
+         0,
+       );
+
+     // RE-COLLECT SELECT FUNCTION EVENTS
+     const collection: IAgenticaPrompt.ISelect = {
+       id: v4(),
+       type: "select",
+       operations: [],
+     };
+     for (const e of events)
+       if (e.type === "select") {
+         collection.operations.push(
+           AgenticaPromptFactory.selection({
+             protocol: e.operation.protocol as "http",
+             controller: e.operation.controller as IAgenticaController.IHttp,
+             function: e.operation.function as IHttpLlmFunction<"chatgpt">,
+             reason: e.reason,
+             name: e.operation.name,
+           }),
+         );
+         await selectFunction(ctx, {
+           name: e.operation.name,
+           reason: e.reason,
+         });
+       }
+     return [collection];
+   };
+
+   const step = async (
+     ctx: IAgenticaContext,
+     operations: IAgenticaOperation[],
+     retry: number,
+     failures?: IFailure[],
+   ): Promise<IAgenticaPrompt[]> => {
+     //----
+     // EXECUTE CHATGPT API
+     //----
+     const completion: OpenAI.ChatCompletion = await ctx.request("select", {
+       messages: [
+         // COMMON SYSTEM PROMPT
+         {
+           role: "system",
+           content: AgenticaDefaultPrompt.write(ctx.config),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+         // CANDIDATE FUNCTIONS
+         {
+           role: "assistant",
+           tool_calls: [
+             {
+               type: "function",
+               id: "getApiFunctions",
+               function: {
+                 name: "getApiFunctions",
+                 arguments: JSON.stringify({}),
+               },
+             },
+           ],
+         },
+         {
+           role: "tool",
+           tool_call_id: "getApiFunctions",
+           content: JSON.stringify(
+             operations.map((op) => ({
+               name: op.name,
+               description: op.function.description,
+               ...(op.protocol === "http"
+                 ? {
+                     method: op.function.method,
+                     path: op.function.path,
+                     tags: op.function.tags,
+                   }
+                 : {}),
+             })),
+           ),
+         },
+         // PREVIOUS HISTORIES
+         ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+         // USER INPUT
+         {
+           role: "user",
+           content: ctx.prompt.text,
+         },
+         // SYSTEM PROMPT
+         {
+           role: "system",
+           content:
+             ctx.config?.systemPrompt?.select?.(ctx.histories) ??
+             AgenticaSystemPrompt.SELECT,
+         },
+         // TYPE CORRECTIONS
+         ...emendMessages(failures ?? []),
+       ],
+       // STACK FUNCTIONS
+       tools: CONTAINER.functions.map(
+         (func) =>
+           ({
+             type: "function",
+             function: {
+               name: func.name,
+               description: func.description,
+               parameters: func.parameters as any,
+             },
+           }) satisfies OpenAI.ChatCompletionTool,
+       ),
+       tool_choice: "auto",
+       parallel_tool_calls: false,
+     });
+
+     //----
+     // VALIDATION
+     //----
+     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
+       const failures: IFailure[] = [];
+       for (const choice of completion.choices)
+         for (const tc of choice.message.tool_calls ?? []) {
+           if (tc.function.name !== "selectFunctions") continue;
+           const input: object = JSON.parse(tc.function.arguments);
+           const validation: IValidation<__IChatFunctionReference.IProps> =
+             typia.validate<__IChatFunctionReference.IProps>(input);
+           if (validation.success === false)
+             failures.push({
+               id: tc.id,
+               name: tc.function.name,
+               validation,
+             });
+         }
+       if (failures.length > 0) return step(ctx, operations, retry, failures);
+     }
+
+     //----
+     // PROCESS COMPLETION
+     //----
+     const prompts: IAgenticaPrompt[] = [];
+     for (const choice of completion.choices) {
+       // TOOL CALLING HANDLER
+       if (choice.message.tool_calls)
+         for (const tc of choice.message.tool_calls) {
+           if (tc.type !== "function") continue;
+
+           const input: __IChatFunctionReference.IProps = JSON.parse(
+             tc.function.arguments,
+           );
+           if (typia.is(input) === false) continue;
+           else if (tc.function.name === "selectFunctions") {
+             const collection: IAgenticaPrompt.ISelect = {
+               id: tc.id,
+               type: "select",
+               operations: [],
+             };
+             for (const reference of input.functions) {
+               const operation: IAgenticaOperation | null = await selectFunction(
+                 ctx,
+                 reference,
+               );
+               if (operation !== null)
+                 collection.operations.push(
+                   AgenticaPromptFactory.selection({
+                     protocol: operation.protocol as "http",
+                     controller:
+                       operation.controller as IAgenticaController.IHttp,
+                     function: operation.function as IHttpLlmFunction<"chatgpt">,
+                     name: operation.name,
+                     reason: reference.reason,
+                   }),
+                 );
+             }
+             if (collection.operations.length !== 0) prompts.push(collection);
+           }
+         }
+
+       // ASSISTANT MESSAGE
+       if (
+         choice.message.role === "assistant" &&
+         !!choice.message.content?.length
+       ) {
+         const text: IAgenticaPrompt.IText = {
+           type: "text",
+           role: "assistant",
+           text: choice.message.content,
+         };
+         prompts.push(text);
+         await ctx.dispatch(text);
+       }
+     }
+     return prompts;
+   };
+
+   const selectFunction = async (
+     ctx: IAgenticaContext,
+     reference: __IChatFunctionReference,
+   ): Promise<IAgenticaOperation | null> => {
+     const operation: IAgenticaOperation | undefined = ctx.operations.flat.get(
+       reference.name,
+     );
+     if (operation === undefined) return null;
+
+     ctx.stack.push(
+       AgenticaPromptFactory.selection({
+         protocol: operation.protocol as "http",
+         controller: operation.controller as IAgenticaController.IHttp,
+         function: operation.function as IHttpLlmFunction<"chatgpt">,
+         name: reference.name,
+         reason: reference.reason,
+       }),
+     );
+     await ctx.dispatch({
+       type: "select",
+       reason: reference.reason,
+       operation,
+     });
+     return operation;
+   };
+
+   const emendMessages = (
+     failures: IFailure[],
+   ): OpenAI.ChatCompletionMessageParam[] =>
+     failures
+       .map((f) => [
+         {
+           role: "assistant",
+           tool_calls: [
+             {
+               type: "function",
+               id: f.id,
+               function: {
+                 name: f.name,
+                 arguments: JSON.stringify(f.validation.data),
+               },
+             },
+           ],
+         } satisfies OpenAI.ChatCompletionAssistantMessageParam,
+         {
+           role: "tool",
+           content: JSON.stringify(f.validation.errors),
+           tool_call_id: f.id,
+         } satisfies OpenAI.ChatCompletionToolMessageParam,
+         {
+           role: "system",
+           content: [
+             "You A.I. assistant has composed wrong typed arguments.",
+             "",
+             "Correct it at the next function calling.",
+           ].join("\n"),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+       ])
+       .flat();
+ }
+
+ const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
+   __IChatSelectFunctionsApplication,
+   "chatgpt"
+ >();
+
+ interface IFailure {
+   id: string;
+   name: string;
+   validation: IValidation.IFailure;
+ }
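
The CONTAINER constant near the end of this file is what supplies the tools array composed inside step(). A minimal sketch of that pattern follows, assuming the typia compile-time transformer is configured in the consuming project; the IBbsArticleApplication interface is purely hypothetical and not part of @agentica/core, while ILlmApplication, typia.llm.application(), and the name/description/parameters mapping are the same ones that appear in the code above.

import { ILlmApplication } from "@samchon/openapi";
import OpenAI from "openai";
import typia from "typia";

// Hypothetical application interface, used only to illustrate the pattern.
interface IBbsArticleApplication {
  /** Create a new article with the given title and body. */
  create(props: { title: string; body: string }): void;
}

// Generated at compile time by the typia transformer, like CONTAINER above.
const app: ILlmApplication<"chatgpt"> = typia.llm.application<
  IBbsArticleApplication,
  "chatgpt"
>();

// The same mapping that step() performs on CONTAINER.functions.
const tools: OpenAI.ChatCompletionTool[] = app.functions.map(
  (func) =>
    ({
      type: "function",
      function: {
        name: func.name,
        description: func.description,
        parameters: func.parameters as any,
      },
    }) satisfies OpenAI.ChatCompletionTool,
);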