@agentica/core 0.12.1 → 0.12.2-dev.20250314

This diff shows the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (77)
  1. package/LICENSE +21 -21
  2. package/README.md +461 -461
  3. package/package.json +1 -1
  4. package/prompts/cancel.md +4 -4
  5. package/prompts/common.md +2 -2
  6. package/prompts/describe.md +6 -6
  7. package/prompts/execute.md +6 -6
  8. package/prompts/initialize.md +2 -2
  9. package/prompts/select.md +6 -6
  10. package/src/Agentica.ts +359 -359
  11. package/src/chatgpt/ChatGptAgent.ts +76 -76
  12. package/src/chatgpt/ChatGptCallFunctionAgent.ts +466 -466
  13. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +280 -280
  14. package/src/chatgpt/ChatGptCompletionMessageUtil.ts +166 -166
  15. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +122 -122
  16. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  17. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +96 -96
  18. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +311 -311
  19. package/src/chatgpt/ChatGptUsageAggregator.ts +62 -62
  20. package/src/context/AgenticaCancelPrompt.ts +32 -32
  21. package/src/context/AgenticaClassOperation.ts +23 -23
  22. package/src/context/AgenticaContext.ts +130 -130
  23. package/src/context/AgenticaHttpOperation.ts +27 -27
  24. package/src/context/AgenticaOperation.ts +66 -66
  25. package/src/context/AgenticaOperationBase.ts +57 -57
  26. package/src/context/AgenticaOperationCollection.ts +52 -52
  27. package/src/context/AgenticaOperationSelection.ts +27 -27
  28. package/src/context/AgenticaTokenUsage.ts +170 -170
  29. package/src/context/internal/AgenticaTokenUsageAggregator.ts +66 -66
  30. package/src/context/internal/__IChatCancelFunctionsApplication.ts +23 -23
  31. package/src/context/internal/__IChatFunctionReference.ts +21 -21
  32. package/src/context/internal/__IChatInitialApplication.ts +15 -15
  33. package/src/context/internal/__IChatSelectFunctionsApplication.ts +24 -24
  34. package/src/events/AgenticaCallEvent.ts +36 -36
  35. package/src/events/AgenticaCancelEvent.ts +28 -28
  36. package/src/events/AgenticaDescribeEvent.ts +66 -66
  37. package/src/events/AgenticaEvent.ts +36 -36
  38. package/src/events/AgenticaEventBase.ts +7 -7
  39. package/src/events/AgenticaEventSource.ts +6 -6
  40. package/src/events/AgenticaExecuteEvent.ts +50 -50
  41. package/src/events/AgenticaInitializeEvent.ts +14 -14
  42. package/src/events/AgenticaRequestEvent.ts +45 -45
  43. package/src/events/AgenticaResponseEvent.ts +48 -48
  44. package/src/events/AgenticaSelectEvent.ts +37 -37
  45. package/src/events/AgenticaTextEvent.ts +62 -62
  46. package/src/functional/assertHttpLlmApplication.ts +55 -55
  47. package/src/functional/validateHttpLlmApplication.ts +66 -66
  48. package/src/index.ts +44 -44
  49. package/src/internal/AgenticaConstant.ts +4 -4
  50. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  51. package/src/internal/AgenticaOperationComposer.ts +96 -96
  52. package/src/internal/ByteArrayUtil.ts +5 -5
  53. package/src/internal/MPSCUtil.ts +111 -111
  54. package/src/internal/MathUtil.ts +3 -3
  55. package/src/internal/Singleton.ts +22 -22
  56. package/src/internal/StreamUtil.ts +64 -64
  57. package/src/internal/__map_take.ts +15 -15
  58. package/src/json/IAgenticaEventJson.ts +178 -178
  59. package/src/json/IAgenticaOperationJson.ts +36 -36
  60. package/src/json/IAgenticaOperationSelectionJson.ts +19 -19
  61. package/src/json/IAgenticaPromptJson.ts +130 -130
  62. package/src/json/IAgenticaTokenUsageJson.ts +107 -107
  63. package/src/prompts/AgenticaCancelPrompt.ts +32 -32
  64. package/src/prompts/AgenticaDescribePrompt.ts +41 -41
  65. package/src/prompts/AgenticaExecutePrompt.ts +52 -52
  66. package/src/prompts/AgenticaPrompt.ts +14 -14
  67. package/src/prompts/AgenticaPromptBase.ts +27 -27
  68. package/src/prompts/AgenticaSelectPrompt.ts +32 -32
  69. package/src/prompts/AgenticaTextPrompt.ts +31 -31
  70. package/src/structures/IAgenticaConfig.ts +123 -123
  71. package/src/structures/IAgenticaController.ts +133 -133
  72. package/src/structures/IAgenticaExecutor.ts +157 -157
  73. package/src/structures/IAgenticaProps.ts +69 -69
  74. package/src/structures/IAgenticaSystemPrompt.ts +125 -125
  75. package/src/structures/IAgenticaVendor.ts +39 -39
  76. package/src/transformers/AgenticaEventTransformer.ts +165 -165
  77. package/src/transformers/AgenticaPromptTransformer.ts +134 -134
package/src/chatgpt/ChatGptCancelFunctionAgent.ts
@@ -1,280 +1,280 @@
-import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
-import OpenAI from "openai";
-import typia, { IValidation } from "typia";
-import { v4 } from "uuid";
-
-import { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
-import { AgenticaContext } from "../context/AgenticaContext";
-import { AgenticaOperation } from "../context/AgenticaOperation";
-import { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
-import { __IChatCancelFunctionsApplication } from "../context/internal/__IChatCancelFunctionsApplication";
-import { __IChatFunctionReference } from "../context/internal/__IChatFunctionReference";
-import { AgenticaCancelEvent } from "../events/AgenticaCancelEvent";
-import { AgenticaEvent } from "../events/AgenticaEvent";
-import { AgenticaConstant } from "../internal/AgenticaConstant";
-import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
-import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
-import { StreamUtil } from "../internal/StreamUtil";
-import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
-import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
-
-export namespace ChatGptCancelFunctionAgent {
-  export const execute = async <Model extends ILlmSchema.Model>(
-    ctx: AgenticaContext<Model>,
-  ): Promise<AgenticaCancelPrompt<Model>[]> => {
-    if (ctx.operations.divided === undefined)
-      return step(ctx, ctx.operations.array, 0);
-
-    const stacks: AgenticaOperationSelection<Model>[][] =
-      ctx.operations.divided.map(() => []);
-    const events: AgenticaEvent<Model>[] = [];
-    const prompts: AgenticaCancelPrompt<Model>[][] = await Promise.all(
-      ctx.operations.divided.map((operations, i) =>
-        step(
-          {
-            ...ctx,
-            stack: stacks[i]!,
-            dispatch: async (e) => {
-              events.push(e);
-            },
-          },
-          operations,
-          0,
-        ),
-      ),
-    );
-
-    // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
-    if (stacks.every((s) => s.length === 0)) return prompts[0]!;
-    // ELITICISM
-    else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
-      return step(
-        ctx,
-        stacks
-          .flat()
-          .map(
-            (s) =>
-              ctx.operations.group
-                .get(s.operation.controller.name)!
-                .get(s.operation.function.name)!,
-          ),
-        0,
-      );
-
-    // RE-COLLECT SELECT FUNCTION EVENTS
-    const collection: AgenticaCancelPrompt<Model> = new AgenticaCancelPrompt({
-      id: v4(),
-      selections: [],
-    });
-    for (const e of events)
-      if (e.type === "select") {
-        collection.selections.push(e.selection);
-        await cancelFunction(ctx, {
-          name: e.selection.operation.name,
-          reason: e.selection.reason,
-        });
-      }
-    return [collection];
-  };
-
-  export const cancelFunction = async <Model extends ILlmSchema.Model>(
-    ctx: AgenticaContext<Model>,
-    reference: __IChatFunctionReference,
-  ): Promise<AgenticaOperationSelection<Model> | null> => {
-    const index: number = ctx.stack.findIndex(
-      (item) => item.operation.name === reference.name,
-    );
-    if (index === -1) return null;
-
-    const item: AgenticaOperationSelection<Model> = ctx.stack[index]!;
-    ctx.stack.splice(index, 1);
-    await ctx.dispatch(
-      new AgenticaCancelEvent({
-        selection: new AgenticaOperationSelection({
-          operation: item.operation,
-          reason: reference.reason,
-        }),
-      }),
-    );
-    return item;
-  };
-
-  const step = async <Model extends ILlmSchema.Model>(
-    ctx: AgenticaContext<Model>,
-    operations: AgenticaOperation<Model>[],
-    retry: number,
-    failures?: IFailure[],
-  ): Promise<AgenticaCancelPrompt<Model>[]> => {
-    //----
-    // EXECUTE CHATGPT API
-    //----
-    const completionStream = await ctx.request("cancel", {
-      messages: [
-        // COMMON SYSTEM PROMPT
-        {
-          role: "system",
-          content: AgenticaDefaultPrompt.write(ctx.config),
-        } satisfies OpenAI.ChatCompletionSystemMessageParam,
-        // CANDIDATE FUNCTIONS
-        {
-          role: "assistant",
-          tool_calls: [
-            {
-              type: "function",
-              id: "getApiFunctions",
-              function: {
-                name: "getApiFunctions",
-                arguments: JSON.stringify({}),
-              },
-            },
-          ],
-        },
-        {
-          role: "tool",
-          tool_call_id: "getApiFunctions",
-          content: JSON.stringify(
-            operations.map((op) => ({
-              name: op.name,
-              description: op.function.description,
-              ...(op.protocol === "http"
-                ? {
-                    method: op.function.method,
-                    path: op.function.path,
-                    tags: op.function.tags,
-                  }
-                : {}),
-            })),
-          ),
-        },
-        // PREVIOUS HISTORIES
-        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
-        // USER INPUT
-        {
-          role: "user",
-          content: ctx.prompt.text,
-        },
-        // SYSTEM PROMPT
-        {
-          role: "system",
-          content:
-            ctx.config?.systemPrompt?.cancel?.(ctx.histories) ??
-            AgenticaSystemPrompt.CANCEL,
-        },
-        // TYPE CORRECTIONS
-        ...emendMessages(failures ?? []),
-      ],
-      // STACK FUNCTIONS
-      tools: CONTAINER.functions.map(
-        (func) =>
-          ({
-            type: "function",
-            function: {
-              name: func.name,
-              description: func.description,
-              parameters: func.parameters as any,
-            },
-          }) satisfies OpenAI.ChatCompletionTool,
-      ),
-      tool_choice: "auto",
-      parallel_tool_calls: true,
-    });
-
-    const chunks = await StreamUtil.readAll(completionStream);
-    const completion = ChatGptCompletionMessageUtil.merge(chunks);
-
-    //----
-    // VALIDATION
-    //----
-    if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
-      const failures: IFailure[] = [];
-      for (const choice of completion.choices)
-        for (const tc of choice.message.tool_calls ?? []) {
-          if (tc.function.name !== "cancelFunctions") continue;
-          const input: object = JSON.parse(tc.function.arguments);
-          const validation: IValidation<__IChatFunctionReference.IProps> =
-            typia.validate<__IChatFunctionReference.IProps>(input);
-          if (validation.success === false)
-            failures.push({
-              id: tc.id,
-              name: tc.function.name,
-              validation,
-            });
-        }
-      if (failures.length > 0) return step(ctx, operations, retry, failures);
-    }
-
-    //----
-    // PROCESS COMPLETION
-    //----
-    const prompts: AgenticaCancelPrompt<Model>[] = [];
-    for (const choice of completion.choices) {
-      // TOOL CALLING HANDLER
-      if (choice.message.tool_calls)
-        for (const tc of choice.message.tool_calls) {
-          if (tc.type !== "function") continue;
-          const input: __IChatFunctionReference.IProps = JSON.parse(
-            tc.function.arguments,
-          );
-          if (typia.is(input) === false) continue;
-          else if (tc.function.name === "cancelFunctions") {
-            const collection: AgenticaCancelPrompt<Model> =
-              new AgenticaCancelPrompt({
-                id: tc.id,
-                selections: [],
-              });
-            for (const reference of input.functions) {
-              const operation = await cancelFunction(ctx, reference);
-              if (operation !== null) collection.selections.push(operation);
-            }
-            if (collection.selections.length !== 0) prompts.push(collection);
-          }
-        }
-    }
-    return prompts;
-  };
-
-  const emendMessages = (
-    failures: IFailure[],
-  ): OpenAI.ChatCompletionMessageParam[] =>
-    failures
-      .map((f) => [
-        {
-          role: "assistant",
-          tool_calls: [
-            {
-              type: "function",
-              id: f.id,
-              function: {
-                name: f.name,
-                arguments: JSON.stringify(f.validation.data),
-              },
-            },
-          ],
-        } satisfies OpenAI.ChatCompletionAssistantMessageParam,
-        {
-          role: "tool",
-          content: JSON.stringify(f.validation.errors),
-          tool_call_id: f.id,
-        } satisfies OpenAI.ChatCompletionToolMessageParam,
-        {
-          role: "system",
-          content: [
-            "You A.I. assistant has composed wrong typed arguments.",
-            "",
-            "Correct it at the next function calling.",
-          ].join("\n"),
-        } satisfies OpenAI.ChatCompletionSystemMessageParam,
-      ])
-      .flat();
-}
-
-const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
-  __IChatCancelFunctionsApplication,
-  "chatgpt"
->();
-
-interface IFailure {
-  id: string;
-  name: string;
-  validation: IValidation.IFailure;
-}
+import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
+import OpenAI from "openai";
+import typia, { IValidation } from "typia";
+import { v4 } from "uuid";
+
+import { AgenticaCancelPrompt } from "../context/AgenticaCancelPrompt";
+import { AgenticaContext } from "../context/AgenticaContext";
+import { AgenticaOperation } from "../context/AgenticaOperation";
+import { AgenticaOperationSelection } from "../context/AgenticaOperationSelection";
+import { __IChatCancelFunctionsApplication } from "../context/internal/__IChatCancelFunctionsApplication";
+import { __IChatFunctionReference } from "../context/internal/__IChatFunctionReference";
+import { AgenticaCancelEvent } from "../events/AgenticaCancelEvent";
+import { AgenticaEvent } from "../events/AgenticaEvent";
+import { AgenticaConstant } from "../internal/AgenticaConstant";
+import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
+import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
+import { StreamUtil } from "../internal/StreamUtil";
+import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
+import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+
+export namespace ChatGptCancelFunctionAgent {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: AgenticaContext<Model>,
+  ): Promise<AgenticaCancelPrompt<Model>[]> => {
+    if (ctx.operations.divided === undefined)
+      return step(ctx, ctx.operations.array, 0);
+
+    const stacks: AgenticaOperationSelection<Model>[][] =
+      ctx.operations.divided.map(() => []);
+    const events: AgenticaEvent<Model>[] = [];
+    const prompts: AgenticaCancelPrompt<Model>[][] = await Promise.all(
+      ctx.operations.divided.map((operations, i) =>
+        step(
+          {
+            ...ctx,
+            stack: stacks[i]!,
+            dispatch: async (e) => {
+              events.push(e);
+            },
+          },
+          operations,
+          0,
+        ),
+      ),
+    );
+
+    // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
+    if (stacks.every((s) => s.length === 0)) return prompts[0]!;
+    // ELITICISM
+    else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
+      return step(
+        ctx,
+        stacks
+          .flat()
+          .map(
+            (s) =>
+              ctx.operations.group
+                .get(s.operation.controller.name)!
+                .get(s.operation.function.name)!,
+          ),
+        0,
+      );
+
+    // RE-COLLECT SELECT FUNCTION EVENTS
+    const collection: AgenticaCancelPrompt<Model> = new AgenticaCancelPrompt({
+      id: v4(),
+      selections: [],
+    });
+    for (const e of events)
+      if (e.type === "select") {
+        collection.selections.push(e.selection);
+        await cancelFunction(ctx, {
+          name: e.selection.operation.name,
+          reason: e.selection.reason,
+        });
+      }
+    return [collection];
+  };
+
+  export const cancelFunction = async <Model extends ILlmSchema.Model>(
+    ctx: AgenticaContext<Model>,
+    reference: __IChatFunctionReference,
+  ): Promise<AgenticaOperationSelection<Model> | null> => {
+    const index: number = ctx.stack.findIndex(
+      (item) => item.operation.name === reference.name,
+    );
+    if (index === -1) return null;
+
+    const item: AgenticaOperationSelection<Model> = ctx.stack[index]!;
+    ctx.stack.splice(index, 1);
+    await ctx.dispatch(
+      new AgenticaCancelEvent({
+        selection: new AgenticaOperationSelection({
+          operation: item.operation,
+          reason: reference.reason,
+        }),
+      }),
+    );
+    return item;
+  };
+
+  const step = async <Model extends ILlmSchema.Model>(
+    ctx: AgenticaContext<Model>,
+    operations: AgenticaOperation<Model>[],
+    retry: number,
+    failures?: IFailure[],
+  ): Promise<AgenticaCancelPrompt<Model>[]> => {
+    //----
+    // EXECUTE CHATGPT API
+    //----
+    const completionStream = await ctx.request("cancel", {
+      messages: [
+        // COMMON SYSTEM PROMPT
+        {
+          role: "system",
+          content: AgenticaDefaultPrompt.write(ctx.config),
+        } satisfies OpenAI.ChatCompletionSystemMessageParam,
+        // CANDIDATE FUNCTIONS
+        {
+          role: "assistant",
+          tool_calls: [
+            {
+              type: "function",
+              id: "getApiFunctions",
+              function: {
+                name: "getApiFunctions",
+                arguments: JSON.stringify({}),
+              },
+            },
+          ],
+        },
+        {
+          role: "tool",
+          tool_call_id: "getApiFunctions",
+          content: JSON.stringify(
+            operations.map((op) => ({
+              name: op.name,
+              description: op.function.description,
+              ...(op.protocol === "http"
+                ? {
+                    method: op.function.method,
+                    path: op.function.path,
+                    tags: op.function.tags,
+                  }
+                : {}),
+            })),
+          ),
+        },
+        // PREVIOUS HISTORIES
+        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+        // USER INPUT
+        {
+          role: "user",
+          content: ctx.prompt.text,
+        },
+        // SYSTEM PROMPT
+        {
+          role: "system",
+          content:
+            ctx.config?.systemPrompt?.cancel?.(ctx.histories) ??
+            AgenticaSystemPrompt.CANCEL,
+        },
+        // TYPE CORRECTIONS
+        ...emendMessages(failures ?? []),
+      ],
+      // STACK FUNCTIONS
+      tools: CONTAINER.functions.map(
+        (func) =>
+          ({
+            type: "function",
+            function: {
+              name: func.name,
+              description: func.description,
+              parameters: func.parameters as any,
+            },
+          }) satisfies OpenAI.ChatCompletionTool,
+      ),
+      tool_choice: "auto",
+      parallel_tool_calls: true,
+    });
+
+    const chunks = await StreamUtil.readAll(completionStream);
+    const completion = ChatGptCompletionMessageUtil.merge(chunks);
+
+    //----
+    // VALIDATION
+    //----
+    if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
+      const failures: IFailure[] = [];
+      for (const choice of completion.choices)
+        for (const tc of choice.message.tool_calls ?? []) {
+          if (tc.function.name !== "cancelFunctions") continue;
+          const input: object = JSON.parse(tc.function.arguments);
+          const validation: IValidation<__IChatFunctionReference.IProps> =
+            typia.validate<__IChatFunctionReference.IProps>(input);
+          if (validation.success === false)
+            failures.push({
+              id: tc.id,
+              name: tc.function.name,
+              validation,
+            });
+        }
+      if (failures.length > 0) return step(ctx, operations, retry, failures);
+    }
+
+    //----
+    // PROCESS COMPLETION
+    //----
+    const prompts: AgenticaCancelPrompt<Model>[] = [];
+    for (const choice of completion.choices) {
+      // TOOL CALLING HANDLER
+      if (choice.message.tool_calls)
+        for (const tc of choice.message.tool_calls) {
+          if (tc.type !== "function") continue;
+          const input: __IChatFunctionReference.IProps = JSON.parse(
+            tc.function.arguments,
+          );
+          if (typia.is(input) === false) continue;
+          else if (tc.function.name === "cancelFunctions") {
+            const collection: AgenticaCancelPrompt<Model> =
+              new AgenticaCancelPrompt({
+                id: tc.id,
+                selections: [],
+              });
+            for (const reference of input.functions) {
+              const operation = await cancelFunction(ctx, reference);
+              if (operation !== null) collection.selections.push(operation);
+            }
+            if (collection.selections.length !== 0) prompts.push(collection);
+          }
+        }
+    }
+    return prompts;
+  };
+
+  const emendMessages = (
+    failures: IFailure[],
+  ): OpenAI.ChatCompletionMessageParam[] =>
+    failures
+      .map((f) => [
+        {
+          role: "assistant",
+          tool_calls: [
+            {
+              type: "function",
+              id: f.id,
+              function: {
+                name: f.name,
+                arguments: JSON.stringify(f.validation.data),
+              },
+            },
+          ],
+        } satisfies OpenAI.ChatCompletionAssistantMessageParam,
+        {
+          role: "tool",
+          content: JSON.stringify(f.validation.errors),
+          tool_call_id: f.id,
+        } satisfies OpenAI.ChatCompletionToolMessageParam,
+        {
+          role: "system",
+          content: [
+            "You A.I. assistant has composed wrong typed arguments.",
+            "",
+            "Correct it at the next function calling.",
+          ].join("\n"),
+        } satisfies OpenAI.ChatCompletionSystemMessageParam,
+      ])
+      .flat();
+}
+
+const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
+  __IChatCancelFunctionsApplication,
+  "chatgpt"
+>();
+
+interface IFailure {
+  id: string;
+  name: string;
+  validation: IValidation.IFailure;
+}