@agentica/core 0.9.0-dev.20250302 → 0.9.0

This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (46)
  1. package/LICENSE +21 -21
  2. package/README.md +419 -419
  3. package/package.json +1 -1
  4. package/prompts/cancel.md +4 -4
  5. package/prompts/common.md +2 -2
  6. package/prompts/describe.md +6 -6
  7. package/prompts/execute.md +6 -6
  8. package/prompts/initialize.md +2 -2
  9. package/prompts/select.md +6 -6
  10. package/src/Agentica.ts +323 -323
  11. package/src/chatgpt/ChatGptAgent.ts +75 -75
  12. package/src/chatgpt/ChatGptCallFunctionAgent.ts +448 -448
  13. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +287 -287
  14. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +52 -52
  15. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  16. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  17. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +319 -319
  18. package/src/functional/createHttpLlmApplication.ts +63 -63
  19. package/src/index.ts +19 -19
  20. package/src/internal/AgenticaConstant.ts +4 -4
  21. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  22. package/src/internal/AgenticaOperationComposer.ts +87 -87
  23. package/src/internal/AgenticaPromptFactory.ts +32 -32
  24. package/src/internal/AgenticaPromptTransformer.ts +86 -86
  25. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -115
  26. package/src/internal/MathUtil.ts +3 -3
  27. package/src/internal/Singleton.ts +22 -22
  28. package/src/internal/__map_take.ts +15 -15
  29. package/src/structures/IAgenticaConfig.ts +123 -123
  30. package/src/structures/IAgenticaContext.ts +129 -129
  31. package/src/structures/IAgenticaController.ts +132 -132
  32. package/src/structures/IAgenticaEvent.ts +229 -229
  33. package/src/structures/IAgenticaExecutor.ts +156 -156
  34. package/src/structures/IAgenticaOperation.ts +64 -64
  35. package/src/structures/IAgenticaOperationCollection.ts +52 -52
  36. package/src/structures/IAgenticaOperationSelection.ts +69 -69
  37. package/src/structures/IAgenticaPrompt.ts +178 -178
  38. package/src/structures/IAgenticaProps.ts +70 -70
  39. package/src/structures/IAgenticaProvider.ts +39 -39
  40. package/src/structures/IAgenticaSystemPrompt.ts +124 -124
  41. package/src/structures/IAgenticaTokenUsage.ts +107 -107
  42. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  43. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  44. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  45. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  46. package/src/typings/AgenticaSource.ts +6 -6
package/src/chatgpt/ChatGptCallFunctionAgent.ts
@@ -1,448 +1,448 @@
The hunk removes and re-adds all 448 lines of this file with identical content, so the file is listed once:
import {
  ChatGptTypeChecker,
  HttpLlm,
  IChatGptSchema,
  IHttpMigrateRoute,
  IHttpResponse,
  ILlmSchema,
} from "@samchon/openapi";
import OpenAI from "openai";
import { IValidation } from "typia";

import { AgenticaConstant } from "../internal/AgenticaConstant";
import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
import { IAgenticaContext } from "../structures/IAgenticaContext";
import { IAgenticaEvent } from "../structures/IAgenticaEvent";
import { IAgenticaOperation } from "../structures/IAgenticaOperation";
import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";

export namespace ChatGptCallFunctionAgent {
  export const execute = async <Model extends ILlmSchema.Model>(
    ctx: IAgenticaContext<Model>,
  ): Promise<IAgenticaPrompt<Model>[]> => {
    //----
    // EXECUTE CHATGPT API
    //----
    const completion: OpenAI.ChatCompletion = await ctx.request("call", {
      messages: [
        // COMMON SYSTEM PROMPT
        {
          role: "system",
          content: AgenticaDefaultPrompt.write(ctx.config),
        } satisfies OpenAI.ChatCompletionSystemMessageParam,
        // PREVIOUS HISTORIES
        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
        // USER INPUT
        {
          role: "user",
          content: ctx.prompt.text,
        },
        // SYSTEM PROMPT
        {
          role: "system",
          content:
            ctx.config?.systemPrompt?.execute?.(ctx.histories) ??
            AgenticaSystemPrompt.EXECUTE,
        },
      ],
      // STACKED FUNCTIONS
      tools: ctx.stack.map(
        (op) =>
          ({
            type: "function",
            function: {
              name: op.name,
              description: op.function.description,
              parameters: (op.function.separated
                ? (op.function.separated.llm ??
                  ({
                    type: "object",
                    properties: {},
                    required: [],
                    additionalProperties: false,
                    $defs: {},
                  } satisfies IChatGptSchema.IParameters))
                : op.function.parameters) as Record<string, any>,
            },
          }) as OpenAI.ChatCompletionTool,
      ),
      tool_choice: "auto",
      parallel_tool_calls: false,
    });

    //----
    // PROCESS COMPLETION
    //----
    const closures: Array<
      () => Promise<
        Array<
          | IAgenticaPrompt.IExecute<Model>
          | IAgenticaPrompt.ICancel<Model>
          | IAgenticaPrompt.IText
        >
      >
    > = [];
    for (const choice of completion.choices) {
      for (const tc of choice.message.tool_calls ?? []) {
        if (tc.type === "function") {
          const operation: IAgenticaOperation<Model> | undefined =
            ctx.operations.flat.get(tc.function.name);
          if (operation === undefined) continue;
          closures.push(
            async (): Promise<
              [IAgenticaPrompt.IExecute<Model>, IAgenticaPrompt.ICancel<Model>]
            > => {
              const call: IAgenticaEvent.ICall<Model> = {
                type: "call",
                id: tc.id,
                operation,
                arguments: JSON.parse(tc.function.arguments),
              };
              if (call.operation.protocol === "http")
                fillHttpArguments({
                  operation: call.operation,
                  arguments: call.arguments,
                });
              await ctx.dispatch(call);

              const execute: IAgenticaPrompt.IExecute<Model> = await propagate(
                ctx,
                call,
                0,
              );
              await ctx.dispatch({
                type: "execute",
                id: call.id,
                operation: call.operation,
                arguments: execute.arguments,
                value: execute.value,
              });

              await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
                name: call.operation.name,
                reason: "completed",
              });
              await ctx.dispatch({
                type: "cancel",
                operation: call.operation,
                reason: "complete",
              });
              return [
                execute,
                {
                  type: "cancel",
                  id: call.id,
                  operations: [
                    AgenticaPromptFactory.selection({
                      ...call.operation,
                      reason: "complete",
                    }),
                  ],
                } satisfies IAgenticaPrompt.ICancel<Model>,
              ] as const;
            },
          );
        }
      }
      if (
        choice.message.role === "assistant" &&
        !!choice.message.content?.length
      )
        closures.push(async () => {
          const value: IAgenticaPrompt.IText = {
            type: "text",
            role: "assistant",
            text: choice.message.content!,
          };
          await ctx.dispatch(value);
          return [value];
        });
    }
    return (await Promise.all(closures.map((fn) => fn()))).flat();
  };

  const propagate = async <Model extends ILlmSchema.Model>(
    ctx: IAgenticaContext<Model>,
    call: IAgenticaEvent.ICall<Model>,
    retry: number,
  ): Promise<IAgenticaPrompt.IExecute<Model>> => {
    if (call.operation.protocol === "http") {
      //----
      // HTTP PROTOCOL
      //----
      try {
        // CALL HTTP API
        const response: IHttpResponse = call.operation.controller.execute
          ? await call.operation.controller.execute({
              connection: call.operation.controller.connection,
              application: call.operation.controller.application,
              function: call.operation.function,
              arguments: call.arguments,
            })
          : await HttpLlm.propagate({
              connection: call.operation.controller.connection,
              application: call.operation.controller.application,
              function: call.operation.function,
              input: call.arguments,
            });
        // CHECK STATUS
        const success: boolean =
          ((response.status === 400 ||
            response.status === 404 ||
            response.status === 422) &&
            retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY) &&
            typeof response.body) === false;
        // DISPATCH EVENT
        return (
          (success === false
            ? await correct(ctx, call, retry, response.body)
            : null) ??
          (await AgenticaPromptFactory.execute({
            type: "execute",
            protocol: "http",
            controller: call.operation.controller,
            function: call.operation.function,
            id: call.id,
            name: call.operation.name,
            arguments: call.arguments,
            value: response,
          }))
        );
      } catch (error) {
        // DISPATCH ERROR
        return AgenticaPromptFactory.execute({
          type: "execute",
          protocol: "http",
          controller: call.operation.controller,
          function: call.operation.function,
          id: call.id,
          name: call.operation.name,
          arguments: call.arguments,
          value: {
            status: 500,
            headers: {},
            body:
              error instanceof Error
                ? {
                    ...error,
                    name: error.name,
                    message: error.message,
                  }
                : error,
          },
        });
      }
    } else {
      //----
      // CLASS FUNCTION
      //----
      // VALIDATE FIRST
      const check: IValidation<unknown> = call.operation.function.validate(
        call.arguments,
      );
      if (check.success === false)
        return (
          (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
            ? await correct(ctx, call, retry, check.errors)
            : null) ??
          AgenticaPromptFactory.execute({
            type: "execute",
            protocol: "class",
            controller: call.operation.controller,
            function: call.operation.function,
            id: call.id,
            name: call.operation.name,
            arguments: call.arguments,
            value: {
              name: "TypeGuardError",
              message: "Invalid arguments.",
              errors: check.errors,
            },
          })
        );
      // EXECUTE FUNCTION
      try {
        const value: any =
          typeof call.operation.controller.execute === "function"
            ? await call.operation.controller.execute({
                application: call.operation.controller.application,
                function: call.operation.function,
                arguments: call.arguments,
              })
            : await (call.operation.controller.execute as any)[
                call.operation.function.name
              ](call.arguments);
        return AgenticaPromptFactory.execute({
          type: "execute",
          protocol: "class",
          controller: call.operation.controller,
          function: call.operation.function,
          id: call.id,
          name: call.operation.name,
          arguments: call.arguments,
          value,
        });
      } catch (error) {
        return AgenticaPromptFactory.execute({
          type: "execute",
          protocol: "class",
          controller: call.operation.controller,
          function: call.operation.function,
          id: call.id,
          name: call.operation.name,
          arguments: call.arguments,
          value:
            error instanceof Error
              ? {
                  ...error,
                  name: error.name,
                  message: error.message,
                }
              : error,
        });
      }
    }
  };

  const correct = async <Model extends ILlmSchema.Model>(
    ctx: IAgenticaContext<Model>,
    call: IAgenticaEvent.ICall<Model>,
    retry: number,
    error: unknown,
  ): Promise<IAgenticaPrompt.IExecute<Model> | null> => {
    //----
    // EXECUTE CHATGPT API
    //----
    const completion: OpenAI.ChatCompletion = await ctx.request("call", {
      messages: [
        // COMMON SYSTEM PROMPT
        {
          role: "system",
          content: AgenticaDefaultPrompt.write(ctx.config),
        } satisfies OpenAI.ChatCompletionSystemMessageParam,
        // PREVIOUS HISTORIES
        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
        // USER INPUT
        {
          role: "user",
          content: ctx.prompt.text,
        },
        // TYPE CORRECTION
        {
          role: "system",
          content:
            ctx.config?.systemPrompt?.execute?.(ctx.histories) ??
            AgenticaSystemPrompt.EXECUTE,
        },
        {
          role: "assistant",
          tool_calls: [
            {
              type: "function",
              id: call.id,
              function: {
                name: call.operation.name,
                arguments: JSON.stringify(call.arguments),
              },
            } satisfies OpenAI.ChatCompletionMessageToolCall,
          ],
        } satisfies OpenAI.ChatCompletionAssistantMessageParam,
        {
          role: "tool",
          content: typeof error === "string" ? error : JSON.stringify(error),
          tool_call_id: call.id,
        } satisfies OpenAI.ChatCompletionToolMessageParam,
        {
          role: "system",
          content: [
            "You A.I. assistant has composed wrong arguments.",
            "",
            "Correct it at the next function calling.",
          ].join("\n"),
        },
      ],
      // STACK FUNCTIONS
      tools: [
        {
          type: "function",
          function: {
            name: call.operation.name,
            description: call.operation.function.description,
            parameters: (call.operation.function.separated
              ? (call.operation.function.separated?.llm ??
                ({
                  $defs: {},
                  type: "object",
                  properties: {},
                  additionalProperties: false,
                  required: [],
                } satisfies IChatGptSchema.IParameters))
              : call.operation.function.parameters) as any,
          },
        },
      ],
      tool_choice: "auto",
      parallel_tool_calls: false,
    });

    //----
    // PROCESS COMPLETION
    //----
    const toolCall: OpenAI.ChatCompletionMessageToolCall | undefined = (
      completion.choices[0]?.message.tool_calls ?? []
    ).find(
      (tc) =>
        tc.type === "function" && tc.function.name === call.operation.name,
    );
    if (toolCall === undefined) return null;
    return propagate(
      ctx,
      {
        id: toolCall.id,
        type: "call",
        operation: call.operation,
        arguments: JSON.parse(toolCall.function.arguments),
      },
      retry,
    );
  };

  const fillHttpArguments = <Model extends ILlmSchema.Model>(props: {
    operation: IAgenticaOperation<Model>;
    arguments: object;
  }): void => {
    if (props.operation.protocol !== "http") return;
    const route: IHttpMigrateRoute = props.operation.function.route();
    if (
      route.body &&
      route.operation().requestBody?.required === true &&
      (props.arguments as any).body === undefined &&
      isObject(
        (props.operation.function.parameters as IChatGptSchema.IParameters)
          .$defs,
        (props.operation.function.parameters as IChatGptSchema.IParameters)
          .properties.body!,
      )
    )
      (props.arguments as any).body = {};
    if (route.query && (props.arguments as any).query === undefined)
      (props.arguments as any).query = {};
  };

  const isObject = (
    $defs: Record<string, IChatGptSchema>,
    schema: IChatGptSchema,
  ): boolean => {
    return (
      ChatGptTypeChecker.isObject(schema) ||
      (ChatGptTypeChecker.isReference(schema) &&
        isObject($defs, $defs[schema.$ref.split("/").at(-1)!]!)) ||
      (ChatGptTypeChecker.isAnyOf(schema) &&
        schema.anyOf.every((schema) => isObject($defs, schema)))
    );
  };
}
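For readers tracing the class-protocol branch of `propagate`, the sketch below isolates its dispatch decision: when `controller.execute` is a function, it receives the whole call descriptor; otherwise the method matching the called function's name is invoked on the `execute` object. This is a minimal standalone illustration rather than the package's API: the `ClassExecutor` type, `dispatchClassCall` helper, and `calculator` object are assumptions, and the real code also passes the controller's `application` (and, for HTTP controllers, its `connection`).

// Minimal sketch of the class-protocol dispatch shown in `propagate` above.
// All names here are illustrative assumptions, not @agentica/core exports.
type ClassExecutor =
  | ((props: {
      function: { name: string };
      arguments: object;
    }) => Promise<unknown>)
  | Record<string, (args: object) => Promise<unknown>>;

async function dispatchClassCall(
  execute: ClassExecutor,
  functionName: string,
  args: object,
): Promise<unknown> {
  // Function form: the executor receives the whole call descriptor.
  if (typeof execute === "function")
    return execute({ function: { name: functionName }, arguments: args });
  // Object form: the method named after the tool call is invoked directly.
  return execute[functionName]!(args);
}

// Usage: an object-style executor whose method names match the tool names.
const calculator = {
  async add(args: object): Promise<unknown> {
    const { x, y } = args as { x: number; y: number };
    return x + y;
  },
};

void dispatchClassCall(calculator, "add", { x: 1, y: 2 }).then(console.log); // 3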