@agentica/core 0.10.1-dev.20250302 → 0.10.1
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/LICENSE +21 -21
- package/README.md +419 -419
- package/package.json +1 -1
- package/prompts/cancel.md +4 -4
- package/prompts/common.md +2 -2
- package/prompts/describe.md +6 -6
- package/prompts/execute.md +6 -6
- package/prompts/initialize.md +2 -2
- package/prompts/select.md +6 -6
- package/src/Agentica.ts +323 -323
- package/src/chatgpt/ChatGptAgent.ts +75 -75
- package/src/chatgpt/ChatGptCallFunctionAgent.ts +464 -464
- package/src/chatgpt/ChatGptCancelFunctionAgent.ts +287 -287
- package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +52 -52
- package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
- package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
- package/src/chatgpt/ChatGptSelectFunctionAgent.ts +319 -319
- package/src/functional/createHttpLlmApplication.ts +63 -63
- package/src/index.ts +19 -19
- package/src/internal/AgenticaConstant.ts +4 -4
- package/src/internal/AgenticaDefaultPrompt.ts +43 -43
- package/src/internal/AgenticaOperationComposer.ts +87 -87
- package/src/internal/AgenticaPromptFactory.ts +32 -32
- package/src/internal/AgenticaPromptTransformer.ts +86 -86
- package/src/internal/AgenticaTokenUsageAggregator.ts +115 -115
- package/src/internal/MathUtil.ts +3 -3
- package/src/internal/Singleton.ts +22 -22
- package/src/internal/__map_take.ts +15 -15
- package/src/structures/IAgenticaConfig.ts +123 -123
- package/src/structures/IAgenticaContext.ts +129 -129
- package/src/structures/IAgenticaController.ts +133 -133
- package/src/structures/IAgenticaEvent.ts +229 -229
- package/src/structures/IAgenticaExecutor.ts +156 -156
- package/src/structures/IAgenticaOperation.ts +63 -63
- package/src/structures/IAgenticaOperationCollection.ts +52 -52
- package/src/structures/IAgenticaOperationSelection.ts +68 -68
- package/src/structures/IAgenticaPrompt.ts +182 -182
- package/src/structures/IAgenticaProps.ts +70 -70
- package/src/structures/IAgenticaSystemPrompt.ts +124 -124
- package/src/structures/IAgenticaTokenUsage.ts +107 -107
- package/src/structures/IAgenticaVendor.ts +39 -39
- package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
- package/src/structures/internal/__IChatFunctionReference.ts +21 -21
- package/src/structures/internal/__IChatInitialApplication.ts +15 -15
- package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
- package/src/typings/AgenticaSource.ts +6 -6
--- package/src/chatgpt/ChatGptCallFunctionAgent.ts
+++ package/src/chatgpt/ChatGptCallFunctionAgent.ts
@@ -1,464 +1,464 @@
-import {
-  ChatGptTypeChecker,
-  HttpLlm,
-  IChatGptSchema,
-  IHttpMigrateRoute,
-  IHttpResponse,
-  ILlmSchema,
-} from "@samchon/openapi";
-import OpenAI from "openai";
-import { IValidation } from "typia";
-
-import { AgenticaConstant } from "../internal/AgenticaConstant";
-import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
-import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
-import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
-import { IAgenticaContext } from "../structures/IAgenticaContext";
-import { IAgenticaEvent } from "../structures/IAgenticaEvent";
-import { IAgenticaOperation } from "../structures/IAgenticaOperation";
-import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
-import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
-import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
-
-export namespace ChatGptCallFunctionAgent {
-  export const execute = async <Model extends ILlmSchema.Model>(
-    ctx: IAgenticaContext<Model>,
-  ): Promise<IAgenticaPrompt<Model>[]> => {
-    //----
-    // EXECUTE CHATGPT API
-    //----
-    const completion: OpenAI.ChatCompletion = await ctx.request("call", {
-      messages: [
-        // COMMON SYSTEM PROMPT
-        {
-          role: "system",
-          content: AgenticaDefaultPrompt.write(ctx.config),
-        } satisfies OpenAI.ChatCompletionSystemMessageParam,
-        // PREVIOUS HISTORIES
-        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
-        // USER INPUT
-        {
-          role: "user",
-          content: ctx.prompt.text,
-        },
-        // SYSTEM PROMPT
-        {
-          role: "system",
-          content:
-            ctx.config?.systemPrompt?.execute?.(ctx.histories) ??
-            AgenticaSystemPrompt.EXECUTE,
-        },
-      ],
-      // STACKED FUNCTIONS
-      tools: ctx.stack.map(
-        (op) =>
-          ({
-            type: "function",
-            function: {
-              name: op.name,
-              description: op.function.description,
-              parameters: (op.function.separated
-                ? (op.function.separated.llm ??
-                  ({
-                    type: "object",
-                    properties: {},
-                    required: [],
-                    additionalProperties: false,
-                    $defs: {},
-                  } satisfies IChatGptSchema.IParameters))
-                : op.function.parameters) as Record<string, any>,
-            },
-          }) as OpenAI.ChatCompletionTool,
-      ),
-      tool_choice: "auto",
-      parallel_tool_calls: false,
-    });
-
-    //----
-    // PROCESS COMPLETION
-    //----
-    const closures: Array<
-      () => Promise<
-        Array<
-          | IAgenticaPrompt.IExecute<Model>
-          | IAgenticaPrompt.ICancel<Model>
-          | IAgenticaPrompt.IText
-        >
-      >
-    > = [];
-    for (const choice of completion.choices) {
-      for (const tc of choice.message.tool_calls ?? []) {
-        if (tc.type === "function") {
-          const operation: IAgenticaOperation<Model> | undefined =
-            ctx.operations.flat.get(tc.function.name);
-          if (operation === undefined) continue;
-          closures.push(
-            async (): Promise<
-              [IAgenticaPrompt.IExecute<Model>, IAgenticaPrompt.ICancel<Model>]
-            > => {
-              const call: IAgenticaEvent.ICall<Model> = {
-                type: "call",
-                id: tc.id,
-                operation,
-                arguments: JSON.parse(tc.function.arguments),
-              };
-              if (call.operation.protocol === "http")
-                fillHttpArguments({
-                  operation: call.operation,
-                  arguments: call.arguments,
-                });
-              await ctx.dispatch(call);
-
-              const execute: IAgenticaPrompt.IExecute<Model> = await propagate(
-                ctx,
-                call,
-                0,
-              );
-              await ctx.dispatch({
-                type: "execute",
-                id: call.id,
-                operation: call.operation,
-                arguments: execute.arguments,
-                value: execute.value,
-              });
-
-              await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
-                name: call.operation.name,
-                reason: "completed",
-              });
-              await ctx.dispatch({
-                type: "cancel",
-                operation: call.operation,
-                reason: "complete",
-              });
-              return [
-                execute,
-                {
-                  type: "cancel",
-                  id: call.id,
-                  operations: [
-                    AgenticaPromptFactory.selection({
-                      ...call.operation,
-                      reason: "complete",
-                    }),
-                  ],
-                } satisfies IAgenticaPrompt.ICancel<Model>,
-              ] as const;
-            },
-          );
-        }
-      }
-      if (
-        choice.message.role === "assistant" &&
-        !!choice.message.content?.length
-      )
-        closures.push(async () => {
-          const value: IAgenticaPrompt.IText = {
-            type: "text",
-            role: "assistant",
-            text: choice.message.content!,
-          };
-          await ctx.dispatch(value);
-          return [value];
-        });
-    }
-    return (await Promise.all(closures.map((fn) => fn()))).flat();
-  };
-
-  const propagate = async <Model extends ILlmSchema.Model>(
-    ctx: IAgenticaContext<Model>,
-    call: IAgenticaEvent.ICall<Model>,
-    retry: number,
-  ): Promise<IAgenticaPrompt.IExecute<Model>> => {
-    if (call.operation.protocol === "http") {
-      //----
-      // HTTP PROTOCOL
-      //----
-      // NESTED VALIDATOR
-      const check: IValidation<unknown> = call.operation.function.validate(
-        call.arguments,
-      );
-      if (
-        check.success === false &&
-        retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
-      ) {
-        const trial: IAgenticaPrompt.IExecute<Model> | null = await correct(
-          ctx,
-          call,
-          retry,
-          check.errors,
-        );
-        if (trial !== null) return trial;
-      }
-      try {
-        // CALL HTTP API
-        const response: IHttpResponse = call.operation.controller.execute
-          ? await call.operation.controller.execute({
-              connection: call.operation.controller.connection,
-              application: call.operation.controller.application,
-              function: call.operation.function,
-              arguments: call.arguments,
-            })
-          : await HttpLlm.propagate({
-              connection: call.operation.controller.connection,
-              application: call.operation.controller.application,
-              function: call.operation.function,
-              input: call.arguments,
-            });
-        // CHECK STATUS
-        const success: boolean =
-          ((response.status === 400 ||
-            response.status === 404 ||
-            response.status === 422) &&
-            retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY) &&
-            typeof response.body) === false;
-        // DISPATCH EVENT
-        return (
-          (success === false
-            ? await correct(ctx, call, retry, response.body)
-            : null) ??
-          (await AgenticaPromptFactory.execute({
-            type: "execute",
-            protocol: "http",
-            controller: call.operation.controller,
-            function: call.operation.function,
-            id: call.id,
-            name: call.operation.name,
-            arguments: call.arguments,
-            value: response,
-          }))
-        );
-      } catch (error) {
-        // DISPATCH ERROR
-        return AgenticaPromptFactory.execute({
-          type: "execute",
-          protocol: "http",
-          controller: call.operation.controller,
-          function: call.operation.function,
-          id: call.id,
-          name: call.operation.name,
-          arguments: call.arguments,
-          value: {
-            status: 500,
-            headers: {},
-            body:
-              error instanceof Error
-                ? {
-                    ...error,
-                    name: error.name,
-                    message: error.message,
-                  }
-                : error,
-          },
-        });
-      }
-    } else {
-      //----
-      // CLASS FUNCTION
-      //----
-      // VALIDATE FIRST
-      const check: IValidation<unknown> = call.operation.function.validate(
-        call.arguments,
-      );
-      if (check.success === false)
-        return (
-          (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
-            ? await correct(ctx, call, retry, check.errors)
-            : null) ??
-          AgenticaPromptFactory.execute({
-            type: "execute",
-            protocol: "class",
-            controller: call.operation.controller,
-            function: call.operation.function,
-            id: call.id,
-            name: call.operation.name,
-            arguments: call.arguments,
-            value: {
-              name: "TypeGuardError",
-              message: "Invalid arguments.",
-              errors: check.errors,
-            },
-          })
-        );
-      // EXECUTE FUNCTION
-      try {
-        const value: any =
-          typeof call.operation.controller.execute === "function"
-            ? await call.operation.controller.execute({
-                application: call.operation.controller.application,
-                function: call.operation.function,
-                arguments: call.arguments,
-              })
-            : await (call.operation.controller.execute as any)[
-                call.operation.function.name
-              ](call.arguments);
-        return AgenticaPromptFactory.execute({
-          type: "execute",
-          protocol: "class",
-          controller: call.operation.controller,
-          function: call.operation.function,
-          id: call.id,
-          name: call.operation.name,
-          arguments: call.arguments,
-          value,
-        });
-      } catch (error) {
-        return AgenticaPromptFactory.execute({
-          type: "execute",
-          protocol: "class",
-          controller: call.operation.controller,
-          function: call.operation.function,
-          id: call.id,
-          name: call.operation.name,
-          arguments: call.arguments,
-          value:
-            error instanceof Error
-              ? {
-                  ...error,
-                  name: error.name,
-                  message: error.message,
-                }
-              : error,
-        });
-      }
-    }
-  };
-
-  const correct = async <Model extends ILlmSchema.Model>(
-    ctx: IAgenticaContext<Model>,
-    call: IAgenticaEvent.ICall<Model>,
-    retry: number,
-    error: unknown,
-  ): Promise<IAgenticaPrompt.IExecute<Model> | null> => {
-    //----
-    // EXECUTE CHATGPT API
-    //----
-    const completion: OpenAI.ChatCompletion = await ctx.request("call", {
-      messages: [
-        // COMMON SYSTEM PROMPT
-        {
-          role: "system",
-          content: AgenticaDefaultPrompt.write(ctx.config),
-        } satisfies OpenAI.ChatCompletionSystemMessageParam,
-        // PREVIOUS HISTORIES
-        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
-        // USER INPUT
-        {
-          role: "user",
-          content: ctx.prompt.text,
-        },
-        // TYPE CORRECTION
-        {
-          role: "system",
-          content:
-            ctx.config?.systemPrompt?.execute?.(ctx.histories) ??
-            AgenticaSystemPrompt.EXECUTE,
-        },
-        {
-          role: "assistant",
-          tool_calls: [
-            {
-              type: "function",
-              id: call.id,
-              function: {
-                name: call.operation.name,
-                arguments: JSON.stringify(call.arguments),
-              },
-            } satisfies OpenAI.ChatCompletionMessageToolCall,
-          ],
-        } satisfies OpenAI.ChatCompletionAssistantMessageParam,
-        {
-          role: "tool",
-          content: typeof error === "string" ? error : JSON.stringify(error),
-          tool_call_id: call.id,
-        } satisfies OpenAI.ChatCompletionToolMessageParam,
-        {
-          role: "system",
-          content: [
-            "You A.I. assistant has composed wrong arguments.",
-            "",
-            "Correct it at the next function calling.",
-          ].join("\n"),
-        },
-      ],
-      // STACK FUNCTIONS
-      tools: [
-        {
-          type: "function",
-          function: {
-            name: call.operation.name,
-            description: call.operation.function.description,
-            parameters: (call.operation.function.separated
-              ? (call.operation.function.separated?.llm ??
-                ({
-                  $defs: {},
-                  type: "object",
-                  properties: {},
-                  additionalProperties: false,
-                  required: [],
-                } satisfies IChatGptSchema.IParameters))
-              : call.operation.function.parameters) as any,
-          },
-        },
-      ],
-      tool_choice: "auto",
-      parallel_tool_calls: false,
-    });
-
-    //----
-    // PROCESS COMPLETION
-    //----
-    const toolCall: OpenAI.ChatCompletionMessageToolCall | undefined = (
-      completion.choices[0]?.message.tool_calls ?? []
-    ).find(
-      (tc) =>
-        tc.type === "function" && tc.function.name === call.operation.name,
-    );
-    if (toolCall === undefined) return null;
-    return propagate(
-      ctx,
-      {
-        id: toolCall.id,
-        type: "call",
-        operation: call.operation,
-        arguments: JSON.parse(toolCall.function.arguments),
-      },
-      retry,
-    );
-  };
-
-  const fillHttpArguments = <Model extends ILlmSchema.Model>(props: {
-    operation: IAgenticaOperation<Model>;
-    arguments: object;
-  }): void => {
-    if (props.operation.protocol !== "http") return;
-    const route: IHttpMigrateRoute = props.operation.function.route();
-    if (
-      route.body &&
-      route.operation().requestBody?.required === true &&
-      (props.arguments as any).body === undefined &&
-      isObject(
-        (props.operation.function.parameters as IChatGptSchema.IParameters)
-          .$defs,
-        (props.operation.function.parameters as IChatGptSchema.IParameters)
-          .properties.body!,
-      )
-    )
-      (props.arguments as any).body = {};
-    if (route.query && (props.arguments as any).query === undefined)
-      (props.arguments as any).query = {};
-  };
-
-  const isObject = (
-    $defs: Record<string, IChatGptSchema>,
-    schema: IChatGptSchema,
-  ): boolean => {
-    return (
-      ChatGptTypeChecker.isObject(schema) ||
-      (ChatGptTypeChecker.isReference(schema) &&
-        isObject($defs, $defs[schema.$ref.split("/").at(-1)!]!)) ||
-      (ChatGptTypeChecker.isAnyOf(schema) &&
-        schema.anyOf.every((schema) => isObject($defs, schema)))
-    );
-  };
-}
+import {
+  ChatGptTypeChecker,
+  HttpLlm,
+  IChatGptSchema,
+  IHttpMigrateRoute,
+  IHttpResponse,
+  ILlmSchema,
+} from "@samchon/openapi";
+import OpenAI from "openai";
+import { IValidation } from "typia";
+
+import { AgenticaConstant } from "../internal/AgenticaConstant";
+import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
+import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
+import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
+import { IAgenticaContext } from "../structures/IAgenticaContext";
+import { IAgenticaEvent } from "../structures/IAgenticaEvent";
+import { IAgenticaOperation } from "../structures/IAgenticaOperation";
+import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
+import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
+import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+
+export namespace ChatGptCallFunctionAgent {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+  ): Promise<IAgenticaPrompt<Model>[]> => {
+    //----
+    // EXECUTE CHATGPT API
+    //----
+    const completion: OpenAI.ChatCompletion = await ctx.request("call", {
+      messages: [
+        // COMMON SYSTEM PROMPT
+        {
+          role: "system",
+          content: AgenticaDefaultPrompt.write(ctx.config),
+        } satisfies OpenAI.ChatCompletionSystemMessageParam,
+        // PREVIOUS HISTORIES
+        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+        // USER INPUT
+        {
+          role: "user",
+          content: ctx.prompt.text,
+        },
+        // SYSTEM PROMPT
+        {
+          role: "system",
+          content:
+            ctx.config?.systemPrompt?.execute?.(ctx.histories) ??
+            AgenticaSystemPrompt.EXECUTE,
+        },
+      ],
+      // STACKED FUNCTIONS
+      tools: ctx.stack.map(
+        (op) =>
+          ({
+            type: "function",
+            function: {
+              name: op.name,
+              description: op.function.description,
+              parameters: (op.function.separated
+                ? (op.function.separated.llm ??
+                  ({
+                    type: "object",
+                    properties: {},
+                    required: [],
+                    additionalProperties: false,
+                    $defs: {},
+                  } satisfies IChatGptSchema.IParameters))
+                : op.function.parameters) as Record<string, any>,
+            },
+          }) as OpenAI.ChatCompletionTool,
+      ),
+      tool_choice: "auto",
+      parallel_tool_calls: false,
+    });
+
+    //----
+    // PROCESS COMPLETION
+    //----
+    const closures: Array<
+      () => Promise<
+        Array<
+          | IAgenticaPrompt.IExecute<Model>
+          | IAgenticaPrompt.ICancel<Model>
+          | IAgenticaPrompt.IText
+        >
+      >
+    > = [];
+    for (const choice of completion.choices) {
+      for (const tc of choice.message.tool_calls ?? []) {
+        if (tc.type === "function") {
+          const operation: IAgenticaOperation<Model> | undefined =
+            ctx.operations.flat.get(tc.function.name);
+          if (operation === undefined) continue;
+          closures.push(
+            async (): Promise<
+              [IAgenticaPrompt.IExecute<Model>, IAgenticaPrompt.ICancel<Model>]
+            > => {
+              const call: IAgenticaEvent.ICall<Model> = {
+                type: "call",
+                id: tc.id,
+                operation,
+                arguments: JSON.parse(tc.function.arguments),
+              };
+              if (call.operation.protocol === "http")
+                fillHttpArguments({
+                  operation: call.operation,
+                  arguments: call.arguments,
+                });
+              await ctx.dispatch(call);
+
+              const execute: IAgenticaPrompt.IExecute<Model> = await propagate(
+                ctx,
+                call,
+                0,
+              );
+              await ctx.dispatch({
+                type: "execute",
+                id: call.id,
+                operation: call.operation,
+                arguments: execute.arguments,
+                value: execute.value,
+              });
+
+              await ChatGptCancelFunctionAgent.cancelFunction(ctx, {
+                name: call.operation.name,
+                reason: "completed",
+              });
+              await ctx.dispatch({
+                type: "cancel",
+                operation: call.operation,
+                reason: "complete",
+              });
+              return [
+                execute,
+                {
+                  type: "cancel",
+                  id: call.id,
+                  operations: [
+                    AgenticaPromptFactory.selection({
+                      ...call.operation,
+                      reason: "complete",
+                    }),
+                  ],
+                } satisfies IAgenticaPrompt.ICancel<Model>,
+              ] as const;
+            },
+          );
+        }
+      }
+      if (
+        choice.message.role === "assistant" &&
+        !!choice.message.content?.length
+      )
+        closures.push(async () => {
+          const value: IAgenticaPrompt.IText = {
+            type: "text",
+            role: "assistant",
+            text: choice.message.content!,
+          };
+          await ctx.dispatch(value);
+          return [value];
+        });
+    }
+    return (await Promise.all(closures.map((fn) => fn()))).flat();
+  };
+
+  const propagate = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    call: IAgenticaEvent.ICall<Model>,
+    retry: number,
+  ): Promise<IAgenticaPrompt.IExecute<Model>> => {
+    if (call.operation.protocol === "http") {
+      //----
+      // HTTP PROTOCOL
+      //----
+      // NESTED VALIDATOR
+      const check: IValidation<unknown> = call.operation.function.validate(
+        call.arguments,
+      );
+      if (
+        check.success === false &&
+        retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
+      ) {
+        const trial: IAgenticaPrompt.IExecute<Model> | null = await correct(
+          ctx,
+          call,
+          retry,
+          check.errors,
+        );
+        if (trial !== null) return trial;
+      }
+      try {
+        // CALL HTTP API
+        const response: IHttpResponse = call.operation.controller.execute
+          ? await call.operation.controller.execute({
+              connection: call.operation.controller.connection,
+              application: call.operation.controller.application,
+              function: call.operation.function,
+              arguments: call.arguments,
+            })
+          : await HttpLlm.propagate({
+              connection: call.operation.controller.connection,
+              application: call.operation.controller.application,
+              function: call.operation.function,
+              input: call.arguments,
+            });
+        // CHECK STATUS
+        const success: boolean =
+          ((response.status === 400 ||
+            response.status === 404 ||
+            response.status === 422) &&
+            retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY) &&
+            typeof response.body) === false;
+        // DISPATCH EVENT
+        return (
+          (success === false
+            ? await correct(ctx, call, retry, response.body)
+            : null) ??
+          (await AgenticaPromptFactory.execute({
+            type: "execute",
+            protocol: "http",
+            controller: call.operation.controller,
+            function: call.operation.function,
+            id: call.id,
+            name: call.operation.name,
+            arguments: call.arguments,
+            value: response,
+          }))
+        );
+      } catch (error) {
+        // DISPATCH ERROR
+        return AgenticaPromptFactory.execute({
+          type: "execute",
+          protocol: "http",
+          controller: call.operation.controller,
+          function: call.operation.function,
+          id: call.id,
+          name: call.operation.name,
+          arguments: call.arguments,
+          value: {
+            status: 500,
+            headers: {},
+            body:
+              error instanceof Error
+                ? {
+                    ...error,
+                    name: error.name,
+                    message: error.message,
+                  }
+                : error,
+          },
+        });
+      }
+    } else {
+      //----
+      // CLASS FUNCTION
+      //----
+      // VALIDATE FIRST
+      const check: IValidation<unknown> = call.operation.function.validate(
+        call.arguments,
+      );
+      if (check.success === false)
+        return (
+          (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)
+            ? await correct(ctx, call, retry, check.errors)
+            : null) ??
+          AgenticaPromptFactory.execute({
+            type: "execute",
+            protocol: "class",
+            controller: call.operation.controller,
+            function: call.operation.function,
+            id: call.id,
+            name: call.operation.name,
+            arguments: call.arguments,
+            value: {
+              name: "TypeGuardError",
+              message: "Invalid arguments.",
+              errors: check.errors,
+            },
+          })
+        );
+      // EXECUTE FUNCTION
+      try {
+        const value: any =
+          typeof call.operation.controller.execute === "function"
+            ? await call.operation.controller.execute({
+                application: call.operation.controller.application,
+                function: call.operation.function,
+                arguments: call.arguments,
+              })
+            : await (call.operation.controller.execute as any)[
+                call.operation.function.name
+              ](call.arguments);
+        return AgenticaPromptFactory.execute({
+          type: "execute",
+          protocol: "class",
+          controller: call.operation.controller,
+          function: call.operation.function,
+          id: call.id,
+          name: call.operation.name,
+          arguments: call.arguments,
+          value,
+        });
+      } catch (error) {
+        return AgenticaPromptFactory.execute({
+          type: "execute",
+          protocol: "class",
+          controller: call.operation.controller,
+          function: call.operation.function,
+          id: call.id,
+          name: call.operation.name,
+          arguments: call.arguments,
+          value:
+            error instanceof Error
+              ? {
+                  ...error,
+                  name: error.name,
+                  message: error.message,
+                }
+              : error,
+        });
+      }
+    }
+  };
+
+  const correct = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    call: IAgenticaEvent.ICall<Model>,
+    retry: number,
+    error: unknown,
+  ): Promise<IAgenticaPrompt.IExecute<Model> | null> => {
+    //----
+    // EXECUTE CHATGPT API
+    //----
+    const completion: OpenAI.ChatCompletion = await ctx.request("call", {
+      messages: [
+        // COMMON SYSTEM PROMPT
+        {
+          role: "system",
+          content: AgenticaDefaultPrompt.write(ctx.config),
+        } satisfies OpenAI.ChatCompletionSystemMessageParam,
+        // PREVIOUS HISTORIES
+        ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+        // USER INPUT
+        {
+          role: "user",
+          content: ctx.prompt.text,
+        },
+        // TYPE CORRECTION
+        {
+          role: "system",
+          content:
+            ctx.config?.systemPrompt?.execute?.(ctx.histories) ??
+            AgenticaSystemPrompt.EXECUTE,
+        },
+        {
+          role: "assistant",
+          tool_calls: [
+            {
+              type: "function",
+              id: call.id,
+              function: {
+                name: call.operation.name,
+                arguments: JSON.stringify(call.arguments),
+              },
+            } satisfies OpenAI.ChatCompletionMessageToolCall,
+          ],
+        } satisfies OpenAI.ChatCompletionAssistantMessageParam,
+        {
+          role: "tool",
+          content: typeof error === "string" ? error : JSON.stringify(error),
+          tool_call_id: call.id,
+        } satisfies OpenAI.ChatCompletionToolMessageParam,
+        {
+          role: "system",
+          content: [
+            "You A.I. assistant has composed wrong arguments.",
+            "",
+            "Correct it at the next function calling.",
+          ].join("\n"),
+        },
+      ],
+      // STACK FUNCTIONS
+      tools: [
+        {
+          type: "function",
+          function: {
+            name: call.operation.name,
+            description: call.operation.function.description,
+            parameters: (call.operation.function.separated
+              ? (call.operation.function.separated?.llm ??
+                ({
+                  $defs: {},
+                  type: "object",
+                  properties: {},
+                  additionalProperties: false,
+                  required: [],
+                } satisfies IChatGptSchema.IParameters))
+              : call.operation.function.parameters) as any,
+          },
+        },
+      ],
+      tool_choice: "auto",
+      parallel_tool_calls: false,
+    });
+
+    //----
+    // PROCESS COMPLETION
+    //----
+    const toolCall: OpenAI.ChatCompletionMessageToolCall | undefined = (
+      completion.choices[0]?.message.tool_calls ?? []
+    ).find(
+      (tc) =>
+        tc.type === "function" && tc.function.name === call.operation.name,
+    );
+    if (toolCall === undefined) return null;
+    return propagate(
+      ctx,
+      {
+        id: toolCall.id,
+        type: "call",
+        operation: call.operation,
+        arguments: JSON.parse(toolCall.function.arguments),
+      },
+      retry,
+    );
+  };
+
+  const fillHttpArguments = <Model extends ILlmSchema.Model>(props: {
+    operation: IAgenticaOperation<Model>;
+    arguments: object;
+  }): void => {
+    if (props.operation.protocol !== "http") return;
+    const route: IHttpMigrateRoute = props.operation.function.route();
+    if (
+      route.body &&
+      route.operation().requestBody?.required === true &&
+      (props.arguments as any).body === undefined &&
+      isObject(
+        (props.operation.function.parameters as IChatGptSchema.IParameters)
+          .$defs,
+        (props.operation.function.parameters as IChatGptSchema.IParameters)
+          .properties.body!,
+      )
+    )
+      (props.arguments as any).body = {};
+    if (route.query && (props.arguments as any).query === undefined)
+      (props.arguments as any).query = {};
+  };
+
+  const isObject = (
+    $defs: Record<string, IChatGptSchema>,
+    schema: IChatGptSchema,
+  ): boolean => {
+    return (
+      ChatGptTypeChecker.isObject(schema) ||
+      (ChatGptTypeChecker.isReference(schema) &&
+        isObject($defs, $defs[schema.$ref.split("/").at(-1)!]!)) ||
+      (ChatGptTypeChecker.isAnyOf(schema) &&
+        schema.anyOf.every((schema) => isObject($defs, schema)))
+    );
+  };
+}
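For orientation while reading the hunk above: propagate validates tool-call arguments with typia, asks the model to repair invalid arguments via correct, and gives up after a bounded number of retries. The sketch below restates that loop in isolation under stated assumptions; it is not an API of @agentica/core. Only the typia IValidation type and the idea of a retry bound come from the source, while Operation, runWithCorrection, and this RETRY constant are hypothetical stand-ins.

import { IValidation } from "typia";

// Hypothetical stand-ins; not exported by @agentica/core.
interface Operation<T> {
  validate: (input: unknown) => IValidation<T>; // typia-style validator
  execute: (input: T) => Promise<unknown>; // the actual function call
}

const RETRY: number = 3; // stand-in for AgenticaConstant.RETRY

async function runWithCorrection<T>(
  operation: Operation<T>,
  initial: unknown,
  // e.g. one more LLM round-trip that returns repaired arguments
  correct: (errors: IValidation.IError[]) => Promise<unknown>,
): Promise<unknown> {
  let args: unknown = initial;
  for (let retry = 0; ; ++retry) {
    const check: IValidation<T> = operation.validate(args);
    // valid arguments: run the operation and stop
    if (check.success) return operation.execute(check.data);
    // out of retries: report the failure, like the "TypeGuardError"
    // shaped value produced in the hunk above
    if (retry >= RETRY)
      return {
        name: "TypeGuardError",
        message: "Invalid arguments.",
        errors: check.errors,
      };
    // feed the validation errors back to the model and try again
    args = await correct(check.errors);
  }
}

Under those assumptions the loop ends either with the executed value or with a TypeGuardError-shaped record, mirroring the two outcomes visible in the hunk.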