@agentica/core 0.10.1-dev.20250302 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/LICENSE +21 -21
  2. package/README.md +419 -419
  3. package/package.json +1 -1
  4. package/prompts/cancel.md +4 -4
  5. package/prompts/common.md +2 -2
  6. package/prompts/describe.md +6 -6
  7. package/prompts/execute.md +6 -6
  8. package/prompts/initialize.md +2 -2
  9. package/prompts/select.md +6 -6
  10. package/src/Agentica.ts +323 -323
  11. package/src/chatgpt/ChatGptAgent.ts +75 -75
  12. package/src/chatgpt/ChatGptCallFunctionAgent.ts +464 -464
  13. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +287 -287
  14. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +52 -52
  15. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  16. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  17. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +319 -319
  18. package/src/functional/createHttpLlmApplication.ts +63 -63
  19. package/src/index.ts +19 -19
  20. package/src/internal/AgenticaConstant.ts +4 -4
  21. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  22. package/src/internal/AgenticaOperationComposer.ts +87 -87
  23. package/src/internal/AgenticaPromptFactory.ts +32 -32
  24. package/src/internal/AgenticaPromptTransformer.ts +86 -86
  25. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -115
  26. package/src/internal/MathUtil.ts +3 -3
  27. package/src/internal/Singleton.ts +22 -22
  28. package/src/internal/__map_take.ts +15 -15
  29. package/src/structures/IAgenticaConfig.ts +123 -123
  30. package/src/structures/IAgenticaContext.ts +129 -129
  31. package/src/structures/IAgenticaController.ts +133 -133
  32. package/src/structures/IAgenticaEvent.ts +229 -229
  33. package/src/structures/IAgenticaExecutor.ts +156 -156
  34. package/src/structures/IAgenticaOperation.ts +63 -63
  35. package/src/structures/IAgenticaOperationCollection.ts +52 -52
  36. package/src/structures/IAgenticaOperationSelection.ts +68 -68
  37. package/src/structures/IAgenticaPrompt.ts +182 -182
  38. package/src/structures/IAgenticaProps.ts +70 -70
  39. package/src/structures/IAgenticaSystemPrompt.ts +124 -124
  40. package/src/structures/IAgenticaTokenUsage.ts +107 -107
  41. package/src/structures/IAgenticaVendor.ts +39 -39
  42. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  43. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  44. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  45. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  46. package/src/typings/AgenticaSource.ts +6 -6
@@ -1,287 +1,287 @@
1
- import {
2
- IHttpLlmFunction,
3
- ILlmApplication,
4
- ILlmSchema,
5
- } from "@samchon/openapi";
6
- import OpenAI from "openai";
7
- import typia, { IValidation } from "typia";
8
- import { v4 } from "uuid";
9
-
10
- import { AgenticaConstant } from "../internal/AgenticaConstant";
11
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
12
- import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
13
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
14
- import { IAgenticaContext } from "../structures/IAgenticaContext";
15
- import { IAgenticaController } from "../structures/IAgenticaController";
16
- import { IAgenticaEvent } from "../structures/IAgenticaEvent";
17
- import { IAgenticaOperation } from "../structures/IAgenticaOperation";
18
- import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
19
- import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
20
- import { __IChatCancelFunctionsApplication } from "../structures/internal/__IChatCancelFunctionsApplication";
21
- import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
22
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
23
-
24
- export namespace ChatGptCancelFunctionAgent {
25
- export const execute = async <Model extends ILlmSchema.Model>(
26
- ctx: IAgenticaContext<Model>,
27
- ): Promise<IAgenticaPrompt.ICancel<Model>[]> => {
28
- if (ctx.operations.divided === undefined)
29
- return step(ctx, ctx.operations.array, 0);
30
-
31
- const stacks: IAgenticaOperationSelection<Model>[][] =
32
- ctx.operations.divided.map(() => []);
33
- const events: IAgenticaEvent<Model>[] = [];
34
- const prompts: IAgenticaPrompt.ICancel<Model>[][] = await Promise.all(
35
- ctx.operations.divided.map((operations, i) =>
36
- step(
37
- {
38
- ...ctx,
39
- stack: stacks[i]!,
40
- dispatch: async (e) => {
41
- events.push(e);
42
- },
43
- },
44
- operations,
45
- 0,
46
- ),
47
- ),
48
- );
49
-
50
- // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
51
- if (stacks.every((s) => s.length === 0)) return prompts[0]!;
52
- // ELITICISM
53
- else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
54
- return step(
55
- ctx,
56
- stacks
57
- .flat()
58
- .map(
59
- (s) =>
60
- ctx.operations.group
61
- .get(s.controller.name)!
62
- .get(s.function.name)!,
63
- ),
64
- 0,
65
- );
66
-
67
- // RE-COLLECT SELECT FUNCTION EVENTS
68
- const collection: IAgenticaPrompt.ICancel<Model> = {
69
- id: v4(),
70
- type: "cancel",
71
- operations: [],
72
- };
73
- for (const e of events)
74
- if (e.type === "select") {
75
- collection.operations.push(
76
- AgenticaPromptFactory.selection({
77
- protocol: e.operation.protocol as "http",
78
- controller: e.operation
79
- .controller as IAgenticaController.IHttp<Model>,
80
- function: e.operation.function as IHttpLlmFunction<Model>,
81
- reason: e.reason,
82
- name: e.operation.name,
83
- }),
84
- );
85
- await cancelFunction(ctx, {
86
- name: e.operation.name,
87
- reason: e.reason,
88
- });
89
- }
90
- return [collection];
91
- };
92
-
93
- export const cancelFunction = async <Model extends ILlmSchema.Model>(
94
- ctx: IAgenticaContext<Model>,
95
- reference: __IChatFunctionReference,
96
- ): Promise<IAgenticaOperationSelection<Model> | null> => {
97
- const index: number = ctx.stack.findIndex(
98
- (item) => item.name === reference.name,
99
- );
100
- if (index === -1) return null;
101
-
102
- const item: IAgenticaOperationSelection<Model> = ctx.stack[index]!;
103
- ctx.stack.splice(index, 1);
104
- await ctx.dispatch({
105
- type: "cancel",
106
- operation: item,
107
- reason: reference.reason,
108
- });
109
- return item;
110
- };
111
-
112
- const step = async <Model extends ILlmSchema.Model>(
113
- ctx: IAgenticaContext<Model>,
114
- operations: IAgenticaOperation<Model>[],
115
- retry: number,
116
- failures?: IFailure[],
117
- ): Promise<IAgenticaPrompt.ICancel<Model>[]> => {
118
- //----
119
- // EXECUTE CHATGPT API
120
- //----
121
- const completion: OpenAI.ChatCompletion = await ctx.request("cancel", {
122
- messages: [
123
- // COMMON SYSTEM PROMPT
124
- {
125
- role: "system",
126
- content: AgenticaDefaultPrompt.write(ctx.config),
127
- } satisfies OpenAI.ChatCompletionSystemMessageParam,
128
- // CANDIDATE FUNCTIONS
129
- {
130
- role: "assistant",
131
- tool_calls: [
132
- {
133
- type: "function",
134
- id: "getApiFunctions",
135
- function: {
136
- name: "getApiFunctions",
137
- arguments: JSON.stringify({}),
138
- },
139
- },
140
- ],
141
- },
142
- {
143
- role: "tool",
144
- tool_call_id: "getApiFunctions",
145
- content: JSON.stringify(
146
- operations.map((op) => ({
147
- name: op.name,
148
- description: op.function.description,
149
- ...(op.protocol === "http"
150
- ? {
151
- method: op.function.method,
152
- path: op.function.path,
153
- tags: op.function.tags,
154
- }
155
- : {}),
156
- })),
157
- ),
158
- },
159
- // PREVIOUS HISTORIES
160
- ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
161
- // USER INPUT
162
- {
163
- role: "user",
164
- content: ctx.prompt.text,
165
- },
166
- // SYSTEM PROMPT
167
- {
168
- role: "system",
169
- content:
170
- ctx.config?.systemPrompt?.cancel?.(ctx.histories) ??
171
- AgenticaSystemPrompt.CANCEL,
172
- },
173
- // TYPE CORRECTIONS
174
- ...emendMessages(failures ?? []),
175
- ],
176
- // STACK FUNCTIONS
177
- tools: CONTAINER.functions.map(
178
- (func) =>
179
- ({
180
- type: "function",
181
- function: {
182
- name: func.name,
183
- description: func.description,
184
- parameters: func.parameters as any,
185
- },
186
- }) satisfies OpenAI.ChatCompletionTool,
187
- ),
188
- tool_choice: "auto",
189
- parallel_tool_calls: true,
190
- });
191
-
192
- //----
193
- // VALIDATION
194
- //----
195
- if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
196
- const failures: IFailure[] = [];
197
- for (const choice of completion.choices)
198
- for (const tc of choice.message.tool_calls ?? []) {
199
- if (tc.function.name !== "cancelFunctions") continue;
200
- const input: object = JSON.parse(tc.function.arguments);
201
- const validation: IValidation<__IChatFunctionReference.IProps> =
202
- typia.validate<__IChatFunctionReference.IProps>(input);
203
- if (validation.success === false)
204
- failures.push({
205
- id: tc.id,
206
- name: tc.function.name,
207
- validation,
208
- });
209
- }
210
- if (failures.length > 0) return step(ctx, operations, retry, failures);
211
- }
212
-
213
- //----
214
- // PROCESS COMPLETION
215
- //----
216
- const prompts: IAgenticaPrompt.ICancel<Model>[] = [];
217
- for (const choice of completion.choices) {
218
- // TOOL CALLING HANDLER
219
- if (choice.message.tool_calls)
220
- for (const tc of choice.message.tool_calls) {
221
- if (tc.type !== "function") continue;
222
- const input: __IChatFunctionReference.IProps = JSON.parse(
223
- tc.function.arguments,
224
- );
225
- if (typia.is(input) === false) continue;
226
- else if (tc.function.name === "cancelFunctions") {
227
- const collection: IAgenticaPrompt.ICancel<Model> = {
228
- id: tc.id,
229
- type: "cancel",
230
- operations: [],
231
- };
232
- for (const reference of input.functions) {
233
- const operation = await cancelFunction(ctx, reference);
234
- if (operation !== null) collection.operations.push(operation);
235
- }
236
- if (collection.operations.length !== 0) prompts.push(collection);
237
- }
238
- }
239
- }
240
- return prompts;
241
- };
242
-
243
- const emendMessages = (
244
- failures: IFailure[],
245
- ): OpenAI.ChatCompletionMessageParam[] =>
246
- failures
247
- .map((f) => [
248
- {
249
- role: "assistant",
250
- tool_calls: [
251
- {
252
- type: "function",
253
- id: f.id,
254
- function: {
255
- name: f.name,
256
- arguments: JSON.stringify(f.validation.data),
257
- },
258
- },
259
- ],
260
- } satisfies OpenAI.ChatCompletionAssistantMessageParam,
261
- {
262
- role: "tool",
263
- content: JSON.stringify(f.validation.errors),
264
- tool_call_id: f.id,
265
- } satisfies OpenAI.ChatCompletionToolMessageParam,
266
- {
267
- role: "system",
268
- content: [
269
- "You A.I. assistant has composed wrong typed arguments.",
270
- "",
271
- "Correct it at the next function calling.",
272
- ].join("\n"),
273
- } satisfies OpenAI.ChatCompletionSystemMessageParam,
274
- ])
275
- .flat();
276
- }
277
-
278
- const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
279
- __IChatCancelFunctionsApplication,
280
- "chatgpt"
281
- >();
282
-
283
- interface IFailure {
284
- id: string;
285
- name: string;
286
- validation: IValidation.IFailure;
287
- }
1
+ import {
2
+ IHttpLlmFunction,
3
+ ILlmApplication,
4
+ ILlmSchema,
5
+ } from "@samchon/openapi";
6
+ import OpenAI from "openai";
7
+ import typia, { IValidation } from "typia";
8
+ import { v4 } from "uuid";
9
+
10
+ import { AgenticaConstant } from "../internal/AgenticaConstant";
11
+ import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
12
+ import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
13
+ import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
14
+ import { IAgenticaContext } from "../structures/IAgenticaContext";
15
+ import { IAgenticaController } from "../structures/IAgenticaController";
16
+ import { IAgenticaEvent } from "../structures/IAgenticaEvent";
17
+ import { IAgenticaOperation } from "../structures/IAgenticaOperation";
18
+ import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
19
+ import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
20
+ import { __IChatCancelFunctionsApplication } from "../structures/internal/__IChatCancelFunctionsApplication";
21
+ import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
22
+ import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
23
+
24
+ export namespace ChatGptCancelFunctionAgent {
25
+ export const execute = async <Model extends ILlmSchema.Model>(
26
+ ctx: IAgenticaContext<Model>,
27
+ ): Promise<IAgenticaPrompt.ICancel<Model>[]> => {
28
+ if (ctx.operations.divided === undefined)
29
+ return step(ctx, ctx.operations.array, 0);
30
+
31
+ const stacks: IAgenticaOperationSelection<Model>[][] =
32
+ ctx.operations.divided.map(() => []);
33
+ const events: IAgenticaEvent<Model>[] = [];
34
+ const prompts: IAgenticaPrompt.ICancel<Model>[][] = await Promise.all(
35
+ ctx.operations.divided.map((operations, i) =>
36
+ step(
37
+ {
38
+ ...ctx,
39
+ stack: stacks[i]!,
40
+ dispatch: async (e) => {
41
+ events.push(e);
42
+ },
43
+ },
44
+ operations,
45
+ 0,
46
+ ),
47
+ ),
48
+ );
49
+
50
+ // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
51
+ if (stacks.every((s) => s.length === 0)) return prompts[0]!;
52
+ // ELITICISM
53
+ else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
54
+ return step(
55
+ ctx,
56
+ stacks
57
+ .flat()
58
+ .map(
59
+ (s) =>
60
+ ctx.operations.group
61
+ .get(s.controller.name)!
62
+ .get(s.function.name)!,
63
+ ),
64
+ 0,
65
+ );
66
+
67
+ // RE-COLLECT SELECT FUNCTION EVENTS
68
+ const collection: IAgenticaPrompt.ICancel<Model> = {
69
+ id: v4(),
70
+ type: "cancel",
71
+ operations: [],
72
+ };
73
+ for (const e of events)
74
+ if (e.type === "select") {
75
+ collection.operations.push(
76
+ AgenticaPromptFactory.selection({
77
+ protocol: e.operation.protocol as "http",
78
+ controller: e.operation
79
+ .controller as IAgenticaController.IHttp<Model>,
80
+ function: e.operation.function as IHttpLlmFunction<Model>,
81
+ reason: e.reason,
82
+ name: e.operation.name,
83
+ }),
84
+ );
85
+ await cancelFunction(ctx, {
86
+ name: e.operation.name,
87
+ reason: e.reason,
88
+ });
89
+ }
90
+ return [collection];
91
+ };
92
+
93
+ export const cancelFunction = async <Model extends ILlmSchema.Model>(
94
+ ctx: IAgenticaContext<Model>,
95
+ reference: __IChatFunctionReference,
96
+ ): Promise<IAgenticaOperationSelection<Model> | null> => {
97
+ const index: number = ctx.stack.findIndex(
98
+ (item) => item.name === reference.name,
99
+ );
100
+ if (index === -1) return null;
101
+
102
+ const item: IAgenticaOperationSelection<Model> = ctx.stack[index]!;
103
+ ctx.stack.splice(index, 1);
104
+ await ctx.dispatch({
105
+ type: "cancel",
106
+ operation: item,
107
+ reason: reference.reason,
108
+ });
109
+ return item;
110
+ };
111
+
112
+ const step = async <Model extends ILlmSchema.Model>(
113
+ ctx: IAgenticaContext<Model>,
114
+ operations: IAgenticaOperation<Model>[],
115
+ retry: number,
116
+ failures?: IFailure[],
117
+ ): Promise<IAgenticaPrompt.ICancel<Model>[]> => {
118
+ //----
119
+ // EXECUTE CHATGPT API
120
+ //----
121
+ const completion: OpenAI.ChatCompletion = await ctx.request("cancel", {
122
+ messages: [
123
+ // COMMON SYSTEM PROMPT
124
+ {
125
+ role: "system",
126
+ content: AgenticaDefaultPrompt.write(ctx.config),
127
+ } satisfies OpenAI.ChatCompletionSystemMessageParam,
128
+ // CANDIDATE FUNCTIONS
129
+ {
130
+ role: "assistant",
131
+ tool_calls: [
132
+ {
133
+ type: "function",
134
+ id: "getApiFunctions",
135
+ function: {
136
+ name: "getApiFunctions",
137
+ arguments: JSON.stringify({}),
138
+ },
139
+ },
140
+ ],
141
+ },
142
+ {
143
+ role: "tool",
144
+ tool_call_id: "getApiFunctions",
145
+ content: JSON.stringify(
146
+ operations.map((op) => ({
147
+ name: op.name,
148
+ description: op.function.description,
149
+ ...(op.protocol === "http"
150
+ ? {
151
+ method: op.function.method,
152
+ path: op.function.path,
153
+ tags: op.function.tags,
154
+ }
155
+ : {}),
156
+ })),
157
+ ),
158
+ },
159
+ // PREVIOUS HISTORIES
160
+ ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
161
+ // USER INPUT
162
+ {
163
+ role: "user",
164
+ content: ctx.prompt.text,
165
+ },
166
+ // SYSTEM PROMPT
167
+ {
168
+ role: "system",
169
+ content:
170
+ ctx.config?.systemPrompt?.cancel?.(ctx.histories) ??
171
+ AgenticaSystemPrompt.CANCEL,
172
+ },
173
+ // TYPE CORRECTIONS
174
+ ...emendMessages(failures ?? []),
175
+ ],
176
+ // STACK FUNCTIONS
177
+ tools: CONTAINER.functions.map(
178
+ (func) =>
179
+ ({
180
+ type: "function",
181
+ function: {
182
+ name: func.name,
183
+ description: func.description,
184
+ parameters: func.parameters as any,
185
+ },
186
+ }) satisfies OpenAI.ChatCompletionTool,
187
+ ),
188
+ tool_choice: "auto",
189
+ parallel_tool_calls: true,
190
+ });
191
+
192
+ //----
193
+ // VALIDATION
194
+ //----
195
+ if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
196
+ const failures: IFailure[] = [];
197
+ for (const choice of completion.choices)
198
+ for (const tc of choice.message.tool_calls ?? []) {
199
+ if (tc.function.name !== "cancelFunctions") continue;
200
+ const input: object = JSON.parse(tc.function.arguments);
201
+ const validation: IValidation<__IChatFunctionReference.IProps> =
202
+ typia.validate<__IChatFunctionReference.IProps>(input);
203
+ if (validation.success === false)
204
+ failures.push({
205
+ id: tc.id,
206
+ name: tc.function.name,
207
+ validation,
208
+ });
209
+ }
210
+ if (failures.length > 0) return step(ctx, operations, retry, failures);
211
+ }
212
+
213
+ //----
214
+ // PROCESS COMPLETION
215
+ //----
216
+ const prompts: IAgenticaPrompt.ICancel<Model>[] = [];
217
+ for (const choice of completion.choices) {
218
+ // TOOL CALLING HANDLER
219
+ if (choice.message.tool_calls)
220
+ for (const tc of choice.message.tool_calls) {
221
+ if (tc.type !== "function") continue;
222
+ const input: __IChatFunctionReference.IProps = JSON.parse(
223
+ tc.function.arguments,
224
+ );
225
+ if (typia.is(input) === false) continue;
226
+ else if (tc.function.name === "cancelFunctions") {
227
+ const collection: IAgenticaPrompt.ICancel<Model> = {
228
+ id: tc.id,
229
+ type: "cancel",
230
+ operations: [],
231
+ };
232
+ for (const reference of input.functions) {
233
+ const operation = await cancelFunction(ctx, reference);
234
+ if (operation !== null) collection.operations.push(operation);
235
+ }
236
+ if (collection.operations.length !== 0) prompts.push(collection);
237
+ }
238
+ }
239
+ }
240
+ return prompts;
241
+ };
242
+
243
+ const emendMessages = (
244
+ failures: IFailure[],
245
+ ): OpenAI.ChatCompletionMessageParam[] =>
246
+ failures
247
+ .map((f) => [
248
+ {
249
+ role: "assistant",
250
+ tool_calls: [
251
+ {
252
+ type: "function",
253
+ id: f.id,
254
+ function: {
255
+ name: f.name,
256
+ arguments: JSON.stringify(f.validation.data),
257
+ },
258
+ },
259
+ ],
260
+ } satisfies OpenAI.ChatCompletionAssistantMessageParam,
261
+ {
262
+ role: "tool",
263
+ content: JSON.stringify(f.validation.errors),
264
+ tool_call_id: f.id,
265
+ } satisfies OpenAI.ChatCompletionToolMessageParam,
266
+ {
267
+ role: "system",
268
+ content: [
269
+ "You A.I. assistant has composed wrong typed arguments.",
270
+ "",
271
+ "Correct it at the next function calling.",
272
+ ].join("\n"),
273
+ } satisfies OpenAI.ChatCompletionSystemMessageParam,
274
+ ])
275
+ .flat();
276
+ }
277
+
278
+ const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
279
+ __IChatCancelFunctionsApplication,
280
+ "chatgpt"
281
+ >();
282
+
283
+ interface IFailure {
284
+ id: string;
285
+ name: string;
286
+ validation: IValidation.IFailure;
287
+ }