@agentica/core 0.8.2 → 0.8.3-dev.20250227

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in their public registries.
Files changed (53)
  1. package/LICENSE +21 -21
  2. package/README.md +404 -404
  3. package/lib/Agentica.js +1 -4
  4. package/lib/Agentica.js.map +1 -1
  5. package/lib/index.mjs +8 -13
  6. package/lib/index.mjs.map +1 -1
  7. package/lib/internal/AgenticaTokenUsageAggregator.js +7 -9
  8. package/lib/internal/AgenticaTokenUsageAggregator.js.map +1 -1
  9. package/lib/structures/IAgenticaTokenUsage.d.ts +7 -11
  10. package/package.json +1 -1
  11. package/prompts/cancel.md +4 -4
  12. package/prompts/common.md +2 -2
  13. package/prompts/describe.md +6 -6
  14. package/prompts/execute.md +6 -6
  15. package/prompts/initialize.md +2 -2
  16. package/prompts/select.md +6 -6
  17. package/src/Agentica.ts +318 -322
  18. package/src/chatgpt/ChatGptAgent.ts +71 -71
  19. package/src/chatgpt/ChatGptCallFunctionAgent.ts +445 -445
  20. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +283 -283
  21. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +51 -51
  22. package/src/chatgpt/ChatGptHistoryDecoder.ts +87 -87
  23. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  24. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +318 -318
  25. package/src/functional/createHttpLlmApplication.ts +63 -63
  26. package/src/index.ts +19 -19
  27. package/src/internal/AgenticaConstant.ts +4 -4
  28. package/src/internal/AgenticaDefaultPrompt.ts +39 -39
  29. package/src/internal/AgenticaOperationComposer.ts +82 -82
  30. package/src/internal/AgenticaPromptFactory.ts +30 -30
  31. package/src/internal/AgenticaPromptTransformer.ts +83 -83
  32. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -123
  33. package/src/internal/MathUtil.ts +3 -3
  34. package/src/internal/Singleton.ts +22 -22
  35. package/src/internal/__map_take.ts +15 -15
  36. package/src/structures/IAgenticaConfig.ts +121 -121
  37. package/src/structures/IAgenticaContext.ts +128 -128
  38. package/src/structures/IAgenticaController.ts +130 -130
  39. package/src/structures/IAgenticaEvent.ts +224 -224
  40. package/src/structures/IAgenticaExecutor.ts +152 -152
  41. package/src/structures/IAgenticaOperation.ts +64 -64
  42. package/src/structures/IAgenticaOperationCollection.ts +50 -50
  43. package/src/structures/IAgenticaOperationSelection.ts +69 -69
  44. package/src/structures/IAgenticaPrompt.ts +173 -173
  45. package/src/structures/IAgenticaProps.ts +64 -64
  46. package/src/structures/IAgenticaProvider.ts +45 -45
  47. package/src/structures/IAgenticaSystemPrompt.ts +122 -122
  48. package/src/structures/IAgenticaTokenUsage.ts +107 -112
  49. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  50. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  51. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  52. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  53. package/src/typings/AgenticaSource.ts +6 -6
package/src/chatgpt/ChatGptCancelFunctionAgent.ts
@@ -1,283 +1,283 @@
- import { IHttpLlmFunction, ILlmApplication } from "@samchon/openapi";
- import OpenAI from "openai";
- import typia, { IValidation } from "typia";
- import { v4 } from "uuid";
-
- import { AgenticaConstant } from "../internal/AgenticaConstant";
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { IAgenticaContext } from "../structures/IAgenticaContext";
- import { IAgenticaController } from "../structures/IAgenticaController";
- import { IAgenticaEvent } from "../structures/IAgenticaEvent";
- import { IAgenticaOperation } from "../structures/IAgenticaOperation";
- import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
- import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
- import { __IChatCancelFunctionsApplication } from "../structures/internal/__IChatCancelFunctionsApplication";
- import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
-
- export namespace ChatGptCancelFunctionAgent {
-   export const execute = async (
-     ctx: IAgenticaContext,
-   ): Promise<IAgenticaPrompt.ICancel[]> => {
-     if (ctx.operations.divided === undefined)
-       return step(ctx, ctx.operations.array, 0);
-
-     const stacks: IAgenticaOperationSelection[][] = ctx.operations.divided.map(
-       () => [],
-     );
-     const events: IAgenticaEvent[] = [];
-     const prompts: IAgenticaPrompt.ICancel[][] = await Promise.all(
-       ctx.operations.divided.map((operations, i) =>
-         step(
-           {
-             ...ctx,
-             stack: stacks[i]!,
-             dispatch: async (e) => {
-               events.push(e);
-             },
-           },
-           operations,
-           0,
-         ),
-       ),
-     );
-
-     // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
-     if (stacks.every((s) => s.length === 0)) return prompts[0]!;
-     // ELITICISM
-     else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
-       return step(
-         ctx,
-         stacks
-           .flat()
-           .map(
-             (s) =>
-               ctx.operations.group
-                 .get(s.controller.name)!
-                 .get(s.function.name)!,
-           ),
-         0,
-       );
-
-     // RE-COLLECT SELECT FUNCTION EVENTS
-     const collection: IAgenticaPrompt.ICancel = {
-       id: v4(),
-       type: "cancel",
-       operations: [],
-     };
-     for (const e of events)
-       if (e.type === "select") {
-         collection.operations.push(
-           AgenticaPromptFactory.selection({
-             protocol: e.operation.protocol as "http",
-             controller: e.operation.controller as IAgenticaController.IHttp,
-             function: e.operation.function as IHttpLlmFunction<"chatgpt">,
-             reason: e.reason,
-             name: e.operation.name,
-           }),
-         );
-         await cancelFunction(ctx, {
-           name: e.operation.name,
-           reason: e.reason,
-         });
-       }
-     return [collection];
-   };
-
-   export const cancelFunction = async (
-     ctx: IAgenticaContext,
-     reference: __IChatFunctionReference,
-   ): Promise<IAgenticaOperationSelection | null> => {
-     const index: number = ctx.stack.findIndex(
-       (item) => item.name === reference.name,
-     );
-     if (index === -1) return null;
-
-     const item: IAgenticaOperationSelection = ctx.stack[index]!;
-     ctx.stack.splice(index, 1);
-     await ctx.dispatch({
-       type: "cancel",
-       operation: item,
-       reason: reference.reason,
-     });
-     return item;
-   };
-
-   const step = async (
-     ctx: IAgenticaContext,
-     operations: IAgenticaOperation[],
-     retry: number,
-     failures?: IFailure[],
-   ): Promise<IAgenticaPrompt.ICancel[]> => {
-     //----
-     // EXECUTE CHATGPT API
-     //----
-     const completion: OpenAI.ChatCompletion = await ctx.request("cancel", {
-       messages: [
-         // COMMON SYSTEM PROMPT
-         {
-           role: "system",
-           content: AgenticaDefaultPrompt.write(ctx.config),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-         // CANDIDATE FUNCTIONS
-         {
-           role: "assistant",
-           tool_calls: [
-             {
-               type: "function",
-               id: "getApiFunctions",
-               function: {
-                 name: "getApiFunctions",
-                 arguments: JSON.stringify({}),
-               },
-             },
-           ],
-         },
-         {
-           role: "tool",
-           tool_call_id: "getApiFunctions",
-           content: JSON.stringify(
-             operations.map((op) => ({
-               name: op.name,
-               description: op.function.description,
-               ...(op.protocol === "http"
-                 ? {
-                     method: op.function.method,
-                     path: op.function.path,
-                     tags: op.function.tags,
-                   }
-                 : {}),
-             })),
-           ),
-         },
-         // PREVIOUS HISTORIES
-         ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
-         // USER INPUT
-         {
-           role: "user",
-           content: ctx.prompt.text,
-         },
-         // SYSTEM PROMPT
-         {
-           role: "system",
-           content:
-             ctx.config?.systemPrompt?.cancel?.(ctx.histories) ??
-             AgenticaSystemPrompt.CANCEL,
-         },
-         // TYPE CORRECTIONS
-         ...emendMessages(failures ?? []),
-       ],
-       // STACK FUNCTIONS
-       tools: CONTAINER.functions.map(
-         (func) =>
-           ({
-             type: "function",
-             function: {
-               name: func.name,
-               description: func.description,
-               parameters: func.parameters as any,
-             },
-           }) satisfies OpenAI.ChatCompletionTool,
-       ),
-       tool_choice: "auto",
-       parallel_tool_calls: true,
-     });
-
-     //----
-     // VALIDATION
-     //----
-     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
-       const failures: IFailure[] = [];
-       for (const choice of completion.choices)
-         for (const tc of choice.message.tool_calls ?? []) {
-           if (tc.function.name !== "cancelFunctions") continue;
-           const input: object = JSON.parse(tc.function.arguments);
-           const validation: IValidation<__IChatFunctionReference.IProps> =
-             typia.validate<__IChatFunctionReference.IProps>(input);
-           if (validation.success === false)
-             failures.push({
-               id: tc.id,
-               name: tc.function.name,
-               validation,
-             });
-         }
-       if (failures.length > 0) return step(ctx, operations, retry, failures);
-     }
-
-     //----
-     // PROCESS COMPLETION
-     //----
-     const prompts: IAgenticaPrompt.ICancel[] = [];
-     for (const choice of completion.choices) {
-       // TOOL CALLING HANDLER
-       if (choice.message.tool_calls)
-         for (const tc of choice.message.tool_calls) {
-           if (tc.type !== "function") continue;
-           const input: __IChatFunctionReference.IProps = JSON.parse(
-             tc.function.arguments,
-           );
-           if (typia.is(input) === false) continue;
-           else if (tc.function.name === "cancelFunctions") {
-             const collection: IAgenticaPrompt.ICancel = {
-               id: tc.id,
-               type: "cancel",
-               operations: [],
-             };
-             for (const reference of input.functions) {
-               const operation = await cancelFunction(ctx, reference);
-               if (operation !== null) collection.operations.push(operation);
-             }
-             if (collection.operations.length !== 0) prompts.push(collection);
-           }
-         }
-     }
-     return prompts;
-   };
-
-   const emendMessages = (
-     failures: IFailure[],
-   ): OpenAI.ChatCompletionMessageParam[] =>
-     failures
-       .map((f) => [
-         {
-           role: "assistant",
-           tool_calls: [
-             {
-               type: "function",
-               id: f.id,
-               function: {
-                 name: f.name,
-                 arguments: JSON.stringify(f.validation.data),
-               },
-             },
-           ],
-         } satisfies OpenAI.ChatCompletionAssistantMessageParam,
-         {
-           role: "tool",
-           content: JSON.stringify(f.validation.errors),
-           tool_call_id: f.id,
-         } satisfies OpenAI.ChatCompletionToolMessageParam,
-         {
-           role: "system",
-           content: [
-             "You A.I. assistant has composed wrong typed arguments.",
-             "",
-             "Correct it at the next function calling.",
-           ].join("\n"),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-       ])
-       .flat();
- }
-
- const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
-   __IChatCancelFunctionsApplication,
-   "chatgpt"
- >();
-
- interface IFailure {
-   id: string;
-   name: string;
-   validation: IValidation.IFailure;
- }
+ import { IHttpLlmFunction, ILlmApplication } from "@samchon/openapi";
+ import OpenAI from "openai";
+ import typia, { IValidation } from "typia";
+ import { v4 } from "uuid";
+
+ import { AgenticaConstant } from "../internal/AgenticaConstant";
+ import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
+ import { AgenticaPromptFactory } from "../internal/AgenticaPromptFactory";
+ import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
+ import { IAgenticaContext } from "../structures/IAgenticaContext";
+ import { IAgenticaController } from "../structures/IAgenticaController";
+ import { IAgenticaEvent } from "../structures/IAgenticaEvent";
+ import { IAgenticaOperation } from "../structures/IAgenticaOperation";
+ import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
+ import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
+ import { __IChatCancelFunctionsApplication } from "../structures/internal/__IChatCancelFunctionsApplication";
+ import { __IChatFunctionReference } from "../structures/internal/__IChatFunctionReference";
+ import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+
+ export namespace ChatGptCancelFunctionAgent {
+   export const execute = async (
+     ctx: IAgenticaContext,
+   ): Promise<IAgenticaPrompt.ICancel[]> => {
+     if (ctx.operations.divided === undefined)
+       return step(ctx, ctx.operations.array, 0);
+
+     const stacks: IAgenticaOperationSelection[][] = ctx.operations.divided.map(
+       () => [],
+     );
+     const events: IAgenticaEvent[] = [];
+     const prompts: IAgenticaPrompt.ICancel[][] = await Promise.all(
+       ctx.operations.divided.map((operations, i) =>
+         step(
+           {
+             ...ctx,
+             stack: stacks[i]!,
+             dispatch: async (e) => {
+               events.push(e);
+             },
+           },
+           operations,
+           0,
+         ),
+       ),
+     );
+
+     // NO FUNCTION SELECTION, SO THAT ONLY TEXT LEFT
+     if (stacks.every((s) => s.length === 0)) return prompts[0]!;
+     // ELITICISM
+     else if ((ctx.config?.eliticism ?? AgenticaConstant.ELITICISM) === true)
+       return step(
+         ctx,
+         stacks
+           .flat()
+           .map(
+             (s) =>
+               ctx.operations.group
+                 .get(s.controller.name)!
+                 .get(s.function.name)!,
+           ),
+         0,
+       );
+
+     // RE-COLLECT SELECT FUNCTION EVENTS
+     const collection: IAgenticaPrompt.ICancel = {
+       id: v4(),
+       type: "cancel",
+       operations: [],
+     };
+     for (const e of events)
+       if (e.type === "select") {
+         collection.operations.push(
+           AgenticaPromptFactory.selection({
+             protocol: e.operation.protocol as "http",
+             controller: e.operation.controller as IAgenticaController.IHttp,
+             function: e.operation.function as IHttpLlmFunction<"chatgpt">,
+             reason: e.reason,
+             name: e.operation.name,
+           }),
+         );
+         await cancelFunction(ctx, {
+           name: e.operation.name,
+           reason: e.reason,
+         });
+       }
+     return [collection];
+   };
+
+   export const cancelFunction = async (
+     ctx: IAgenticaContext,
+     reference: __IChatFunctionReference,
+   ): Promise<IAgenticaOperationSelection | null> => {
+     const index: number = ctx.stack.findIndex(
+       (item) => item.name === reference.name,
+     );
+     if (index === -1) return null;
+
+     const item: IAgenticaOperationSelection = ctx.stack[index]!;
+     ctx.stack.splice(index, 1);
+     await ctx.dispatch({
+       type: "cancel",
+       operation: item,
+       reason: reference.reason,
+     });
+     return item;
+   };
+
+   const step = async (
+     ctx: IAgenticaContext,
+     operations: IAgenticaOperation[],
+     retry: number,
+     failures?: IFailure[],
+   ): Promise<IAgenticaPrompt.ICancel[]> => {
+     //----
+     // EXECUTE CHATGPT API
+     //----
+     const completion: OpenAI.ChatCompletion = await ctx.request("cancel", {
+       messages: [
+         // COMMON SYSTEM PROMPT
+         {
+           role: "system",
+           content: AgenticaDefaultPrompt.write(ctx.config),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+         // CANDIDATE FUNCTIONS
+         {
+           role: "assistant",
+           tool_calls: [
+             {
+               type: "function",
+               id: "getApiFunctions",
+               function: {
+                 name: "getApiFunctions",
+                 arguments: JSON.stringify({}),
+               },
+             },
+           ],
+         },
+         {
+           role: "tool",
+           tool_call_id: "getApiFunctions",
+           content: JSON.stringify(
+             operations.map((op) => ({
+               name: op.name,
+               description: op.function.description,
+               ...(op.protocol === "http"
+                 ? {
+                     method: op.function.method,
+                     path: op.function.path,
+                     tags: op.function.tags,
+                   }
+                 : {}),
+             })),
+           ),
+         },
+         // PREVIOUS HISTORIES
+         ...ctx.histories.map(ChatGptHistoryDecoder.decode).flat(),
+         // USER INPUT
+         {
+           role: "user",
+           content: ctx.prompt.text,
+         },
+         // SYSTEM PROMPT
+         {
+           role: "system",
+           content:
+             ctx.config?.systemPrompt?.cancel?.(ctx.histories) ??
+             AgenticaSystemPrompt.CANCEL,
+         },
+         // TYPE CORRECTIONS
+         ...emendMessages(failures ?? []),
+       ],
+       // STACK FUNCTIONS
+       tools: CONTAINER.functions.map(
+         (func) =>
+           ({
+             type: "function",
+             function: {
+               name: func.name,
+               description: func.description,
+               parameters: func.parameters as any,
+             },
+           }) satisfies OpenAI.ChatCompletionTool,
+       ),
+       tool_choice: "auto",
+       parallel_tool_calls: true,
+     });
+
+     //----
+     // VALIDATION
+     //----
+     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
+       const failures: IFailure[] = [];
+       for (const choice of completion.choices)
+         for (const tc of choice.message.tool_calls ?? []) {
+           if (tc.function.name !== "cancelFunctions") continue;
+           const input: object = JSON.parse(tc.function.arguments);
+           const validation: IValidation<__IChatFunctionReference.IProps> =
+             typia.validate<__IChatFunctionReference.IProps>(input);
+           if (validation.success === false)
+             failures.push({
+               id: tc.id,
+               name: tc.function.name,
+               validation,
+             });
+         }
+       if (failures.length > 0) return step(ctx, operations, retry, failures);
+     }
+
+     //----
+     // PROCESS COMPLETION
+     //----
+     const prompts: IAgenticaPrompt.ICancel[] = [];
+     for (const choice of completion.choices) {
+       // TOOL CALLING HANDLER
+       if (choice.message.tool_calls)
+         for (const tc of choice.message.tool_calls) {
+           if (tc.type !== "function") continue;
+           const input: __IChatFunctionReference.IProps = JSON.parse(
+             tc.function.arguments,
+           );
+           if (typia.is(input) === false) continue;
+           else if (tc.function.name === "cancelFunctions") {
+             const collection: IAgenticaPrompt.ICancel = {
+               id: tc.id,
+               type: "cancel",
+               operations: [],
+             };
+             for (const reference of input.functions) {
+               const operation = await cancelFunction(ctx, reference);
+               if (operation !== null) collection.operations.push(operation);
+             }
+             if (collection.operations.length !== 0) prompts.push(collection);
+           }
+         }
+     }
+     return prompts;
+   };
+
+   const emendMessages = (
+     failures: IFailure[],
+   ): OpenAI.ChatCompletionMessageParam[] =>
+     failures
+       .map((f) => [
+         {
+           role: "assistant",
+           tool_calls: [
+             {
+               type: "function",
+               id: f.id,
+               function: {
+                 name: f.name,
+                 arguments: JSON.stringify(f.validation.data),
+               },
+             },
+           ],
+         } satisfies OpenAI.ChatCompletionAssistantMessageParam,
+         {
+           role: "tool",
+           content: JSON.stringify(f.validation.errors),
+           tool_call_id: f.id,
+         } satisfies OpenAI.ChatCompletionToolMessageParam,
+         {
+           role: "system",
+           content: [
+             "You A.I. assistant has composed wrong typed arguments.",
+             "",
+             "Correct it at the next function calling.",
+           ].join("\n"),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+       ])
+       .flat();
+ }
+
+ const CONTAINER: ILlmApplication<"chatgpt"> = typia.llm.application<
+   __IChatCancelFunctionsApplication,
+   "chatgpt"
+ >();
+
+ interface IFailure {
+   id: string;
+   name: string;
+   validation: IValidation.IFailure;
+ }
package/src/chatgpt/ChatGptDescribeFunctionAgent.ts
@@ -1,51 +1,51 @@
- import OpenAI from "openai";
-
- import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
- import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
- import { IAgenticaContext } from "../structures/IAgenticaContext";
- import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
- import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
-
- export namespace ChatGptDescribeFunctionAgent {
-   export const execute = async (
-     ctx: IAgenticaContext,
-     histories: IAgenticaPrompt.IExecute[],
-   ): Promise<IAgenticaPrompt.IDescribe[]> => {
-     if (histories.length === 0) return [];
-     const completion: OpenAI.ChatCompletion = await ctx.request("describe", {
-       messages: [
-         // COMMON SYSTEM PROMPT
-         {
-           role: "system",
-           content: AgenticaDefaultPrompt.write(ctx.config),
-         } satisfies OpenAI.ChatCompletionSystemMessageParam,
-         // FUNCTION CALLING HISTORIES
-         ...histories.map(ChatGptHistoryDecoder.decode).flat(),
-         // SYSTEM PROMPT
-         {
-           role: "system",
-           content:
-             ctx.config?.systemPrompt?.describe?.(histories) ??
-             AgenticaSystemPrompt.DESCRIBE,
-         },
-       ],
-     });
-     const descriptions: IAgenticaPrompt.IDescribe[] = completion.choices
-       .map((choice) =>
-         choice.message.role === "assistant" && !!choice.message.content?.length
-           ? choice.message.content
-           : null,
-       )
-       .filter((str) => str !== null)
-       .map(
-         (content) =>
-           ({
-             type: "describe",
-             executions: histories,
-             text: content,
-           }) satisfies IAgenticaPrompt.IDescribe,
-       );
-     for (const describe of descriptions) await ctx.dispatch(describe);
-     return descriptions;
-   };
- }
+ import OpenAI from "openai";
+
+ import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
+ import { AgenticaSystemPrompt } from "../internal/AgenticaSystemPrompt";
+ import { IAgenticaContext } from "../structures/IAgenticaContext";
+ import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
+ import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";
+
+ export namespace ChatGptDescribeFunctionAgent {
+   export const execute = async (
+     ctx: IAgenticaContext,
+     histories: IAgenticaPrompt.IExecute[],
+   ): Promise<IAgenticaPrompt.IDescribe[]> => {
+     if (histories.length === 0) return [];
+     const completion: OpenAI.ChatCompletion = await ctx.request("describe", {
+       messages: [
+         // COMMON SYSTEM PROMPT
+         {
+           role: "system",
+           content: AgenticaDefaultPrompt.write(ctx.config),
+         } satisfies OpenAI.ChatCompletionSystemMessageParam,
+         // FUNCTION CALLING HISTORIES
+         ...histories.map(ChatGptHistoryDecoder.decode).flat(),
+         // SYSTEM PROMPT
+         {
+           role: "system",
+           content:
+             ctx.config?.systemPrompt?.describe?.(histories) ??
+             AgenticaSystemPrompt.DESCRIBE,
+         },
+       ],
+     });
+     const descriptions: IAgenticaPrompt.IDescribe[] = completion.choices
+       .map((choice) =>
+         choice.message.role === "assistant" && !!choice.message.content?.length
+           ? choice.message.content
+           : null,
+       )
+       .filter((str) => str !== null)
+       .map(
+         (content) =>
+           ({
+             type: "describe",
+             executions: histories,
+             text: content,
+           }) satisfies IAgenticaPrompt.IDescribe,
+       );
+     for (const describe of descriptions) await ctx.dispatch(describe);
+     return descriptions;
+   };
+ }