@agentica/core 0.8.3-dev.20250227 → 0.9.0-dev.20250302
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +25 -10
- package/lib/Agentica.d.ts +10 -9
- package/lib/Agentica.js.map +1 -1
- package/lib/chatgpt/ChatGptAgent.d.ts +2 -1
- package/lib/chatgpt/ChatGptAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptCallFunctionAgent.d.ts +2 -1
- package/lib/chatgpt/ChatGptCallFunctionAgent.js +3 -1
- package/lib/chatgpt/ChatGptCallFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptCancelFunctionAgent.d.ts +3 -2
- package/lib/chatgpt/ChatGptCancelFunctionAgent.js +55 -2
- package/lib/chatgpt/ChatGptCancelFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptDescribeFunctionAgent.d.ts +2 -1
- package/lib/chatgpt/ChatGptDescribeFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptHistoryDecoder.d.ts +2 -1
- package/lib/chatgpt/ChatGptHistoryDecoder.js.map +1 -1
- package/lib/chatgpt/ChatGptInitializeFunctionAgent.d.ts +2 -1
- package/lib/chatgpt/ChatGptInitializeFunctionAgent.js +63 -1
- package/lib/chatgpt/ChatGptInitializeFunctionAgent.js.map +1 -1
- package/lib/chatgpt/ChatGptSelectFunctionAgent.d.ts +2 -1
- package/lib/chatgpt/ChatGptSelectFunctionAgent.js +55 -2
- package/lib/chatgpt/ChatGptSelectFunctionAgent.js.map +1 -1
- package/lib/functional/createHttpLlmApplication.js +826 -798
- package/lib/functional/createHttpLlmApplication.js.map +1 -1
- package/lib/index.mjs +987 -804
- package/lib/index.mjs.map +1 -1
- package/lib/internal/AgenticaDefaultPrompt.d.ts +2 -1
- package/lib/internal/AgenticaDefaultPrompt.js.map +1 -1
- package/lib/internal/AgenticaOperationComposer.d.ts +5 -4
- package/lib/internal/AgenticaOperationComposer.js +1 -1
- package/lib/internal/AgenticaOperationComposer.js.map +1 -1
- package/lib/internal/AgenticaPromptFactory.d.ts +3 -2
- package/lib/internal/AgenticaPromptFactory.js.map +1 -1
- package/lib/internal/AgenticaPromptTransformer.d.ts +5 -4
- package/lib/internal/AgenticaPromptTransformer.js.map +1 -1
- package/lib/structures/IAgenticaConfig.d.ts +4 -3
- package/lib/structures/IAgenticaContext.d.ts +7 -6
- package/lib/structures/IAgenticaController.d.ts +8 -8
- package/lib/structures/IAgenticaEvent.d.ts +19 -18
- package/lib/structures/IAgenticaExecutor.d.ts +7 -6
- package/lib/structures/IAgenticaOperation.d.ts +4 -4
- package/lib/structures/IAgenticaOperationCollection.d.ts +6 -5
- package/lib/structures/IAgenticaOperationSelection.d.ts +4 -4
- package/lib/structures/IAgenticaPrompt.d.ts +11 -11
- package/lib/structures/IAgenticaProps.d.ts +9 -4
- package/lib/structures/IAgenticaProvider.d.ts +22 -27
- package/lib/structures/IAgenticaSystemPrompt.d.ts +8 -7
- package/package.json +6 -9
- package/src/Agentica.ts +24 -19
- package/src/chatgpt/ChatGptAgent.ts +9 -5
- package/src/chatgpt/ChatGptCallFunctionAgent.ts +25 -22
- package/src/chatgpt/ChatGptCancelFunctionAgent.ts +26 -22
- package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +7 -6
- package/src/chatgpt/ChatGptHistoryDecoder.ts +3 -2
- package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +5 -5
- package/src/chatgpt/ChatGptSelectFunctionAgent.ts +33 -32
- package/src/internal/AgenticaDefaultPrompt.ts +5 -1
- package/src/internal/AgenticaOperationComposer.ts +20 -15
- package/src/internal/AgenticaPromptFactory.ts +10 -8
- package/src/internal/AgenticaPromptTransformer.ts +19 -16
- package/src/structures/IAgenticaConfig.ts +6 -4
- package/src/structures/IAgenticaContext.ts +7 -6
- package/src/structures/IAgenticaController.ts +12 -10
- package/src/structures/IAgenticaEvent.ts +28 -23
- package/src/structures/IAgenticaExecutor.ts +12 -8
- package/src/structures/IAgenticaOperation.ts +10 -10
- package/src/structures/IAgenticaOperationCollection.ts +7 -5
- package/src/structures/IAgenticaOperationSelection.ts +10 -10
- package/src/structures/IAgenticaPrompt.ts +24 -19
- package/src/structures/IAgenticaProps.ts +10 -4
- package/src/structures/IAgenticaProvider.ts +22 -28
- package/src/structures/IAgenticaSystemPrompt.ts +9 -7
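
The common thread across the source diffs reproduced below is that every exported structure and agent routine now takes a `Model extends ILlmSchema.Model` type parameter, so the schema model is threaded through the types instead of being fixed. A minimal sketch of the before/after shape, using simplified stand-in types rather than the real `IAgenticaContext` / `IAgenticaPrompt` definitions:

```ts
// Simplified stand-ins (not the real @agentica/core types) that mirror the
// 0.8.x -> 0.9.x signature change shown in the diffs below.
type LlmModel = "chatgpt" | "claude" | "gemini"; // stand-in for ILlmSchema.Model

interface ContextLike<Model extends LlmModel> {
  model: Model;
}
interface PromptLike<Model extends LlmModel> {
  type: "text";
  model: Model;
  text: string;
}

// 0.8.x shape:  execute(ctx: ContextLike): Promise<PromptLike[]>
// 0.9.x shape:  the model literal threads through every structure.
const execute = async <Model extends LlmModel>(
  ctx: ContextLike<Model>,
): Promise<PromptLike<Model>[]> => [
  { type: "text", model: ctx.model, text: "..." },
];

// Usage: the inferred return type is Promise<PromptLike<"chatgpt">[]>.
void execute({ model: "chatgpt" });
```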
package/src/chatgpt/ChatGptCallFunctionAgent.ts

@@ -4,6 +4,7 @@ import {
   IChatGptSchema,
   IHttpMigrateRoute,
   IHttpResponse,
+  ILlmSchema,
 } from "@samchon/openapi";
 import OpenAI from "openai";
 import { IValidation } from "typia";
@@ -20,9 +21,9 @@ import { ChatGptCancelFunctionAgent } from "./ChatGptCancelFunctionAgent";
 import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";

 export namespace ChatGptCallFunctionAgent {
-  export const execute = async (
-    ctx: IAgenticaContext,
-  ): Promise<IAgenticaPrompt[]> => {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+  ): Promise<IAgenticaPrompt<Model>[]> => {
     //----
     // EXECUTE CHATGPT API
     //----
@@ -79,8 +80,8 @@ export namespace ChatGptCallFunctionAgent {
     const closures: Array<
       () => Promise<
         Array<
-          | IAgenticaPrompt.IExecute
-          | IAgenticaPrompt.ICancel
+          | IAgenticaPrompt.IExecute<Model>
+          | IAgenticaPrompt.ICancel<Model>
           | IAgenticaPrompt.IText
         >
       >
@@ -88,14 +89,14 @@ export namespace ChatGptCallFunctionAgent {
     for (const choice of completion.choices) {
       for (const tc of choice.message.tool_calls ?? []) {
         if (tc.type === "function") {
-          const operation: IAgenticaOperation | undefined =
+          const operation: IAgenticaOperation<Model> | undefined =
             ctx.operations.flat.get(tc.function.name);
           if (operation === undefined) continue;
           closures.push(
             async (): Promise<
-              [IAgenticaPrompt.IExecute, IAgenticaPrompt.ICancel]
+              [IAgenticaPrompt.IExecute<Model>, IAgenticaPrompt.ICancel<Model>]
             > => {
-              const call: IAgenticaEvent.ICall = {
+              const call: IAgenticaEvent.ICall<Model> = {
                 type: "call",
                 id: tc.id,
                 operation,
@@ -108,7 +109,7 @@ export namespace ChatGptCallFunctionAgent {
               });
               await ctx.dispatch(call);

-              const execute: IAgenticaPrompt.IExecute = await propagate(
+              const execute: IAgenticaPrompt.IExecute<Model> = await propagate(
                 ctx,
                 call,
                 0,
@@ -141,7 +142,7 @@ export namespace ChatGptCallFunctionAgent {
                     reason: "complete",
                   }),
                 ],
-                } satisfies IAgenticaPrompt.ICancel
+                } satisfies IAgenticaPrompt.ICancel<Model>,
               ] as const;
             },
           );
@@ -164,11 +165,11 @@ export namespace ChatGptCallFunctionAgent {
     return (await Promise.all(closures.map((fn) => fn()))).flat();
   };

-  const propagate = async (
-    ctx: IAgenticaContext,
-    call: IAgenticaEvent.ICall,
+  const propagate = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    call: IAgenticaEvent.ICall<Model>,
     retry: number,
-  ): Promise<IAgenticaPrompt.IExecute> => {
+  ): Promise<IAgenticaPrompt.IExecute<Model>> => {
     if (call.operation.protocol === "http") {
       //----
       // HTTP PROTOCOL
@@ -307,12 +308,12 @@ export namespace ChatGptCallFunctionAgent {
     }
   };

-  const correct = async (
-    ctx: IAgenticaContext,
-    call: IAgenticaEvent.ICall,
+  const correct = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    call: IAgenticaEvent.ICall<Model>,
     retry: number,
     error: unknown,
-  ): Promise<IAgenticaPrompt.IExecute | null> => {
+  ): Promise<IAgenticaPrompt.IExecute<Model> | null> => {
     //----
     // EXECUTE CHATGPT API
     //----
@@ -410,8 +411,8 @@ export namespace ChatGptCallFunctionAgent {
     );
   };

-  const fillHttpArguments = (props: {
-    operation: IAgenticaOperation;
+  const fillHttpArguments = <Model extends ILlmSchema.Model>(props: {
+    operation: IAgenticaOperation<Model>;
     arguments: object;
   }): void => {
     if (props.operation.protocol !== "http") return;
@@ -421,8 +422,10 @@ export namespace ChatGptCallFunctionAgent {
       route.operation().requestBody?.required === true &&
       (props.arguments as any).body === undefined &&
       isObject(
-        props.operation.function.parameters.$defs,
-        props.operation.function.parameters.properties.body!,
+        (props.operation.function.parameters as IChatGptSchema.IParameters)
+          .$defs,
+        (props.operation.function.parameters as IChatGptSchema.IParameters)
+          .properties.body!,
       )
     )
       (props.arguments as any).body = {};
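
The last hunk above touches `fillHttpArguments`, which defaults a missing `body` argument to `{}` when the HTTP route requires a request body and the body schema describes an object; the cast to `IChatGptSchema.IParameters` is what the new generic `Model` makes necessary. A hedged sketch of that defaulting rule with simplified types; `isObjectSchema` is a hypothetical stand-in for the package's `isObject` helper:

```ts
// Simplified sketch of the body-defaulting rule; the types and the
// $ref-resolving helper are assumptions for illustration, not package internals.
interface ParametersLike {
  $defs: Record<string, { type?: string; $ref?: string }>;
  properties: Record<string, { type?: string; $ref?: string } | undefined>;
}

const isObjectSchema = (
  $defs: ParametersLike["$defs"],
  schema: { type?: string; $ref?: string } | undefined,
): boolean => {
  if (schema === undefined) return false;
  if (schema.$ref !== undefined)
    return isObjectSchema($defs, $defs[schema.$ref.split("/").pop()!]);
  return schema.type === "object";
};

const fillBodyDefault = (props: {
  requestBodyRequired: boolean;
  parameters: ParametersLike;
  arguments: Record<string, unknown>;
}): void => {
  if (
    props.requestBodyRequired &&
    props.arguments.body === undefined &&
    isObjectSchema(props.parameters.$defs, props.parameters.properties.body)
  )
    props.arguments.body = {}; // give downstream validation something to correct
};
```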
package/src/chatgpt/ChatGptCancelFunctionAgent.ts

@@ -1,4 +1,8 @@
-import { IHttpLlmFunction, ILlmApplication } from "@samchon/openapi";
+import {
+  IHttpLlmFunction,
+  ILlmApplication,
+  ILlmSchema,
+} from "@samchon/openapi";
 import OpenAI from "openai";
 import typia, { IValidation } from "typia";
 import { v4 } from "uuid";
@@ -18,17 +22,16 @@ import { __IChatFunctionReference } from "../structures/internal/__IChatFunction
 import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";

 export namespace ChatGptCancelFunctionAgent {
-  export const execute = async (
-    ctx: IAgenticaContext,
-  ): Promise<IAgenticaPrompt.ICancel[]> => {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+  ): Promise<IAgenticaPrompt.ICancel<Model>[]> => {
     if (ctx.operations.divided === undefined)
       return step(ctx, ctx.operations.array, 0);

-    const stacks: IAgenticaOperationSelection[][] = ctx.operations.divided.map(
-      () => [],
-    );
-    const events: IAgenticaEvent[] = [];
-    const prompts: IAgenticaPrompt.ICancel[][] = await Promise.all(
+    const stacks: IAgenticaOperationSelection<Model>[][] =
+      ctx.operations.divided.map(() => []);
+    const events: IAgenticaEvent<Model>[] = [];
+    const prompts: IAgenticaPrompt.ICancel<Model>[][] = await Promise.all(
       ctx.operations.divided.map((operations, i) =>
         step(
           {
@@ -62,7 +65,7 @@ export namespace ChatGptCancelFunctionAgent {
     );

     // RE-COLLECT SELECT FUNCTION EVENTS
-    const collection: IAgenticaPrompt.ICancel = {
+    const collection: IAgenticaPrompt.ICancel<Model> = {
       id: v4(),
       type: "cancel",
       operations: [],
@@ -72,8 +75,9 @@ export namespace ChatGptCancelFunctionAgent {
       collection.operations.push(
         AgenticaPromptFactory.selection({
           protocol: e.operation.protocol as "http",
-          controller: e.operation.controller as IAgenticaController.IHttp,
-          function: e.operation.function as IHttpLlmFunction<"chatgpt">,
+          controller: e.operation
+            .controller as IAgenticaController.IHttp<Model>,
+          function: e.operation.function as IHttpLlmFunction<Model>,
           reason: e.reason,
           name: e.operation.name,
         }),
@@ -86,16 +90,16 @@ export namespace ChatGptCancelFunctionAgent {
     return [collection];
   };

-  export const cancelFunction = async (
-    ctx: IAgenticaContext,
+  export const cancelFunction = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
     reference: __IChatFunctionReference,
-  ): Promise<IAgenticaOperationSelection | null> => {
+  ): Promise<IAgenticaOperationSelection<Model> | null> => {
     const index: number = ctx.stack.findIndex(
       (item) => item.name === reference.name,
     );
     if (index === -1) return null;

-    const item: IAgenticaOperationSelection = ctx.stack[index]!;
+    const item: IAgenticaOperationSelection<Model> = ctx.stack[index]!;
     ctx.stack.splice(index, 1);
     await ctx.dispatch({
       type: "cancel",
@@ -105,12 +109,12 @@ export namespace ChatGptCancelFunctionAgent {
     return item;
   };

-  const step = async (
-    ctx: IAgenticaContext,
-    operations: IAgenticaOperation[],
+  const step = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    operations: IAgenticaOperation<Model>[],
     retry: number,
     failures?: IFailure[],
-  ): Promise<IAgenticaPrompt.ICancel[]> => {
+  ): Promise<IAgenticaPrompt.ICancel<Model>[]> => {
     //----
     // EXECUTE CHATGPT API
     //----
@@ -209,7 +213,7 @@ export namespace ChatGptCancelFunctionAgent {
     //----
     // PROCESS COMPLETION
     //----
-    const prompts: IAgenticaPrompt.ICancel[] = [];
+    const prompts: IAgenticaPrompt.ICancel<Model>[] = [];
     for (const choice of completion.choices) {
       // TOOL CALLING HANDLER
       if (choice.message.tool_calls)
@@ -220,7 +224,7 @@ export namespace ChatGptCancelFunctionAgent {
           );
           if (typia.is(input) === false) continue;
           else if (tc.function.name === "cancelFunctions") {
-            const collection: IAgenticaPrompt.ICancel = {
+            const collection: IAgenticaPrompt.ICancel<Model> = {
               id: tc.id,
               type: "cancel",
               operations: [],
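
Both the cancel and select agents share the divided-operations fan-out that these hunks parameterize: when the controller set has been split to respect the configured capacity, `step()` runs once per division and the per-division results are re-collected into a single prompt. A rough sketch of that control flow with stand-in types (the real code works with `IAgenticaOperation` / `IAgenticaPrompt` and also replays events):

```ts
// Stand-in types, not the real agent structures.
interface OperationLike {
  name: string;
}
interface CancelPromptLike {
  type: "cancel";
  operations: OperationLike[];
}

const runDivided = async (
  divided: OperationLike[][],
  step: (operations: OperationLike[]) => Promise<CancelPromptLike[]>,
): Promise<CancelPromptLike[]> => {
  // one step() call per division, executed concurrently
  const prompts: CancelPromptLike[][] = await Promise.all(
    divided.map((operations) => step(operations)),
  );
  // re-collect everything the divisions produced into a single prompt
  const collection: CancelPromptLike = {
    type: "cancel",
    operations: prompts.flat().flatMap((p) => p.operations),
  };
  return [collection];
};
```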
package/src/chatgpt/ChatGptDescribeFunctionAgent.ts

@@ -1,3 +1,4 @@
+import { ILlmSchema } from "@samchon/openapi";
 import OpenAI from "openai";

 import { AgenticaDefaultPrompt } from "../internal/AgenticaDefaultPrompt";
@@ -7,10 +8,10 @@ import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";
 import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";

 export namespace ChatGptDescribeFunctionAgent {
-  export const execute = async (
-    ctx: IAgenticaContext,
-    histories: IAgenticaPrompt.IExecute[],
-  ): Promise<IAgenticaPrompt.IDescribe[]> => {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    histories: IAgenticaPrompt.IExecute<Model>[],
+  ): Promise<IAgenticaPrompt.IDescribe<Model>[]> => {
     if (histories.length === 0) return [];
     const completion: OpenAI.ChatCompletion = await ctx.request("describe", {
       messages: [
@@ -30,7 +31,7 @@ export namespace ChatGptDescribeFunctionAgent {
         },
       ],
     });
-    const descriptions: IAgenticaPrompt.IDescribe[] = completion.choices
+    const descriptions: IAgenticaPrompt.IDescribe<Model>[] = completion.choices
       .map((choice) =>
         choice.message.role === "assistant" && !!choice.message.content?.length
           ? choice.message.content
@@ -43,7 +44,7 @@ export namespace ChatGptDescribeFunctionAgent {
           type: "describe",
           executions: histories,
           text: content,
-        }) satisfies IAgenticaPrompt.IDescribe
+        }) satisfies IAgenticaPrompt.IDescribe<Model>,
       );
     for (const describe of descriptions) await ctx.dispatch(describe);
     return descriptions;
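
The describe agent's post-processing shown above is a simple filter-and-wrap over the completion choices; a compact sketch with stand-in types (the real code also attaches the executed histories and dispatches each description as an event):

```ts
// Stand-ins for OpenAI.ChatCompletion choices and IAgenticaPrompt.IDescribe.
interface ChoiceLike {
  role: "assistant" | "tool";
  content: string | null;
}
interface DescribeLike {
  type: "describe";
  text: string;
}

const toDescriptions = (choices: ChoiceLike[]): DescribeLike[] =>
  choices
    // keep only assistant choices that actually carry text
    .map((c) => (c.role === "assistant" && !!c.content?.length ? c.content : null))
    .filter((content): content is string => content !== null)
    .map((text): DescribeLike => ({ type: "describe", text }));
```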
package/src/chatgpt/ChatGptHistoryDecoder.ts

@@ -1,10 +1,11 @@
+import { ILlmSchema } from "@samchon/openapi";
 import OpenAI from "openai";

 import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";

 export namespace ChatGptHistoryDecoder {
-  export const decode = (
-    history: IAgenticaPrompt,
+  export const decode = <Model extends ILlmSchema.Model>(
+    history: IAgenticaPrompt<Model>,
   ): OpenAI.ChatCompletionMessageParam[] => {
     // NO NEED TO DECODE DESCRIBE
     if (history.type === "describe") return [];
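
The decoder above becomes generic as well, but its behaviour is unchanged: `describe` histories are never replayed to the model. A stand-in sketch of that pattern:

```ts
// Stand-in prompt union; the real IAgenticaPrompt has more variants.
type PromptLike<Model extends string> =
  | { type: "describe"; model: Model; text: string }
  | { type: "text"; model: Model; text: string };

const decode = <Model extends string>(
  history: PromptLike<Model>,
): { role: "assistant"; content: string }[] => {
  if (history.type === "describe") return []; // not fed back to the model
  return [{ role: "assistant", content: history.text }];
};
```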
package/src/chatgpt/ChatGptInitializeFunctionAgent.ts

@@ -1,4 +1,4 @@
-import { ILlmFunction } from "@samchon/openapi";
+import { ILlmFunction, ILlmSchema } from "@samchon/openapi";
 import OpenAI from "openai";
 import typia from "typia";

@@ -10,9 +10,9 @@ import { __IChatInitialApplication } from "../structures/internal/__IChatInitial
 import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";

 export namespace ChatGptInitializeFunctionAgent {
-  export const execute = async (
-    ctx: IAgenticaContext,
-  ): Promise<IAgenticaPrompt[]> => {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+  ): Promise<IAgenticaPrompt<Model>[]> => {
     //----
     // EXECUTE CHATGPT API
     //----
@@ -56,7 +56,7 @@ export namespace ChatGptInitializeFunctionAgent {
     //----
     // PROCESS COMPLETION
     //----
-    const prompts: IAgenticaPrompt[] = [];
+    const prompts: IAgenticaPrompt<Model>[] = [];
     for (const choice of completion.choices) {
       if (
         choice.message.role === "assistant" &&
package/src/chatgpt/ChatGptSelectFunctionAgent.ts

@@ -1,4 +1,8 @@
-import { IHttpLlmFunction, ILlmApplication } from "@samchon/openapi";
+import {
+  IHttpLlmFunction,
+  ILlmApplication,
+  ILlmSchema,
+} from "@samchon/openapi";
 import OpenAI from "openai";
 import typia, { IValidation } from "typia";
 import { v4 } from "uuid";
@@ -18,17 +22,16 @@ import { __IChatSelectFunctionsApplication } from "../structures/internal/__ICha
 import { ChatGptHistoryDecoder } from "./ChatGptHistoryDecoder";

 export namespace ChatGptSelectFunctionAgent {
-  export const execute = async (
-    ctx: IAgenticaContext,
-  ): Promise<IAgenticaPrompt[]> => {
+  export const execute = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+  ): Promise<IAgenticaPrompt<Model>[]> => {
     if (ctx.operations.divided === undefined)
       return step(ctx, ctx.operations.array, 0);

-    const stacks: IAgenticaOperationSelection[][] = ctx.operations.divided.map(
-      () => [],
-    );
-    const events: IAgenticaEvent[] = [];
-    const prompts: IAgenticaPrompt[][] = await Promise.all(
+    const stacks: IAgenticaOperationSelection<Model>[][] =
+      ctx.operations.divided.map(() => []);
+    const events: IAgenticaEvent<Model>[] = [];
+    const prompts: IAgenticaPrompt<Model>[][] = await Promise.all(
       ctx.operations.divided.map((operations, i) =>
         step(
           {
@@ -62,7 +65,7 @@ export namespace ChatGptSelectFunctionAgent {
     );

     // RE-COLLECT SELECT FUNCTION EVENTS
-    const collection: IAgenticaPrompt.ISelect = {
+    const collection: IAgenticaPrompt.ISelect<Model> = {
       id: v4(),
       type: "select",
       operations: [],
@@ -72,8 +75,9 @@ export namespace ChatGptSelectFunctionAgent {
       collection.operations.push(
         AgenticaPromptFactory.selection({
           protocol: e.operation.protocol as "http",
-          controller: e.operation.controller as IAgenticaController.IHttp,
-          function: e.operation.function as IHttpLlmFunction<"chatgpt">,
+          controller: e.operation
+            .controller as IAgenticaController.IHttp<Model>,
+          function: e.operation.function as IHttpLlmFunction<Model>,
           reason: e.reason,
           name: e.operation.name,
         }),
@@ -86,12 +90,12 @@ export namespace ChatGptSelectFunctionAgent {
     return [collection];
   };

-  const step = async (
-    ctx: IAgenticaContext,
-    operations: IAgenticaOperation[],
+  const step = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
+    operations: IAgenticaOperation<Model>[],
     retry: number,
     failures?: IFailure[],
-  ): Promise<IAgenticaPrompt[]> => {
+  ): Promise<IAgenticaPrompt<Model>[]> => {
     //----
     // EXECUTE CHATGPT API
     //----
@@ -190,7 +194,7 @@ export namespace ChatGptSelectFunctionAgent {
     //----
     // PROCESS COMPLETION
     //----
-    const prompts: IAgenticaPrompt[] = [];
+    const prompts: IAgenticaPrompt<Model>[] = [];
     for (const choice of completion.choices) {
       // TOOL CALLING HANDLER
       if (choice.message.tool_calls)
@@ -202,23 +206,21 @@ export namespace ChatGptSelectFunctionAgent {
           );
           if (typia.is(input) === false) continue;
           else if (tc.function.name === "selectFunctions") {
-            const collection: IAgenticaPrompt.ISelect = {
+            const collection: IAgenticaPrompt.ISelect<Model> = {
               id: tc.id,
               type: "select",
               operations: [],
             };
             for (const reference of input.functions) {
-              const operation: IAgenticaOperation | null = await selectFunction(
-                ctx,
-                reference,
-              );
+              const operation: IAgenticaOperation<Model> | null =
+                await selectFunction(ctx, reference);
               if (operation !== null)
                 collection.operations.push(
                   AgenticaPromptFactory.selection({
                     protocol: operation.protocol as "http",
                     controller:
-                      operation.controller as IAgenticaController.IHttp,
-                    function: operation.function as IHttpLlmFunction<"chatgpt">,
+                      operation.controller as IAgenticaController.IHttp<Model>,
+                    function: operation.function as IHttpLlmFunction<Model>,
                     name: operation.name,
                     reason: reference.reason,
                   }),
@@ -245,20 +247,19 @@ export namespace ChatGptSelectFunctionAgent {
     return prompts;
   };

-  const selectFunction = async (
-    ctx: IAgenticaContext,
+  const selectFunction = async <Model extends ILlmSchema.Model>(
+    ctx: IAgenticaContext<Model>,
     reference: __IChatFunctionReference,
-  ): Promise<IAgenticaOperation | null> => {
-    const operation: IAgenticaOperation | undefined = ctx.operations.flat.get(
-      reference.name,
-    );
+  ): Promise<IAgenticaOperation<Model> | null> => {
+    const operation: IAgenticaOperation<Model> | undefined =
+      ctx.operations.flat.get(reference.name);
     if (operation === undefined) return null;

     ctx.stack.push(
       AgenticaPromptFactory.selection({
         protocol: operation.protocol as "http",
-        controller: operation.controller as IAgenticaController.IHttp,
-        function: operation.function as IHttpLlmFunction<"chatgpt">,
+        controller: operation.controller as IAgenticaController.IHttp<Model>,
+        function: operation.function as IHttpLlmFunction<Model>,
         name: reference.name,
         reason: reference.reason,
       }),
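
The `selectFunction` helper rewritten above resolves the model-referenced function name against the flat operation map and only pushes a selection onto the stack when the name actually exists, which silently drops hallucinated function names. A stand-in sketch of that flow:

```ts
// Stand-ins for the agent internals; the real stack holds IAgenticaOperationSelection.
interface OperationLike {
  name: string;
}
interface SelectionLike {
  name: string;
  reason: string;
}

const selectFunction = (
  flat: Map<string, OperationLike>,
  stack: SelectionLike[],
  reference: { name: string; reason: string },
): OperationLike | null => {
  const operation: OperationLike | undefined = flat.get(reference.name);
  if (operation === undefined) return null; // hallucinated function name: ignore it
  stack.push({ name: reference.name, reason: reference.reason });
  return operation;
};
```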
package/src/internal/AgenticaDefaultPrompt.ts

@@ -1,9 +1,13 @@
+import { ILlmSchema } from "@samchon/openapi";
+
 import { IAgenticaConfig } from "../structures/IAgenticaConfig";
 import { AgenticaSystemPrompt } from "./AgenticaSystemPrompt";
 import { Singleton } from "./Singleton";

 export namespace AgenticaDefaultPrompt {
-  export const write = (config?: IAgenticaConfig): string => {
+  export const write = <Model extends ILlmSchema.Model>(
+    config?: IAgenticaConfig<Model>,
+  ): string => {
     if (config?.systemPrompt?.common)
       return config?.systemPrompt?.common(config);

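
The `write()` helper above keeps its behaviour: a user-supplied `systemPrompt.common` callback wins, otherwise the built-in default is used. A sketch of that fallback with an assumed default string (the real text lives in `AgenticaSystemPrompt`):

```ts
// Simplified config shape; the real IAgenticaConfig carries more options.
interface ConfigLike {
  systemPrompt?: {
    common?: (config?: ConfigLike) => string;
  };
}
// Assumed placeholder text, not the package's actual system prompt.
const DEFAULT_COMMON_PROMPT = "You are a helpful agent that can call functions.";

const write = (config?: ConfigLike): string =>
  config?.systemPrompt?.common?.(config) ?? DEFAULT_COMMON_PROMPT;
```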
package/src/internal/AgenticaOperationComposer.ts

@@ -1,3 +1,5 @@
+import { ILlmSchema } from "@samchon/openapi";
+
 import { IAgenticaConfig } from "../structures/IAgenticaConfig";
 import { IAgenticaController } from "../structures/IAgenticaController";
 import { IAgenticaOperation } from "../structures/IAgenticaOperation";
@@ -5,10 +7,10 @@ import { IAgenticaOperationCollection } from "../structures/IAgenticaOperationCo
 import { __map_take } from "./__map_take";

 export namespace AgenticaOperationComposer {
-  export const compose = (props: {
-    controllers: IAgenticaController[];
-    config?: IAgenticaConfig | undefined;
-  }): IAgenticaOperationCollection => {
+  export const compose = <Model extends ILlmSchema.Model>(props: {
+    controllers: IAgenticaController<Model>[];
+    config?: IAgenticaConfig<Model> | undefined;
+  }): IAgenticaOperationCollection<Model> => {
     const unique: boolean =
       props.controllers.length === 1 ||
       (() => {
@@ -22,17 +24,17 @@ export namespace AgenticaOperationComposer {
     const naming = (func: string, ci: number) =>
       unique ? func : `_${ci}_${func}`;

-    const array: IAgenticaOperation[] = props.controllers
+    const array: IAgenticaOperation<Model>[] = props.controllers
       .map((controller, ci) =>
         controller.protocol === "http"
           ? controller.application.functions.map(
               (func) =>
                 ({
                   protocol: "http",
-                  controller,
+                  controller: controller,
                   function: func,
                   name: naming(func.name, ci),
-                }) satisfies IAgenticaOperation.IHttp
+                }) satisfies IAgenticaOperation.IHttp<Model>,
             )
           : controller.application.functions.map(
               (func) =>
@@ -41,11 +43,11 @@ export namespace AgenticaOperationComposer {
                   controller,
                   function: func,
                   name: naming(func.name, ci),
-                }) satisfies IAgenticaOperation.IClass
+                }) satisfies IAgenticaOperation.IClass<Model>,
             ),
       )
       .flat();
-    const divided: IAgenticaOperation[][] | undefined =
+    const divided: IAgenticaOperation<Model>[][] | undefined =
       !!props.config?.capacity && array.length > props.config.capacity
         ? divideOperations({
             array,
@@ -53,8 +55,11 @@ export namespace AgenticaOperationComposer {
           })
         : undefined;

-    const flat: Map<string, IAgenticaOperation> = new Map();
-    const group: Map<string, Map<string, IAgenticaOperation>> = new Map();
+    const flat: Map<string, IAgenticaOperation<Model>> = new Map();
+    const group: Map<
+      string,
+      Map<string, IAgenticaOperation<Model>>
+    > = new Map();
     for (const item of array) {
       flat.set(item.name, item);
       __map_take(group, item.controller.name, () => new Map()).set(
@@ -70,13 +75,13 @@ export namespace AgenticaOperationComposer {
     };
   };

-  const divideOperations = (props: {
-    array: IAgenticaOperation[];
+  const divideOperations = <Model extends ILlmSchema.Model>(props: {
+    array: IAgenticaOperation<Model>[];
     capacity: number;
-  }): IAgenticaOperation[][] => {
+  }): IAgenticaOperation<Model>[][] => {
     const size: number = Math.ceil(props.array.length / props.capacity);
     const capacity: number = Math.ceil(props.array.length / size);
-    const replica: IAgenticaOperation[] = props.array.slice();
+    const replica: IAgenticaOperation<Model>[] = props.array.slice();
     return new Array(size).fill(0).map(() => replica.splice(0, capacity));
   };
 }
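
`divideOperations` above is a small chunking routine: it computes how many divisions are needed for the configured capacity, then splits the array into roughly equal chunks. The same logic, extracted as a generic helper:

```ts
// Generic version of the chunking shown in the diff above.
const divideOperations = <T>(array: T[], capacity: number): T[][] => {
  const size: number = Math.ceil(array.length / capacity); // number of chunks
  const chunk: number = Math.ceil(array.length / size); // re-balanced chunk size
  const replica: T[] = array.slice();
  return new Array(size).fill(0).map(() => replica.splice(0, chunk));
};

// e.g. 10 operations with capacity 4 -> chunks of sizes [4, 4, 2]
```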
package/src/internal/AgenticaPromptFactory.ts

@@ -1,10 +1,12 @@
+import { ILlmSchema } from "@samchon/openapi";
+
 import { IAgenticaOperationSelection } from "../structures/IAgenticaOperationSelection";
 import { IAgenticaPrompt } from "../structures/IAgenticaPrompt";

 export namespace AgenticaPromptFactory {
-  export const execute = (
-    props: Omit<IAgenticaPrompt.IExecute, "toJSON">,
-  ): IAgenticaPrompt.IExecute =>
+  export const execute = <Model extends ILlmSchema.Model>(
+    props: Omit<IAgenticaPrompt.IExecute<Model>, "toJSON">,
+  ): IAgenticaPrompt.IExecute<Model> =>
     ({
       ...props,
       toJSON: () =>
@@ -13,11 +15,11 @@ export namespace AgenticaPromptFactory {
         controller: props.controller.name,
         function: props.function.name,
       }) as any,
-    }) as IAgenticaPrompt.IExecute;
+    }) as IAgenticaPrompt.IExecute<Model>;

-  export const selection = (
-    props: Omit<IAgenticaOperationSelection, "toJSON">,
-  ): IAgenticaOperationSelection =>
+  export const selection = <Model extends ILlmSchema.Model>(
+    props: Omit<IAgenticaOperationSelection<Model>, "toJSON">,
+  ): IAgenticaOperationSelection<Model> =>
     ({
       ...props,
       toJSON: () =>
@@ -26,5 +28,5 @@ export namespace AgenticaPromptFactory {
         controller: props.controller.name,
         function: props.function.name,
       }) as any,
-    }) as IAgenticaOperationSelection;
+    }) as IAgenticaOperationSelection<Model>;
 }
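
The factory functions above return objects whose `toJSON()` collapses the heavyweight controller and function references down to their names, so serialized prompt histories stay small. A stand-in sketch of that pattern:

```ts
// Stand-in for IAgenticaOperationSelection; the real type carries more fields.
interface SelectionLike {
  controller: { name: string };
  function: { name: string };
  reason: string;
  toJSON: () => { controller: string; function: string; reason: string };
}

const selection = (props: Omit<SelectionLike, "toJSON">): SelectionLike => ({
  ...props,
  // keep full references at runtime, but serialize only their names
  toJSON: () => ({
    controller: props.controller.name,
    function: props.function.name,
    reason: props.reason,
  }),
});

// JSON.stringify(selection({ controller: { name: "bbs" }, function: { name: "create" }, reason: "..." }))
// => {"controller":"bbs","function":"create","reason":"..."}
```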