@agentica/core 0.12.2-dev.20250314 → 0.12.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. package/LICENSE +21 -21
  2. package/README.md +461 -461
  3. package/lib/context/AgenticaTokenUsage.d.ts +6 -6
  4. package/package.json +1 -1
  5. package/prompts/cancel.md +4 -4
  6. package/prompts/common.md +2 -2
  7. package/prompts/describe.md +6 -6
  8. package/prompts/execute.md +6 -6
  9. package/prompts/initialize.md +2 -2
  10. package/prompts/select.md +6 -6
  11. package/src/Agentica.ts +359 -359
  12. package/src/chatgpt/ChatGptAgent.ts +76 -76
  13. package/src/chatgpt/ChatGptCallFunctionAgent.ts +466 -466
  14. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +280 -280
  15. package/src/chatgpt/ChatGptCompletionMessageUtil.ts +166 -166
  16. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +122 -122
  17. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  18. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +96 -96
  19. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +311 -311
  20. package/src/chatgpt/ChatGptUsageAggregator.ts +62 -62
  21. package/src/context/AgenticaCancelPrompt.ts +32 -32
  22. package/src/context/AgenticaClassOperation.ts +23 -23
  23. package/src/context/AgenticaContext.ts +130 -130
  24. package/src/context/AgenticaHttpOperation.ts +27 -27
  25. package/src/context/AgenticaOperation.ts +66 -66
  26. package/src/context/AgenticaOperationBase.ts +57 -57
  27. package/src/context/AgenticaOperationCollection.ts +52 -52
  28. package/src/context/AgenticaOperationSelection.ts +27 -27
  29. package/src/context/AgenticaTokenUsage.ts +170 -170
  30. package/src/context/internal/AgenticaTokenUsageAggregator.ts +66 -66
  31. package/src/context/internal/__IChatCancelFunctionsApplication.ts +23 -23
  32. package/src/context/internal/__IChatFunctionReference.ts +21 -21
  33. package/src/context/internal/__IChatInitialApplication.ts +15 -15
  34. package/src/context/internal/__IChatSelectFunctionsApplication.ts +24 -24
  35. package/src/events/AgenticaCallEvent.ts +36 -36
  36. package/src/events/AgenticaCancelEvent.ts +28 -28
  37. package/src/events/AgenticaDescribeEvent.ts +66 -66
  38. package/src/events/AgenticaEvent.ts +36 -36
  39. package/src/events/AgenticaEventBase.ts +7 -7
  40. package/src/events/AgenticaEventSource.ts +6 -6
  41. package/src/events/AgenticaExecuteEvent.ts +50 -50
  42. package/src/events/AgenticaInitializeEvent.ts +14 -14
  43. package/src/events/AgenticaRequestEvent.ts +45 -45
  44. package/src/events/AgenticaResponseEvent.ts +48 -48
  45. package/src/events/AgenticaSelectEvent.ts +37 -37
  46. package/src/events/AgenticaTextEvent.ts +62 -62
  47. package/src/functional/assertHttpLlmApplication.ts +55 -55
  48. package/src/functional/validateHttpLlmApplication.ts +66 -66
  49. package/src/index.ts +44 -44
  50. package/src/internal/AgenticaConstant.ts +4 -4
  51. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  52. package/src/internal/AgenticaOperationComposer.ts +96 -96
  53. package/src/internal/ByteArrayUtil.ts +5 -5
  54. package/src/internal/MPSCUtil.ts +111 -111
  55. package/src/internal/MathUtil.ts +3 -3
  56. package/src/internal/Singleton.ts +22 -22
  57. package/src/internal/StreamUtil.ts +64 -64
  58. package/src/internal/__map_take.ts +15 -15
  59. package/src/json/IAgenticaEventJson.ts +178 -178
  60. package/src/json/IAgenticaOperationJson.ts +36 -36
  61. package/src/json/IAgenticaOperationSelectionJson.ts +19 -19
  62. package/src/json/IAgenticaPromptJson.ts +130 -130
  63. package/src/json/IAgenticaTokenUsageJson.ts +107 -107
  64. package/src/prompts/AgenticaCancelPrompt.ts +32 -32
  65. package/src/prompts/AgenticaDescribePrompt.ts +41 -41
  66. package/src/prompts/AgenticaExecutePrompt.ts +52 -52
  67. package/src/prompts/AgenticaPrompt.ts +14 -14
  68. package/src/prompts/AgenticaPromptBase.ts +27 -27
  69. package/src/prompts/AgenticaSelectPrompt.ts +32 -32
  70. package/src/prompts/AgenticaTextPrompt.ts +31 -31
  71. package/src/structures/IAgenticaConfig.ts +123 -123
  72. package/src/structures/IAgenticaController.ts +133 -133
  73. package/src/structures/IAgenticaExecutor.ts +157 -157
  74. package/src/structures/IAgenticaProps.ts +69 -69
  75. package/src/structures/IAgenticaSystemPrompt.ts +125 -125
  76. package/src/structures/IAgenticaVendor.ts +39 -39
  77. package/src/transformers/AgenticaEventTransformer.ts +165 -165
  78. package/src/transformers/AgenticaPromptTransformer.ts +134 -134
package/src/chatgpt/ChatGptUsageAggregator.ts
@@ -1,62 +1,62 @@
-import { CompletionUsage } from "openai/resources";
-
-export namespace ChatGptUsageAggregator {
-  export const sumCompletionTokenDetail = (
-    x: CompletionUsage.CompletionTokensDetails,
-    y: CompletionUsage.CompletionTokensDetails,
-  ): CompletionUsage.CompletionTokensDetails => {
-    return {
-      accepted_prediction_tokens:
-        (x.accepted_prediction_tokens ?? 0) +
-        (y.accepted_prediction_tokens ?? 0),
-      reasoning_tokens: (x.reasoning_tokens ?? 0) + (y.reasoning_tokens ?? 0),
-      rejected_prediction_tokens:
-        (x.rejected_prediction_tokens ?? 0) +
-        (y.rejected_prediction_tokens ?? 0),
-    };
-  };
-
-  export const sumPromptTokenDetail = (
-    x: CompletionUsage.PromptTokensDetails,
-    y: CompletionUsage.PromptTokensDetails,
-  ): CompletionUsage.PromptTokensDetails => {
-    return {
-      audio_tokens: (x.audio_tokens ?? 0) + (y.audio_tokens ?? 0),
-      cached_tokens: (x.cached_tokens ?? 0) + (y.cached_tokens ?? 0),
-    };
-  };
-
-  export const sum = (
-    x: CompletionUsage,
-    y: CompletionUsage,
-  ): CompletionUsage => {
-    return {
-      prompt_tokens: (x.prompt_tokens ?? 0) + (y.prompt_tokens ?? 0),
-      completion_tokens:
-        (x.completion_tokens ?? 0) + (y.completion_tokens ?? 0),
-      total_tokens: (x.total_tokens ?? 0) + (y.total_tokens ?? 0),
-      completion_tokens_details: sumCompletionTokenDetail(
-        x.completion_tokens_details ?? {
-          accepted_prediction_tokens: 0,
-          reasoning_tokens: 0,
-          rejected_prediction_tokens: 0,
-        },
-        y.completion_tokens_details ?? {
-          accepted_prediction_tokens: 0,
-          reasoning_tokens: 0,
-          rejected_prediction_tokens: 0,
-        },
-      ),
-      prompt_tokens_details: sumPromptTokenDetail(
-        x.prompt_tokens_details ?? {
-          audio_tokens: 0,
-          cached_tokens: 0,
-        },
-        y.prompt_tokens_details ?? {
-          audio_tokens: 0,
-          cached_tokens: 0,
-        },
-      ),
-    };
-  };
-}
+import { CompletionUsage } from "openai/resources";
+
+export namespace ChatGptUsageAggregator {
+  export const sumCompletionTokenDetail = (
+    x: CompletionUsage.CompletionTokensDetails,
+    y: CompletionUsage.CompletionTokensDetails,
+  ): CompletionUsage.CompletionTokensDetails => {
+    return {
+      accepted_prediction_tokens:
+        (x.accepted_prediction_tokens ?? 0) +
+        (y.accepted_prediction_tokens ?? 0),
+      reasoning_tokens: (x.reasoning_tokens ?? 0) + (y.reasoning_tokens ?? 0),
+      rejected_prediction_tokens:
+        (x.rejected_prediction_tokens ?? 0) +
+        (y.rejected_prediction_tokens ?? 0),
+    };
+  };
+
+  export const sumPromptTokenDetail = (
+    x: CompletionUsage.PromptTokensDetails,
+    y: CompletionUsage.PromptTokensDetails,
+  ): CompletionUsage.PromptTokensDetails => {
+    return {
+      audio_tokens: (x.audio_tokens ?? 0) + (y.audio_tokens ?? 0),
+      cached_tokens: (x.cached_tokens ?? 0) + (y.cached_tokens ?? 0),
+    };
+  };
+
+  export const sum = (
+    x: CompletionUsage,
+    y: CompletionUsage,
+  ): CompletionUsage => {
+    return {
+      prompt_tokens: (x.prompt_tokens ?? 0) + (y.prompt_tokens ?? 0),
+      completion_tokens:
+        (x.completion_tokens ?? 0) + (y.completion_tokens ?? 0),
+      total_tokens: (x.total_tokens ?? 0) + (y.total_tokens ?? 0),
+      completion_tokens_details: sumCompletionTokenDetail(
+        x.completion_tokens_details ?? {
+          accepted_prediction_tokens: 0,
+          reasoning_tokens: 0,
+          rejected_prediction_tokens: 0,
+        },
+        y.completion_tokens_details ?? {
+          accepted_prediction_tokens: 0,
+          reasoning_tokens: 0,
+          rejected_prediction_tokens: 0,
+        },
+      ),
+      prompt_tokens_details: sumPromptTokenDetail(
+        x.prompt_tokens_details ?? {
+          audio_tokens: 0,
+          cached_tokens: 0,
+        },
+        y.prompt_tokens_details ?? {
+          audio_tokens: 0,
+          cached_tokens: 0,
+        },
+      ),
+    };
+  };
+}
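
`ChatGptUsageAggregator` merges OpenAI `CompletionUsage` records field by field, treating absent detail objects as zeros. A minimal usage sketch follows; the import path and the sample records are illustrative, since the module is internal to `@agentica/core` and this diff does not show whether it is re-exported from the package root:

```ts
import { CompletionUsage } from "openai/resources";

// Illustrative path: the aggregator is an internal module of @agentica/core.
import { ChatGptUsageAggregator } from "@agentica/core/lib/chatgpt/ChatGptUsageAggregator";

// Two hypothetical usage records, e.g. reported by successive chat completions.
const first: CompletionUsage = {
  prompt_tokens: 120,
  completion_tokens: 48,
  total_tokens: 168,
};
const second: CompletionUsage = {
  prompt_tokens: 95,
  completion_tokens: 31,
  total_tokens: 126,
};

// Every counter is added; missing detail objects are defaulted to zeros.
const total: CompletionUsage = ChatGptUsageAggregator.sum(first, second);
console.log(total.total_tokens); // 294

// Folding a non-empty list of records into a single aggregate.
const aggregate = (records: CompletionUsage[]): CompletionUsage =>
  records.reduce(ChatGptUsageAggregator.sum);
```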
package/src/context/AgenticaCancelPrompt.ts
@@ -1,32 +1,32 @@
-import { ILlmSchema } from "@samchon/openapi";
-
-import { IAgenticaPromptJson } from "../json/IAgenticaPromptJson";
-import { AgenticaPromptBase } from "../prompts/AgenticaPromptBase";
-import { AgenticaOperationSelection } from "./AgenticaOperationSelection";
-
-export class AgenticaCancelPrompt<
-  Model extends ILlmSchema.Model,
-> extends AgenticaPromptBase<"cancel", IAgenticaPromptJson.ICancel> {
-  public readonly id: string;
-  public readonly selections: AgenticaOperationSelection<Model>[];
-
-  public constructor(props: AgenticaCancelPrompt.IProps<Model>) {
-    super("cancel");
-    this.id = props.id;
-    this.selections = props.selections;
-  }
-
-  public toJSON(): IAgenticaPromptJson.ICancel {
-    return {
-      type: this.type,
-      id: this.id,
-      selections: this.selections.map((s) => s.toJSON()),
-    };
-  }
-}
-export namespace AgenticaCancelPrompt {
-  export interface IProps<Model extends ILlmSchema.Model> {
-    id: string;
-    selections: AgenticaOperationSelection<Model>[];
-  }
-}
+import { ILlmSchema } from "@samchon/openapi";
+
+import { IAgenticaPromptJson } from "../json/IAgenticaPromptJson";
+import { AgenticaPromptBase } from "../prompts/AgenticaPromptBase";
+import { AgenticaOperationSelection } from "./AgenticaOperationSelection";
+
+export class AgenticaCancelPrompt<
+  Model extends ILlmSchema.Model,
+> extends AgenticaPromptBase<"cancel", IAgenticaPromptJson.ICancel> {
+  public readonly id: string;
+  public readonly selections: AgenticaOperationSelection<Model>[];
+
+  public constructor(props: AgenticaCancelPrompt.IProps<Model>) {
+    super("cancel");
+    this.id = props.id;
+    this.selections = props.selections;
+  }
+
+  public toJSON(): IAgenticaPromptJson.ICancel {
+    return {
+      type: this.type,
+      id: this.id,
+      selections: this.selections.map((s) => s.toJSON()),
+    };
+  }
+}
+export namespace AgenticaCancelPrompt {
+  export interface IProps<Model extends ILlmSchema.Model> {
+    id: string;
+    selections: AgenticaOperationSelection<Model>[];
+  }
+}
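
`AgenticaCancelPrompt` is a small record type: it holds an id plus the cancelled selections, and `toJSON()` delegates to each selection. A hedged sketch of wrapping selections into such a record; the import paths and the `recordCancellation` helper are assumptions, not shown by this diff:

```ts
import type { ILlmSchema } from "@samchon/openapi";

// Illustrative imports: whether these symbols are re-exported from the
// package root is not visible in this diff.
import { AgenticaCancelPrompt } from "@agentica/core";
import type { AgenticaOperationSelection } from "@agentica/core";

// Hypothetical helper: wrap the selections being cancelled into a prompt
// record and serialize it for the prompt history.
function recordCancellation<Model extends ILlmSchema.Model>(
  id: string,
  selections: AgenticaOperationSelection<Model>[],
) {
  const prompt = new AgenticaCancelPrompt<Model>({ id, selections });
  return prompt.toJSON(); // { type: "cancel", id, selections: [...] }
}
```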
package/src/context/AgenticaClassOperation.ts
@@ -1,23 +1,23 @@
-import { ILlmApplication, ILlmFunction, ILlmSchema } from "@samchon/openapi";
-
-import { AgenticaOperationBase } from "./AgenticaOperationBase";
-
-export class AgenticaClassOperation<
-  Model extends ILlmSchema.Model,
-> extends AgenticaOperationBase<
-  "class",
-  ILlmApplication<Model>,
-  ILlmFunction<Model>
-> {
-  public constructor(props: AgenticaClassOperation.IProps<Model>) {
-    super(props);
-  }
-}
-export namespace AgenticaClassOperation {
-  export type IProps<Model extends ILlmSchema.Model> =
-    AgenticaOperationBase.IProps<
-      "class",
-      ILlmApplication<Model>,
-      ILlmFunction<Model>
-    >;
-}
+import { ILlmApplication, ILlmFunction, ILlmSchema } from "@samchon/openapi";
+
+import { AgenticaOperationBase } from "./AgenticaOperationBase";
+
+export class AgenticaClassOperation<
+  Model extends ILlmSchema.Model,
+> extends AgenticaOperationBase<
+  "class",
+  ILlmApplication<Model>,
+  ILlmFunction<Model>
+> {
+  public constructor(props: AgenticaClassOperation.IProps<Model>) {
+    super(props);
+  }
+}
+export namespace AgenticaClassOperation {
+  export type IProps<Model extends ILlmSchema.Model> =
+    AgenticaOperationBase.IProps<
+      "class",
+      ILlmApplication<Model>,
+      ILlmFunction<Model>
+    >;
+}
package/src/context/AgenticaContext.ts
@@ -1,130 +1,130 @@
-import { ILlmSchema } from "@samchon/openapi";
-import OpenAI from "openai";
-
-import { AgenticaEvent } from "../events/AgenticaEvent";
-import { AgenticaEventSource } from "../events/AgenticaEventSource";
-import { AgenticaPrompt } from "../prompts/AgenticaPrompt";
-import { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
-import { IAgenticaConfig } from "../structures/IAgenticaConfig";
-import { AgenticaOperationCollection } from "./AgenticaOperationCollection";
-import { AgenticaOperationSelection } from "./AgenticaOperationSelection";
-
-/**
- * Context of the Nestia A.I. agent.
- *
- * `IAgenticaContext` is a structure defining the context of the
- * internal agents composing the {@link Agentica}, like function
- * selector, executor, and describer, and so on. For example, if an
- * agent has been configured to utilize the OpenAI, the context will
- * be delivered to the below components.
- *
- * - {@link ChatGptAgent}
- * - {@link ChatGptInitializeFunctionAgent}
- * - {@link ChatGptSelectFunctionAgent}
- * - {@link ChatGptExecuteFunctionAgent}
- * - {@link ChatGptDescribeFunctionAgent}
- * - {@link ChatGptCancelFunctionAgent}
- *
- * Also, as its name is context, it contains every information that
- * is required to interact with the AI vendor like OpenAI. It
- * contains every operations for LLM function calling, and
- * configuration used for the agent construction. And it contains
- * the prompt histories, and facade controller functions for
- * interacting with the {@link Agentica} like {@link dispatch}.
- *
- * In such reasons, if you're planning to customize some internal
- * agents, or add new agents with new process routine, you have to
- * understand this context structure. Otherwise you don't have any
- * plan to customize the internal agents, this context information is
- * not important for you.
- *
- * @author Samchon
- */
-export interface AgenticaContext<Model extends ILlmSchema.Model> {
-  //----
-  // APPLICATION
-  //----
-  /**
-   * Collection of operations.
-   *
-   * Collection of operations from every controllers, and their
-   * groups composed by the divide and conquer rule for the
-   * efficient operation selection if configured.
-   */
-  operations: AgenticaOperationCollection<Model>;
-
-  /**
-   * Configuration of the agent.
-   *
-   * Configuration of the agent, that is used when constructing the
-   * {@link Agentica} instance.
-   *
-   * @todo Write detaily after supporting the agent customization feature
-   */
-  config: IAgenticaConfig<Model> | undefined;
-
-  //----
-  // STATES
-  //----
-  /**
-   * Prompt histories.
-   */
-  histories: AgenticaPrompt<Model>[];
-
-  /**
-   * Stacked operations.
-   *
-   * In other words, list of candidate operations for the LLM function calling.
-   */
-  stack: AgenticaOperationSelection<Model>[];
-
-  /**
-   * Text prompt of the user.
-   *
-   * Text conversation written the by user through the
-   * {@link Agentica.conversate} function.
-   */
-  prompt: AgenticaTextPrompt<"user">;
-
-  /**
-   * Whether the agent is ready.
-   *
-   * Returns a boolean value indicates whether the agent is ready to
-   * perform the function calling.
-   *
-   * If the agent has called the {@link AgenticaContext.initialize},
-   * it returns `true`. Otherwise the {@link initialize} has never been
-   * called, returns `false`.
-   */
-  ready: () => boolean;
-
-  //----
-  // HANDLERS
-  //----
-  /**
-   * Dispatch event.
-   *
-   * Dispatch event so that the agent can be handle the event
-   * through the {@link Agentica.on} function.
-   *
-   * @param event Event to deliver
-   */
-  dispatch: (event: AgenticaEvent<Model>) => Promise<void>;
-
-  /**
-   * Request to the OpenAI server.
-   *
-   * @param source The source agent of the agent
-   * @param body The request body to the OpenAI server
-   * @returns Response from the OpenAI server
-   */
-  request: (
-    source: AgenticaEventSource,
-    body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
-  ) => Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>>;
-
-  /**
-   * Initialize the agent.
-   */
-  initialize: () => Promise<void>;
-}
+import { ILlmSchema } from "@samchon/openapi";
+import OpenAI from "openai";
+
+import { AgenticaEvent } from "../events/AgenticaEvent";
+import { AgenticaEventSource } from "../events/AgenticaEventSource";
+import { AgenticaPrompt } from "../prompts/AgenticaPrompt";
+import { AgenticaTextPrompt } from "../prompts/AgenticaTextPrompt";
+import { IAgenticaConfig } from "../structures/IAgenticaConfig";
+import { AgenticaOperationCollection } from "./AgenticaOperationCollection";
+import { AgenticaOperationSelection } from "./AgenticaOperationSelection";
+
+/**
+ * Context of the Nestia A.I. agent.
+ *
+ * `IAgenticaContext` is a structure defining the context of the
+ * internal agents composing the {@link Agentica}, like function
+ * selector, executor, and describer, and so on. For example, if an
+ * agent has been configured to utilize the OpenAI, the context will
+ * be delivered to the below components.
+ *
+ * - {@link ChatGptAgent}
+ * - {@link ChatGptInitializeFunctionAgent}
+ * - {@link ChatGptSelectFunctionAgent}
+ * - {@link ChatGptExecuteFunctionAgent}
+ * - {@link ChatGptDescribeFunctionAgent}
+ * - {@link ChatGptCancelFunctionAgent}
+ *
+ * Also, as its name is context, it contains every information that
+ * is required to interact with the AI vendor like OpenAI. It
+ * contains every operations for LLM function calling, and
+ * configuration used for the agent construction. And it contains
+ * the prompt histories, and facade controller functions for
+ * interacting with the {@link Agentica} like {@link dispatch}.
+ *
+ * In such reasons, if you're planning to customize some internal
+ * agents, or add new agents with new process routine, you have to
+ * understand this context structure. Otherwise you don't have any
+ * plan to customize the internal agents, this context information is
+ * not important for you.
+ *
+ * @author Samchon
+ */
+export interface AgenticaContext<Model extends ILlmSchema.Model> {
+  //----
+  // APPLICATION
+  //----
+  /**
+   * Collection of operations.
+   *
+   * Collection of operations from every controllers, and their
+   * groups composed by the divide and conquer rule for the
+   * efficient operation selection if configured.
+   */
+  operations: AgenticaOperationCollection<Model>;
+
+  /**
+   * Configuration of the agent.
+   *
+   * Configuration of the agent, that is used when constructing the
+   * {@link Agentica} instance.
+   *
+   * @todo Write detaily after supporting the agent customization feature
+   */
+  config: IAgenticaConfig<Model> | undefined;
+
+  //----
+  // STATES
+  //----
+  /**
+   * Prompt histories.
+   */
+  histories: AgenticaPrompt<Model>[];
+
+  /**
+   * Stacked operations.
+   *
+   * In other words, list of candidate operations for the LLM function calling.
+   */
+  stack: AgenticaOperationSelection<Model>[];
+
+  /**
+   * Text prompt of the user.
+   *
+   * Text conversation written the by user through the
+   * {@link Agentica.conversate} function.
+   */
+  prompt: AgenticaTextPrompt<"user">;
+
+  /**
+   * Whether the agent is ready.
+   *
+   * Returns a boolean value indicates whether the agent is ready to
+   * perform the function calling.
+   *
+   * If the agent has called the {@link AgenticaContext.initialize},
+   * it returns `true`. Otherwise the {@link initialize} has never been
+   * called, returns `false`.
+   */
+  ready: () => boolean;
+
+  //----
+  // HANDLERS
+  //----
+  /**
+   * Dispatch event.
+   *
+   * Dispatch event so that the agent can be handle the event
+   * through the {@link Agentica.on} function.
+   *
+   * @param event Event to deliver
+   */
+  dispatch: (event: AgenticaEvent<Model>) => Promise<void>;
+
+  /**
+   * Request to the OpenAI server.
+   *
+   * @param source The source agent of the agent
+   * @param body The request body to the OpenAI server
+   * @returns Response from the OpenAI server
+   */
+  request: (
+    source: AgenticaEventSource,
+    body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
+  ) => Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>>;
+
+  /**
+   * Initialize the agent.
+   */
+  initialize: () => Promise<void>;
+}
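
`AgenticaContext` is the surface a custom internal agent works against. The sketch below relies only on members declared by the interface above; the import paths, the event-source argument, and the user message are illustrative placeholders rather than anything confirmed by this diff:

```ts
import type { ILlmSchema } from "@samchon/openapi";

// Illustrative imports: the package's public entry point is not shown here.
import type { AgenticaContext } from "@agentica/core";
import type { AgenticaEventSource } from "@agentica/core";

// Sketch of a custom agent step that requests a streamed completion from the
// configured vendor and drains the chunk stream.
async function customStep<Model extends ILlmSchema.Model>(
  ctx: AgenticaContext<Model>,
  source: AgenticaEventSource,
): Promise<void> {
  // Make sure the initialize round-trip has happened before function calling.
  if (!ctx.ready()) await ctx.initialize();

  // "model" and "stream" are injected by the library, so only the remaining
  // chat-completion parameters are passed; the message content is a placeholder.
  const stream = await ctx.request(source, {
    messages: [{ role: "user", content: "Summarize the selected operations." }],
  });

  // Drain the chunks; a real agent would decode text deltas and tool calls.
  const reader = stream.getReader();
  while (true) {
    const { done } = await reader.read();
    if (done) break;
  }
}
```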
package/src/context/AgenticaHttpOperation.ts
@@ -1,27 +1,27 @@
-import {
-  IHttpLlmApplication,
-  IHttpLlmFunction,
-  ILlmSchema,
-} from "@samchon/openapi";
-
-import { AgenticaOperationBase } from "./AgenticaOperationBase";
-
-export class AgenticaHttpOperation<
-  Model extends ILlmSchema.Model,
-> extends AgenticaOperationBase<
-  "http",
-  IHttpLlmApplication<Model>,
-  IHttpLlmFunction<Model>
-> {
-  public constructor(props: AgenticaHttpOperation.IProps<Model>) {
-    super(props);
-  }
-}
-export namespace AgenticaHttpOperation {
-  export type IProps<Model extends ILlmSchema.Model> =
-    AgenticaOperationBase.IProps<
-      "http",
-      IHttpLlmApplication<Model>,
-      IHttpLlmFunction<Model>
-    >;
-}
+import {
+  IHttpLlmApplication,
+  IHttpLlmFunction,
+  ILlmSchema,
+} from "@samchon/openapi";
+
+import { AgenticaOperationBase } from "./AgenticaOperationBase";
+
+export class AgenticaHttpOperation<
+  Model extends ILlmSchema.Model,
+> extends AgenticaOperationBase<
+  "http",
+  IHttpLlmApplication<Model>,
+  IHttpLlmFunction<Model>
+> {
+  public constructor(props: AgenticaHttpOperation.IProps<Model>) {
+    super(props);
+  }
+}
+export namespace AgenticaHttpOperation {
+  export type IProps<Model extends ILlmSchema.Model> =
+    AgenticaOperationBase.IProps<
+      "http",
+      IHttpLlmApplication<Model>,
+      IHttpLlmFunction<Model>
+    >;
+}
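
`AgenticaHttpOperation` and `AgenticaClassOperation` are thin specializations of `AgenticaOperationBase`, differing only in the literal protocol type and the application/function types. A sketch of telling the two apart follows; the `protocol` discriminator property and the `Operation` union alias are assumptions inferred from the `"http"` / `"class"` type arguments, not something this diff confirms:

```ts
import type { ILlmSchema } from "@samchon/openapi";

// Illustrative imports; the public entry point is not shown in this diff.
import type {
  AgenticaClassOperation,
  AgenticaHttpOperation,
} from "@agentica/core";

// Hypothetical union mirroring AgenticaOperation (file 25 in the listing).
type Operation<Model extends ILlmSchema.Model> =
  | AgenticaHttpOperation<Model>
  | AgenticaClassOperation<Model>;

// Assumes the base class exposes its first type argument as `protocol`.
function describeOperation<Model extends ILlmSchema.Model>(
  operation: Operation<Model>,
): string {
  return operation.protocol === "http"
    ? "invokes a backend endpoint resolved from an OpenAPI document"
    : "invokes a method of a TypeScript class controller";
}
```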