@agentica/core 0.10.1-dev.20250302 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/LICENSE +21 -21
  2. package/README.md +419 -419
  3. package/package.json +1 -1
  4. package/prompts/cancel.md +4 -4
  5. package/prompts/common.md +2 -2
  6. package/prompts/describe.md +6 -6
  7. package/prompts/execute.md +6 -6
  8. package/prompts/initialize.md +2 -2
  9. package/prompts/select.md +6 -6
  10. package/src/Agentica.ts +323 -323
  11. package/src/chatgpt/ChatGptAgent.ts +75 -75
  12. package/src/chatgpt/ChatGptCallFunctionAgent.ts +464 -464
  13. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +287 -287
  14. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +52 -52
  15. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  16. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  17. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +319 -319
  18. package/src/functional/createHttpLlmApplication.ts +63 -63
  19. package/src/index.ts +19 -19
  20. package/src/internal/AgenticaConstant.ts +4 -4
  21. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  22. package/src/internal/AgenticaOperationComposer.ts +87 -87
  23. package/src/internal/AgenticaPromptFactory.ts +32 -32
  24. package/src/internal/AgenticaPromptTransformer.ts +86 -86
  25. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -115
  26. package/src/internal/MathUtil.ts +3 -3
  27. package/src/internal/Singleton.ts +22 -22
  28. package/src/internal/__map_take.ts +15 -15
  29. package/src/structures/IAgenticaConfig.ts +123 -123
  30. package/src/structures/IAgenticaContext.ts +129 -129
  31. package/src/structures/IAgenticaController.ts +133 -133
  32. package/src/structures/IAgenticaEvent.ts +229 -229
  33. package/src/structures/IAgenticaExecutor.ts +156 -156
  34. package/src/structures/IAgenticaOperation.ts +63 -63
  35. package/src/structures/IAgenticaOperationCollection.ts +52 -52
  36. package/src/structures/IAgenticaOperationSelection.ts +68 -68
  37. package/src/structures/IAgenticaPrompt.ts +182 -182
  38. package/src/structures/IAgenticaProps.ts +70 -70
  39. package/src/structures/IAgenticaSystemPrompt.ts +124 -124
  40. package/src/structures/IAgenticaTokenUsage.ts +107 -107
  41. package/src/structures/IAgenticaVendor.ts +39 -39
  42. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  43. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  44. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  45. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  46. package/src/typings/AgenticaSource.ts +6 -6
package/src/structures/IAgenticaConfig.ts
@@ -1,123 +1,123 @@
- import { ILlmSchema } from "@samchon/openapi";
-
- import { IAgenticaContext } from "./IAgenticaContext";
- import { IAgenticaExecutor } from "./IAgenticaExecutor";
- import { IAgenticaPrompt } from "./IAgenticaPrompt";
- import { IAgenticaSystemPrompt } from "./IAgenticaSystemPrompt";
-
- /**
-  * Configuration for Nestia Agent.
-  *
-  * `IAgenticaConfig` is an interface that defines the configuration
-  * properties of the {@link Agentica}. With this configuration, you
-  * can set the user's locale, timezone, and some of the system prompts.
-  *
-  * Also, you can affect the LLM function selecting/calling logic by
-  * configuring additional properties. For example, if you configure the
-  * {@link capacity} property, the A.I. chatbot will divide the functions
-  * into several groups of the configured capacity and select proper
-  * functions to call by operating multiple LLM function selecting
-  * agents in parallel.
-  *
-  * @author Samchon
-  */
- export interface IAgenticaConfig<Model extends ILlmSchema.Model> {
-   /**
-    * Locale of the A.I. chatbot.
-    *
-    * If you configure this property, the A.I. chatbot will conversate in
-    * the given locale. You can get the locale value from
-    *
-    * - Browser: `navigator.language`
-    * - NodeJS: `process.env.LANG.split(".")[0]`
-    *
-    * @default your_locale
-    */
-   locale?: string;
-
-   /**
-    * Timezone of the A.I. chatbot.
-    *
-    * If you configure this property, the A.I. chatbot will consider the
-    * given timezone. You can get the timezone value from
-    * `Intl.DateTimeFormat().resolvedOptions().timeZone`.
-    *
-    * @default your_timezone
-    */
-   timezone?: string;
-
-   /**
-    * Retry count.
-    *
-    * If the arguments composed for LLM function calling are invalid,
-    * the A.I. chatbot will retry calling the function with
-    * the modified arguments.
-    *
-    * Note that if you configure it to 0 or 1, the A.I. chatbot
-    * will not retry the LLM function calling to correct the
-    * arguments.
-    *
-    * @default 3
-    */
-   retry?: number;
-
-   /**
-    * Capacity of the LLM function selecting.
-    *
-    * When the A.I. chatbot selects a proper function to call, if the
-    * number of functions registered in the
-    * {@link IAgenticaProps.applications} is too large,
-    * the A.I. chatbot often falls into hallucination.
-    *
-    * In that case, if you configure this property value, `Agentica`
-    * will divide the functions into several groups of the configured
-    * capacity and select proper functions to call by operating multiple
-    * LLM function selecting agents in parallel.
-    *
-    * @default 100
-    */
-   capacity?: number;
-
-   /**
-    * Eliticism for the LLM function selecting.
-    *
-    * If you configure {@link capacity}, the A.I. chatbot will complete
-    * the candidate functions to call which are selected by the multiple
-    * LLM function selecting agents.
-    *
-    * Otherwise, if you configure this property as `false`, the A.I. chatbot
-    * will not complete the candidate functions to call and will just accept
-    * every candidate function to call which is selected by the multiple
-    * LLM function selecting agents.
-    *
-    * @default true
-    */
-   eliticism?: boolean;
-
-   /**
-    * System prompt messages.
-    *
-    * System prompt messages, in case you want to customize the system prompt
-    * messages for each situation.
-    */
-   systemPrompt?: IAgenticaSystemPrompt<Model>;
-
-   /**
-    * Agent executor.
-    *
-    * Executor function of the Agentic AI's iteration plan over the internal agents
-    * run by the {@link Agentica.conversate} function.
-    *
-    * If you want to customize the agent execution plan, you can do it
-    * by assigning your own logic function, in whole or in part, to this property.
-    * When customizing it, it would be better to reference the
-    * {@link ChatGptAgent.execute} function.
-    *
-    * @param ctx Context of the agent
-    * @returns List of prompts generated by the executor
-    * @default ChatGptAgent.execute
-    */
-   executor?:
-     | Partial<IAgenticaExecutor<Model>>
-     | ((ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>);
- }
+ import { ILlmSchema } from "@samchon/openapi";
+
+ import { IAgenticaContext } from "./IAgenticaContext";
+ import { IAgenticaExecutor } from "./IAgenticaExecutor";
+ import { IAgenticaPrompt } from "./IAgenticaPrompt";
+ import { IAgenticaSystemPrompt } from "./IAgenticaSystemPrompt";
+
+ /**
+  * Configuration for Nestia Agent.
+  *
+  * `IAgenticaConfig` is an interface that defines the configuration
+  * properties of the {@link Agentica}. With this configuration, you
+  * can set the user's locale, timezone, and some of the system prompts.
+  *
+  * Also, you can affect the LLM function selecting/calling logic by
+  * configuring additional properties. For example, if you configure the
+  * {@link capacity} property, the A.I. chatbot will divide the functions
+  * into several groups of the configured capacity and select proper
+  * functions to call by operating multiple LLM function selecting
+  * agents in parallel.
+  *
+  * @author Samchon
+  */
+ export interface IAgenticaConfig<Model extends ILlmSchema.Model> {
+   /**
+    * Locale of the A.I. chatbot.
+    *
+    * If you configure this property, the A.I. chatbot will conversate in
+    * the given locale. You can get the locale value from
+    *
+    * - Browser: `navigator.language`
+    * - NodeJS: `process.env.LANG.split(".")[0]`
+    *
+    * @default your_locale
+    */
+   locale?: string;
+
+   /**
+    * Timezone of the A.I. chatbot.
+    *
+    * If you configure this property, the A.I. chatbot will consider the
+    * given timezone. You can get the timezone value from
+    * `Intl.DateTimeFormat().resolvedOptions().timeZone`.
+    *
+    * @default your_timezone
+    */
+   timezone?: string;
+
+   /**
+    * Retry count.
+    *
+    * If the arguments composed for LLM function calling are invalid,
+    * the A.I. chatbot will retry calling the function with
+    * the modified arguments.
+    *
+    * Note that if you configure it to 0 or 1, the A.I. chatbot
+    * will not retry the LLM function calling to correct the
+    * arguments.
+    *
+    * @default 3
+    */
+   retry?: number;
+
+   /**
+    * Capacity of the LLM function selecting.
+    *
+    * When the A.I. chatbot selects a proper function to call, if the
+    * number of functions registered in the
+    * {@link IAgenticaProps.applications} is too large,
+    * the A.I. chatbot often falls into hallucination.
+    *
+    * In that case, if you configure this property value, `Agentica`
+    * will divide the functions into several groups of the configured
+    * capacity and select proper functions to call by operating multiple
+    * LLM function selecting agents in parallel.
+    *
+    * @default 100
+    */
+   capacity?: number;
+
+   /**
+    * Eliticism for the LLM function selecting.
+    *
+    * If you configure {@link capacity}, the A.I. chatbot will complete
+    * the candidate functions to call which are selected by the multiple
+    * LLM function selecting agents.
+    *
+    * Otherwise, if you configure this property as `false`, the A.I. chatbot
+    * will not complete the candidate functions to call and will just accept
+    * every candidate function to call which is selected by the multiple
+    * LLM function selecting agents.
+    *
+    * @default true
+    */
+   eliticism?: boolean;
+
+   /**
+    * System prompt messages.
+    *
+    * System prompt messages, in case you want to customize the system prompt
+    * messages for each situation.
+    */
+   systemPrompt?: IAgenticaSystemPrompt<Model>;
+
+   /**
+    * Agent executor.
+    *
+    * Executor function of the Agentic AI's iteration plan over the internal agents
+    * run by the {@link Agentica.conversate} function.
+    *
+    * If you want to customize the agent execution plan, you can do it
+    * by assigning your own logic function, in whole or in part, to this property.
+    * When customizing it, it would be better to reference the
+    * {@link ChatGptAgent.execute} function.
+    *
+    * @param ctx Context of the agent
+    * @returns List of prompts generated by the executor
+    * @default ChatGptAgent.execute
+    */
+   executor?:
+     | Partial<IAgenticaExecutor<Model>>
+     | ((ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>);
+ }
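For reference, the options declared in `IAgenticaConfig` are consumed when constructing an `Agentica` instance. The sketch below shows how they might be wired up; every config field comes from the interface above, while the constructor shape (`model`, `vendor`, `controllers`, `config`, following `IAgenticaProps` and `IAgenticaVendor` from the file list) and the OpenAI client and model id are illustrative assumptions rather than something confirmed by this diff.

import { Agentica, IAgenticaConfig } from "@agentica/core";
import OpenAI from "openai";

// Every field below comes from IAgenticaConfig above; the values are illustrative.
const config: IAgenticaConfig<"chatgpt"> = {
  locale: "en-US", // e.g. navigator.language (browser) or process.env.LANG.split(".")[0] (NodeJS)
  timezone: "Asia/Seoul", // Intl.DateTimeFormat().resolvedOptions().timeZone
  retry: 3, // re-compose invalid function-calling arguments up to 3 times
  capacity: 100, // split functions into groups of 100 for parallel selection
  eliticism: true, // run a final pass over the candidates selected by each group
};

// Assumed constructor shape (see IAgenticaProps / IAgenticaVendor in the file list above).
async function main(): Promise<void> {
  const agent = new Agentica({
    model: "chatgpt",
    vendor: {
      api: new OpenAI({ apiKey: process.env.OPENAI_API_KEY }),
      model: "gpt-4o-mini", // illustrative model id
    },
    controllers: [], // register function-calling controllers here
    config,
  });
  await agent.conversate("Hello"); // conversation entry point referenced in the JSDoc
}
main().catch(console.error);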
package/src/structures/IAgenticaContext.ts
@@ -1,129 +1,129 @@
- import { ILlmSchema } from "@samchon/openapi";
- import OpenAI from "openai";
-
- import { AgenticaSource } from "../typings/AgenticaSource";
- import { IAgenticaConfig } from "./IAgenticaConfig";
- import { IAgenticaEvent } from "./IAgenticaEvent";
- import { IAgenticaOperationCollection } from "./IAgenticaOperationCollection";
- import { IAgenticaOperationSelection } from "./IAgenticaOperationSelection";
- import { IAgenticaPrompt } from "./IAgenticaPrompt";
-
- /**
-  * Context of the Nestia A.I. agent.
-  *
-  * `IAgenticaContext` is a structure defining the context of the
-  * internal agents composing the {@link Agentica}, such as the function
-  * selector, executor, describer, and so on. For example, if an
-  * agent has been configured to utilize OpenAI, the context will
-  * be delivered to the components below.
-  *
-  * - {@link ChatGptAgent}
-  * - {@link ChatGptInitializeFunctionAgent}
-  * - {@link ChatGptSelectFunctionAgent}
-  * - {@link ChatGptExecuteFunctionAgent}
-  * - {@link ChatGptDescribeFunctionAgent}
-  * - {@link ChatGptCancelFunctionAgent}
-  *
-  * Also, as its name implies, it contains every piece of information that
-  * is required to interact with the AI vendor like OpenAI. It
-  * contains every operation for LLM function calling and the
-  * configuration used for the agent construction. And it contains
-  * the prompt histories and facade controller functions for
-  * interacting with the {@link Agentica}, like {@link dispatch}.
-  *
-  * For such reasons, if you're planning to customize some internal
-  * agents, or add new agents with a new process routine, you have to
-  * understand this context structure. Otherwise, if you don't have any
-  * plan to customize the internal agents, this context information is
-  * not important for you.
-  *
-  * @author Samchon
-  */
- export interface IAgenticaContext<Model extends ILlmSchema.Model> {
-   //----
-   // APPLICATION
-   //----
-   /**
-    * Collection of operations.
-    *
-    * Collection of operations from every controller, and their
-    * groups composed by the divide and conquer rule for the
-    * efficient operation selection, if configured.
-    */
-   operations: IAgenticaOperationCollection<Model>;
-
-   /**
-    * Configuration of the agent.
-    *
-    * Configuration of the agent that is used when constructing the
-    * {@link Agentica} instance.
-    *
-    * @todo Write in detail after supporting the agent customization feature
-    */
-   config: IAgenticaConfig<Model> | undefined;
-
-   //----
-   // STATES
-   //----
-   /**
-    * Prompt histories.
-    */
-   histories: IAgenticaPrompt<Model>[];
-
-   /**
-    * Stacked operations.
-    *
-    * In other words, the list of candidate operations for the LLM function calling.
-    */
-   stack: IAgenticaOperationSelection<Model>[];
-
-   /**
-    * Text prompt of the user.
-    *
-    * Text conversation written by the user through the
-    * {@link Agentica.conversate} function.
-    */
-   prompt: IAgenticaPrompt.IText<"user">;
-
-   /**
-    * Whether the agent is ready.
-    *
-    * Returns a boolean value indicating whether the agent is ready to
-    * perform the function calling.
-    *
-    * If the agent has called {@link IAgenticaContext.initialize},
-    * it returns `true`. Otherwise, if {@link initialize} has never been
-    * called, it returns `false`.
-    */
-   ready: () => boolean;
-
-   //----
-   // HANDLERS
-   //----
-   /**
-    * Dispatch event.
-    *
-    * Dispatch an event so that the agent can handle the event
-    * through the {@link Agentica.on} function.
-    *
-    * @param event Event to deliver
-    */
-   dispatch: (event: IAgenticaEvent<Model>) => Promise<void>;
-
-   /**
-    * Request to the OpenAI server.
-    *
-    * @param source Source agent of the request
-    * @param body The request body to the OpenAI server
-    * @returns Response from the OpenAI server
-    */
-   request: (
-     source: AgenticaSource,
-     body: Omit<OpenAI.ChatCompletionCreateParamsNonStreaming, "model">,
-   ) => Promise<OpenAI.ChatCompletion>;
-
-   /**
-    * Initialize the agent.
-    */
-   initialize: () => Promise<void>;
- }
+ import { ILlmSchema } from "@samchon/openapi";
+ import OpenAI from "openai";
+
+ import { AgenticaSource } from "../typings/AgenticaSource";
+ import { IAgenticaConfig } from "./IAgenticaConfig";
+ import { IAgenticaEvent } from "./IAgenticaEvent";
+ import { IAgenticaOperationCollection } from "./IAgenticaOperationCollection";
+ import { IAgenticaOperationSelection } from "./IAgenticaOperationSelection";
+ import { IAgenticaPrompt } from "./IAgenticaPrompt";
+
+ /**
+  * Context of the Nestia A.I. agent.
+  *
+  * `IAgenticaContext` is a structure defining the context of the
+  * internal agents composing the {@link Agentica}, such as the function
+  * selector, executor, describer, and so on. For example, if an
+  * agent has been configured to utilize OpenAI, the context will
+  * be delivered to the components below.
+  *
+  * - {@link ChatGptAgent}
+  * - {@link ChatGptInitializeFunctionAgent}
+  * - {@link ChatGptSelectFunctionAgent}
+  * - {@link ChatGptExecuteFunctionAgent}
+  * - {@link ChatGptDescribeFunctionAgent}
+  * - {@link ChatGptCancelFunctionAgent}
+  *
+  * Also, as its name implies, it contains every piece of information that
+  * is required to interact with the AI vendor like OpenAI. It
+  * contains every operation for LLM function calling and the
+  * configuration used for the agent construction. And it contains
+  * the prompt histories and facade controller functions for
+  * interacting with the {@link Agentica}, like {@link dispatch}.
+  *
+  * For such reasons, if you're planning to customize some internal
+  * agents, or add new agents with a new process routine, you have to
+  * understand this context structure. Otherwise, if you don't have any
+  * plan to customize the internal agents, this context information is
+  * not important for you.
+  *
+  * @author Samchon
+  */
+ export interface IAgenticaContext<Model extends ILlmSchema.Model> {
+   //----
+   // APPLICATION
+   //----
+   /**
+    * Collection of operations.
+    *
+    * Collection of operations from every controller, and their
+    * groups composed by the divide and conquer rule for the
+    * efficient operation selection, if configured.
+    */
+   operations: IAgenticaOperationCollection<Model>;
+
+   /**
+    * Configuration of the agent.
+    *
+    * Configuration of the agent that is used when constructing the
+    * {@link Agentica} instance.
+    *
+    * @todo Write in detail after supporting the agent customization feature
+    */
+   config: IAgenticaConfig<Model> | undefined;
+
+   //----
+   // STATES
+   //----
+   /**
+    * Prompt histories.
+    */
+   histories: IAgenticaPrompt<Model>[];
+
+   /**
+    * Stacked operations.
+    *
+    * In other words, the list of candidate operations for the LLM function calling.
+    */
+   stack: IAgenticaOperationSelection<Model>[];
+
+   /**
+    * Text prompt of the user.
+    *
+    * Text conversation written by the user through the
+    * {@link Agentica.conversate} function.
+    */
+   prompt: IAgenticaPrompt.IText<"user">;
+
+   /**
+    * Whether the agent is ready.
+    *
+    * Returns a boolean value indicating whether the agent is ready to
+    * perform the function calling.
+    *
+    * If the agent has called {@link IAgenticaContext.initialize},
+    * it returns `true`. Otherwise, if {@link initialize} has never been
+    * called, it returns `false`.
+    */
+   ready: () => boolean;
+
+   //----
+   // HANDLERS
+   //----
+   /**
+    * Dispatch event.
+    *
+    * Dispatch an event so that the agent can handle the event
+    * through the {@link Agentica.on} function.
+    *
+    * @param event Event to deliver
+    */
+   dispatch: (event: IAgenticaEvent<Model>) => Promise<void>;
+
+   /**
+    * Request to the OpenAI server.
+    *
+    * @param source Source agent of the request
+    * @param body The request body to the OpenAI server
+    * @returns Response from the OpenAI server
+    */
+   request: (
+     source: AgenticaSource,
+     body: Omit<OpenAI.ChatCompletionCreateParamsNonStreaming, "model">,
+   ) => Promise<OpenAI.ChatCompletion>;
+
+   /**
+    * Initialize the agent.
+    */
+   initialize: () => Promise<void>;
+ }
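For reference, `IAgenticaContext` is what a custom `executor` (see `IAgenticaConfig.executor` above) receives on every `Agentica.conversate` call. The sketch below is a hypothetical executor, not the default `ChatGptAgent.execute` plan; it only uses members declared in this file (`ready`, `initialize`, `stack`, `histories`), and the `"chatgpt"` model key is an assumed `ILlmSchema.Model` value.

import { IAgenticaConfig, IAgenticaContext, IAgenticaPrompt } from "@agentica/core";

// Hypothetical custom executor: its signature follows IAgenticaConfig.executor, and the
// context members used here are the ones declared in IAgenticaContext above.
const executor = async (
  ctx: IAgenticaContext<"chatgpt">,
): Promise<IAgenticaPrompt<"chatgpt">[]> => {
  // Initialize the agent once, before any function selecting or calling.
  if (ctx.ready() === false) {
    await ctx.initialize();
  }

  // Inspect the current state: candidate operations on the stack and prior prompt histories.
  console.log("candidate operations:", ctx.stack.length);
  console.log("prompt histories:", ctx.histories.length);

  // A real executor would iterate the select/execute/describe agents here, as the JSDoc
  // suggests by referencing ChatGptAgent.execute; returning an empty list keeps this sketch minimal.
  return [];
};

// Wire the custom executor into the configuration defined in IAgenticaConfig.
const config: IAgenticaConfig<"chatgpt"> = { executor };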