@agentica/core 0.10.1-dev.20250302 → 0.10.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/LICENSE +21 -21
  2. package/README.md +461 -419
  3. package/package.json +1 -1
  4. package/prompts/cancel.md +4 -4
  5. package/prompts/common.md +2 -2
  6. package/prompts/describe.md +6 -6
  7. package/prompts/execute.md +6 -6
  8. package/prompts/initialize.md +2 -2
  9. package/prompts/select.md +6 -6
  10. package/src/Agentica.ts +323 -323
  11. package/src/chatgpt/ChatGptAgent.ts +75 -75
  12. package/src/chatgpt/ChatGptCallFunctionAgent.ts +464 -464
  13. package/src/chatgpt/ChatGptCancelFunctionAgent.ts +287 -287
  14. package/src/chatgpt/ChatGptDescribeFunctionAgent.ts +52 -52
  15. package/src/chatgpt/ChatGptHistoryDecoder.ts +88 -88
  16. package/src/chatgpt/ChatGptInitializeFunctionAgent.ts +88 -88
  17. package/src/chatgpt/ChatGptSelectFunctionAgent.ts +319 -319
  18. package/src/functional/createHttpLlmApplication.ts +63 -63
  19. package/src/index.ts +19 -19
  20. package/src/internal/AgenticaConstant.ts +4 -4
  21. package/src/internal/AgenticaDefaultPrompt.ts +43 -43
  22. package/src/internal/AgenticaOperationComposer.ts +87 -87
  23. package/src/internal/AgenticaPromptFactory.ts +32 -32
  24. package/src/internal/AgenticaPromptTransformer.ts +86 -86
  25. package/src/internal/AgenticaTokenUsageAggregator.ts +115 -115
  26. package/src/internal/MathUtil.ts +3 -3
  27. package/src/internal/Singleton.ts +22 -22
  28. package/src/internal/__map_take.ts +15 -15
  29. package/src/structures/IAgenticaConfig.ts +123 -123
  30. package/src/structures/IAgenticaContext.ts +129 -129
  31. package/src/structures/IAgenticaController.ts +133 -133
  32. package/src/structures/IAgenticaEvent.ts +229 -229
  33. package/src/structures/IAgenticaExecutor.ts +156 -156
  34. package/src/structures/IAgenticaOperation.ts +63 -63
  35. package/src/structures/IAgenticaOperationCollection.ts +52 -52
  36. package/src/structures/IAgenticaOperationSelection.ts +68 -68
  37. package/src/structures/IAgenticaPrompt.ts +182 -182
  38. package/src/structures/IAgenticaProps.ts +70 -70
  39. package/src/structures/IAgenticaSystemPrompt.ts +124 -124
  40. package/src/structures/IAgenticaTokenUsage.ts +107 -107
  41. package/src/structures/IAgenticaVendor.ts +39 -39
  42. package/src/structures/internal/__IChatCancelFunctionsApplication.ts +23 -23
  43. package/src/structures/internal/__IChatFunctionReference.ts +21 -21
  44. package/src/structures/internal/__IChatInitialApplication.ts +15 -15
  45. package/src/structures/internal/__IChatSelectFunctionsApplication.ts +24 -24
  46. package/src/typings/AgenticaSource.ts +6 -6
package/src/structures/IAgenticaExecutor.ts
@@ -1,156 +1,156 @@
- import { ILlmSchema } from "@samchon/openapi";
-
- import { IAgenticaContext } from "./IAgenticaContext";
- import { IAgenticaPrompt } from "./IAgenticaPrompt";
-
- /**
-  * Executor of the Agentic AI.
-  *
-  * `IAgenticaExecutor` represents an executor of the {@link Agentica},
-  * composing its internal agents to accomplish the Agentic AI through
-  * LLM (Large Language Model) function calling.
-  *
-  * You can customize one of these internal agents by configuring
-  * properties of the `IAgenticaExecutor` type and assigning it to the
-  * {@link IAgenticaConfig.executor} property. If you set
-  * {@link initialize} to `null`, the {@link Agentica} will skip
-  * the initialize process and go directly to the {@link select} process.
-  *
-  * When customizing the executor members, it is better to reference
-  * the guide documents of `@agentica/core` and the internal agents'
-  * implementation code, because a mistake in the executor logic
-  * can entirely break the {@link Agentica}'s operation.
-  *
-  * @reference https://github.com/wrtnlabs/agentica?tab=readme-ov-file#principles
-  * @reference https://github.com/wrtnlabs/agentica/blob/main/packages/agent/src/chatgpt/ChatGptAgent.ts
-  * @author Samchon
-  */
- export interface IAgenticaExecutor<Model extends ILlmSchema.Model> {
-   /**
-    * Initializer agent listing up functions.
-    *
-    * The `initialize` agent is the first agent that {@link Agentica}
-    * meets; it judges whether the user's conversation implies
-    * a function call or not.
-    *
-    * If the `initialize` agent judges that the user's conversation
-    * implies a function call, it calls the
-    * {@link IAgenticaContext.initialize} function and informs the
-    * AI agent of every function enrolled in the
-    * {@link IAgenticaController}. After that, the `initialize` agent
-    * is never called again, and {@link Agentica} moves on to the next
-    * {@link select} agent.
-    *
-    * Otherwise, if the user's conversation does not imply a function
-    * calling request, it just works like a plain chatbot and simply
-    * converses with the user.
-    *
-    * If you want to skip the `initialize` agent, you can do so by
-    * configuring the {@link IAgenticaConfig.executor} with a `null`
-    * `initialize` value. In that case, the `initialize` agent is
-    * never called, and {@link Agentica} just starts from the
-    * {@link select} agent.
-    *
-    * @param ctx Context of the agent
-    * @returns List of prompts generated by the initializer
-    */
-   initialize:
-     | null
-     | ((ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>);
-
-   /**
-    * Function selector agent.
-    *
-    * The `select` agent finds candidate functions to call from the
-    * conversation context with the user. The candidate functions
-    * are enrolled in the {@link IAgenticaContext.stack}, and the
-    * next {@link call} agent performs the LLM (Large Language Model)
-    * function calling.
-    *
-    * Note that the `select` agent does not perform the LLM function
-    * calling itself; it ends by just finding the candidate functions to call.
-    *
-    * If the `select` agent cannot specify a certain function to call,
-    * due to a lack of conversation context or homogeneity between
-    * heterogeneous functions, it just enrolls every candidate function
-    * in the stack and lets the next {@link call} agent determine the
-    * proper function to call. The {@link cancel} agent then erases
-    * the other candidate functions from the stack.
-    *
-    * Additionally, if the `select` agent cannot find any candidate
-    * function from the conversation context with the user, it just
-    * acts like a plain chatbot conversing with the user.
-    *
-    * @param ctx Context of the agent
-    * @returns List of prompts generated by the selector
-    */
-   select: (ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>;
-
-   /**
-    * Function caller agent.
-    *
-    * The `call` agent performs the LLM (Large Language Model) function
-    * calling on the candidate functions enrolled in the
-    * {@link IAgenticaContext.stack}. The scope of function calling
-    * is not only filling the arguments, but also actually executing
-    * the function and returning the result.
-    *
-    * The conversation context with the user may not be enough to
-    * fill the arguments of the candidate functions. In that case,
-    * the `call` agent will ask the user to fill in the missing arguments.
-    *
-    * Otherwise, if the conversation context is enough and some
-    * candidate functions have been called successfully, the `call`
-    * agent steps to the {@link describe} agent to explain the results
-    * of the function calls to the user as markdown content.
-    *
-    * @param ctx Context of the agent
-    * @returns List of prompts generated by the caller
-    * @warning Customizing this agent is not recommended, because its
-    *          validation feedback strategy works very well, and the `call`
-    *          agent is the most general topic that can be universally
-    *          applied to all domain fields.
-    */
-   call: (ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>;
-
-   /**
-    * Describer agent of the function calling result.
-    *
-    * The `describe` agent explains the results of the function calls
-    * to the user as markdown content.
-    *
-    * @param ctx Context of the agent
-    * @param executes List of function calling results
-    * @returns List of prompts generated by the describer
-    */
-   describe: (
-     ctx: IAgenticaContext<Model>,
-     executes: IAgenticaPrompt.IExecute<Model>[],
-   ) => Promise<IAgenticaPrompt<Model>[]>;
-
-   /**
-    * Function canceler agent.
-    *
-    * The `cancel` agent erases candidate functions from the
-    * {@link IAgenticaContext.stack} by analyzing the conversation
-    * context with the user.
-    *
-    * For reference, the first reason for cancellation is an explicit
-    * order from the user about a previously requested function. For
-    * example, the user had asked the agent to send an email, but then
-    * suddenly tells it to cancel the email sending.
-    *
-    * The second reason for cancellation is that multiple candidate
-    * functions were selected at once by the {@link select} agent
-    * due to a lack of conversation context or homogeneity between
-    * heterogeneous functions. When one of those candidates is
-    * clearly determined by the {@link call} agent, the other
-    * candidate functions are dropped.
-    *
-    * @param ctx Context of the agent
-    * @returns List of prompts generated by the canceler
-    */
-   cancel: (ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>;
- }
+ import { ILlmSchema } from "@samchon/openapi";
+
+ import { IAgenticaContext } from "./IAgenticaContext";
+ import { IAgenticaPrompt } from "./IAgenticaPrompt";
+
+ /**
+  * Executor of the Agentic AI.
+  *
+  * `IAgenticaExecutor` represents an executor of the {@link Agentica},
+  * composing its internal agents to accomplish the Agentic AI through
+  * LLM (Large Language Model) function calling.
+  *
+  * You can customize one of these internal agents by configuring
+  * properties of the `IAgenticaExecutor` type and assigning it to the
+  * {@link IAgenticaConfig.executor} property. If you set
+  * {@link initialize} to `null`, the {@link Agentica} will skip
+  * the initialize process and go directly to the {@link select} process.
+  *
+  * When customizing the executor members, it is better to reference
+  * the guide documents of `@agentica/core` and the internal agents'
+  * implementation code, because a mistake in the executor logic
+  * can entirely break the {@link Agentica}'s operation.
+  *
+  * @reference https://github.com/wrtnlabs/agentica?tab=readme-ov-file#principles
+  * @reference https://github.com/wrtnlabs/agentica/blob/main/packages/agent/src/chatgpt/ChatGptAgent.ts
+  * @author Samchon
+  */
+ export interface IAgenticaExecutor<Model extends ILlmSchema.Model> {
+   /**
+    * Initializer agent listing up functions.
+    *
+    * The `initialize` agent is the first agent that {@link Agentica}
+    * meets; it judges whether the user's conversation implies
+    * a function call or not.
+    *
+    * If the `initialize` agent judges that the user's conversation
+    * implies a function call, it calls the
+    * {@link IAgenticaContext.initialize} function and informs the
+    * AI agent of every function enrolled in the
+    * {@link IAgenticaController}. After that, the `initialize` agent
+    * is never called again, and {@link Agentica} moves on to the next
+    * {@link select} agent.
+    *
+    * Otherwise, if the user's conversation does not imply a function
+    * calling request, it just works like a plain chatbot and simply
+    * converses with the user.
+    *
+    * If you want to skip the `initialize` agent, you can do so by
+    * configuring the {@link IAgenticaConfig.executor} with a `null`
+    * `initialize` value. In that case, the `initialize` agent is
+    * never called, and {@link Agentica} just starts from the
+    * {@link select} agent.
+    *
+    * @param ctx Context of the agent
+    * @returns List of prompts generated by the initializer
+    */
+   initialize:
+     | null
+     | ((ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>);
+
+   /**
+    * Function selector agent.
+    *
+    * The `select` agent finds candidate functions to call from the
+    * conversation context with the user. The candidate functions
+    * are enrolled in the {@link IAgenticaContext.stack}, and the
+    * next {@link call} agent performs the LLM (Large Language Model)
+    * function calling.
+    *
+    * Note that the `select` agent does not perform the LLM function
+    * calling itself; it ends by just finding the candidate functions to call.
+    *
+    * If the `select` agent cannot specify a certain function to call,
+    * due to a lack of conversation context or homogeneity between
+    * heterogeneous functions, it just enrolls every candidate function
+    * in the stack and lets the next {@link call} agent determine the
+    * proper function to call. The {@link cancel} agent then erases
+    * the other candidate functions from the stack.
+    *
+    * Additionally, if the `select` agent cannot find any candidate
+    * function from the conversation context with the user, it just
+    * acts like a plain chatbot conversing with the user.
+    *
+    * @param ctx Context of the agent
+    * @returns List of prompts generated by the selector
+    */
+   select: (ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>;
+
+   /**
+    * Function caller agent.
+    *
+    * The `call` agent performs the LLM (Large Language Model) function
+    * calling on the candidate functions enrolled in the
+    * {@link IAgenticaContext.stack}. The scope of function calling
+    * is not only filling the arguments, but also actually executing
+    * the function and returning the result.
+    *
+    * The conversation context with the user may not be enough to
+    * fill the arguments of the candidate functions. In that case,
+    * the `call` agent will ask the user to fill in the missing arguments.
+    *
+    * Otherwise, if the conversation context is enough and some
+    * candidate functions have been called successfully, the `call`
+    * agent steps to the {@link describe} agent to explain the results
+    * of the function calls to the user as markdown content.
+    *
+    * @param ctx Context of the agent
+    * @returns List of prompts generated by the caller
+    * @warning Customizing this agent is not recommended, because its
+    *          validation feedback strategy works very well, and the `call`
+    *          agent is the most general topic that can be universally
+    *          applied to all domain fields.
+    */
+   call: (ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>;
+
+   /**
+    * Describer agent of the function calling result.
+    *
+    * The `describe` agent explains the results of the function calls
+    * to the user as markdown content.
+    *
+    * @param ctx Context of the agent
+    * @param executes List of function calling results
+    * @returns List of prompts generated by the describer
+    */
+   describe: (
+     ctx: IAgenticaContext<Model>,
+     executes: IAgenticaPrompt.IExecute<Model>[],
+   ) => Promise<IAgenticaPrompt<Model>[]>;
+
+   /**
+    * Function canceler agent.
+    *
+    * The `cancel` agent erases candidate functions from the
+    * {@link IAgenticaContext.stack} by analyzing the conversation
+    * context with the user.
+    *
+    * For reference, the first reason for cancellation is an explicit
+    * order from the user about a previously requested function. For
+    * example, the user had asked the agent to send an email, but then
+    * suddenly tells it to cancel the email sending.
+    *
+    * The second reason for cancellation is that multiple candidate
+    * functions were selected at once by the {@link select} agent
+    * due to a lack of conversation context or homogeneity between
+    * heterogeneous functions. When one of those candidates is
+    * clearly determined by the {@link call} agent, the other
+    * candidate functions are dropped.
+    *
+    * @param ctx Context of the agent
+    * @returns List of prompts generated by the canceler
+    */
+   cancel: (ctx: IAgenticaContext<Model>) => Promise<IAgenticaPrompt<Model>[]>;
+ }
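
The interface above is what `IAgenticaConfig.executor` accepts. As a minimal sketch of the kind of customization the JSDoc describes, the hypothetical `withSkippedInitialize` wrapper below takes some existing executor, disables its `initialize` stage so that `Agentica` starts from `select`, and adds logging around `select`. The `@agentica/core` re-exports are assumed, and the wrapper is an illustration of the interface's shape, not a library API.

```typescript
import { ILlmSchema } from "@samchon/openapi";
import {
  IAgenticaContext,
  IAgenticaExecutor,
  IAgenticaPrompt,
} from "@agentica/core"; // assumed re-exports from the package entry point

// Hypothetical helper: derive a customized executor from an existing one.
function withSkippedInitialize<Model extends ILlmSchema.Model>(
  base: IAgenticaExecutor<Model>,
): IAgenticaExecutor<Model> {
  return {
    ...base,
    // `null` makes Agentica skip the initialize stage and start at `select`.
    initialize: null,
    // Wrap `select` to observe the prompts it generates before passing them on.
    select: async (
      ctx: IAgenticaContext<Model>,
    ): Promise<IAgenticaPrompt<Model>[]> => {
      const prompts = await base.select(ctx);
      console.log(`select produced ${prompts.length} prompt(s)`);
      return prompts;
    },
  };
}
```
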
package/src/structures/IAgenticaOperation.ts
@@ -1,63 +1,63 @@
- import { IHttpLlmFunction, ILlmFunction, ILlmSchema } from "@samchon/openapi";
-
- import { IAgenticaController } from "./IAgenticaController";
-
- /**
-  * Operation information in the Nestia Agent.
-  *
-  * `IAgenticaOperation` is a type representing an operation that would
-  * be selected by the A.I. chatbot of the {@link Agentica} class to
-  * perform the LLM (Large Language Model) function calling.
-  *
-  * Also, it is a union type that is discriminated by the {@link protocol}
-  * property. If the protocol value is `http`, it means that the HTTP API
-  * operation would be called by the A.I. chatbot. Otherwise, if the protocol
-  * value is `class`, it means that the operation has come from a
-  * TypeScript class.
-  *
-  * @author Samchon
-  */
- export type IAgenticaOperation<Model extends ILlmSchema.Model> =
-   | IAgenticaOperation.IHttp<Model>
-   | IAgenticaOperation.IClass<Model>;
- export namespace IAgenticaOperation {
-   /**
-    * HTTP API operation.
-    */
-   export type IHttp<Model extends ILlmSchema.Model> = IBase<
-     "http",
-     IAgenticaController.IHttp<Model>,
-     IHttpLlmFunction<Model>
-   >;
-
-   /**
-    * TypeScript class operation.
-    */
-   export type IClass<Model extends ILlmSchema.Model> = IBase<
-     "class",
-     IAgenticaController.IClass<Model>,
-     ILlmFunction<Model>
-   >;
-
-   interface IBase<Protocol, Application, Function> {
-     /**
-      * Protocol discriminator.
-      */
-     protocol: Protocol;
-
-     /**
-      * Controller to which the target function belongs.
-      */
-     controller: Application;
-
-     /**
-      * Target function to call.
-      */
-     function: Function;
-
-     /**
-      * Identifier name.
-      */
-     name: string;
-   }
- }
+ import { IHttpLlmFunction, ILlmFunction, ILlmSchema } from "@samchon/openapi";
+
+ import { IAgenticaController } from "./IAgenticaController";
+
+ /**
+  * Operation information in the Nestia Agent.
+  *
+  * `IAgenticaOperation` is a type representing an operation that would
+  * be selected by the A.I. chatbot of the {@link Agentica} class to
+  * perform the LLM (Large Language Model) function calling.
+  *
+  * Also, it is a union type that is discriminated by the {@link protocol}
+  * property. If the protocol value is `http`, it means that the HTTP API
+  * operation would be called by the A.I. chatbot. Otherwise, if the protocol
+  * value is `class`, it means that the operation has come from a
+  * TypeScript class.
+  *
+  * @author Samchon
+  */
+ export type IAgenticaOperation<Model extends ILlmSchema.Model> =
+   | IAgenticaOperation.IHttp<Model>
+   | IAgenticaOperation.IClass<Model>;
+ export namespace IAgenticaOperation {
+   /**
+    * HTTP API operation.
+    */
+   export type IHttp<Model extends ILlmSchema.Model> = IBase<
+     "http",
+     IAgenticaController.IHttp<Model>,
+     IHttpLlmFunction<Model>
+   >;
+
+   /**
+    * TypeScript class operation.
+    */
+   export type IClass<Model extends ILlmSchema.Model> = IBase<
+     "class",
+     IAgenticaController.IClass<Model>,
+     ILlmFunction<Model>
+   >;
+
+   interface IBase<Protocol, Application, Function> {
+     /**
+      * Protocol discriminator.
+      */
+     protocol: Protocol;
+
+     /**
+      * Controller to which the target function belongs.
+      */
+     controller: Application;
+
+     /**
+      * Target function to call.
+      */
+     function: Function;
+
+     /**
+      * Identifier name.
+      */
+     name: string;
+   }
+ }
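
Because `IAgenticaOperation` is a union discriminated by `protocol`, checking that property narrows the type in ordinary TypeScript control flow. A minimal sketch, assuming the type is re-exported from the package entry point; `labelOperation` is a hypothetical helper, not a library API.

```typescript
import { ILlmSchema } from "@samchon/openapi";
import { IAgenticaOperation } from "@agentica/core"; // assumed re-export

// Hypothetical helper: narrow an operation by its `protocol` discriminator.
function labelOperation<Model extends ILlmSchema.Model>(
  operation: IAgenticaOperation<Model>,
): string {
  if (operation.protocol === "http") {
    // Narrowed to IAgenticaOperation.IHttp<Model>: the target function is an
    // IHttpLlmFunction belonging to an HTTP controller.
    return `HTTP operation "${operation.name}" of controller "${operation.controller.name}"`;
  }
  // Narrowed to IAgenticaOperation.IClass<Model>: the target function is an
  // ILlmFunction coming from a TypeScript class controller.
  return `class operation "${operation.name}" of controller "${operation.controller.name}"`;
}
```
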
package/src/structures/IAgenticaOperationCollection.ts
@@ -1,52 +1,52 @@
- import { ILlmSchema } from "@samchon/openapi";
-
- import { IAgenticaOperation } from "./IAgenticaOperation";
-
- /**
-  * Collection of operations used in the Nestia Agent.
-  *
-  * `IAgenticaOperationCollection` is an interface type representing
-  * a collection of operations used internally by the {@link Agentica}
-  * for several purposes.
-  *
-  * @author Samchon
-  */
- export interface IAgenticaOperationCollection<Model extends ILlmSchema.Model> {
-   /**
-    * List of every operation.
-    */
-   array: IAgenticaOperation<Model>[];
-
-   /**
-    * Divided operations.
-    *
-    * If you've configured the {@link IAgenticaConfig.capacity} property,
-    * the A.I. chatbot ({@link Agentica}) will separate the operations
-    * into several groups to divide and conquer the LLM function
-    * selection for accuracy.
-    *
-    * In that case, this `divided` property's length would be determined
-    * by dividing the number of operations (the {@link array}'s length)
-    * by the {@link IAgenticaConfig.capacity}.
-    *
-    * Otherwise, if the {@link IAgenticaConfig.capacity} has not been
-    * configured, this `divided` property would be `undefined`.
-    */
-   divided?: IAgenticaOperation<Model>[][] | undefined;
-
-   /**
-    * Flat dictionary of operations.
-    *
-    * Dictionary of operations keyed by their {@link IAgenticaOperation.name}.
-    */
-   flat: Map<string, IAgenticaOperation<Model>>;
-
-   /**
-    * Group dictionary of operations.
-    *
-    * Dictionary of operations keyed by their
-    * {@link IAgenticaOperation.controller.name} and
-    * {@link IAgenticaOperation.function.name}.
-    */
-   group: Map<string, Map<string, IAgenticaOperation<Model>>>;
- }
+ import { ILlmSchema } from "@samchon/openapi";
+
+ import { IAgenticaOperation } from "./IAgenticaOperation";
+
+ /**
+  * Collection of operations used in the Nestia Agent.
+  *
+  * `IAgenticaOperationCollection` is an interface type representing
+  * a collection of operations used internally by the {@link Agentica}
+  * for several purposes.
+  *
+  * @author Samchon
+  */
+ export interface IAgenticaOperationCollection<Model extends ILlmSchema.Model> {
+   /**
+    * List of every operation.
+    */
+   array: IAgenticaOperation<Model>[];
+
+   /**
+    * Divided operations.
+    *
+    * If you've configured the {@link IAgenticaConfig.capacity} property,
+    * the A.I. chatbot ({@link Agentica}) will separate the operations
+    * into several groups to divide and conquer the LLM function
+    * selection for accuracy.
+    *
+    * In that case, this `divided` property's length would be determined
+    * by dividing the number of operations (the {@link array}'s length)
+    * by the {@link IAgenticaConfig.capacity}.
+    *
+    * Otherwise, if the {@link IAgenticaConfig.capacity} has not been
+    * configured, this `divided` property would be `undefined`.
+    */
+   divided?: IAgenticaOperation<Model>[][] | undefined;
+
+   /**
+    * Flat dictionary of operations.
+    *
+    * Dictionary of operations keyed by their {@link IAgenticaOperation.name}.
+    */
+   flat: Map<string, IAgenticaOperation<Model>>;
+
+   /**
+    * Group dictionary of operations.
+    *
+    * Dictionary of operations keyed by their
+    * {@link IAgenticaOperation.controller.name} and
+    * {@link IAgenticaOperation.function.name}.
+    */
+   group: Map<string, Map<string, IAgenticaOperation<Model>>>;
+ }
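
The `divided`, `flat`, and `group` members above are plain arrays and `Map`s, so inspecting a collection needs no special API. A minimal sketch, again assuming the type is re-exported from the package entry point; `summarizeCollection` is a hypothetical helper, not part of the library.

```typescript
import { ILlmSchema } from "@samchon/openapi";
import { IAgenticaOperationCollection } from "@agentica/core"; // assumed re-export

// Hypothetical helper: report how a collection of operations is organized.
function summarizeCollection<Model extends ILlmSchema.Model>(
  collection: IAgenticaOperationCollection<Model>,
): void {
  console.log(`total operations: ${collection.array.length}`);

  // `divided` exists only when IAgenticaConfig.capacity was configured;
  // its length is roughly array.length / capacity, as the JSDoc above explains.
  if (collection.divided !== undefined) {
    console.log(`divided into ${collection.divided.length} group(s) for function selection`);
  }

  // `flat` maps each IAgenticaOperation.name to its operation.
  for (const [name, operation] of collection.flat) {
    console.log(`- ${name} (${operation.protocol})`);
  }

  // `group` maps controller names to maps of function names.
  for (const [controller, functions] of collection.group) {
    console.log(`controller "${controller}": ${functions.size} function(s)`);
  }
}
```
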