@agentica/core 0.27.3 → 0.28.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/README.md +17 -2
  2. package/lib/Agentica.d.ts +2 -1
  3. package/lib/Agentica.js +92 -60
  4. package/lib/Agentica.js.map +1 -1
  5. package/lib/MicroAgentica.d.ts +1 -0
  6. package/lib/MicroAgentica.js +88 -60
  7. package/lib/MicroAgentica.js.map +1 -1
  8. package/lib/constants/AgenticaDefaultPrompt.js +2 -18
  9. package/lib/constants/AgenticaDefaultPrompt.js.map +1 -1
  10. package/lib/context/AgenticaContext.d.ts +1 -1
  11. package/lib/context/MicroAgenticaContext.d.ts +2 -2
  12. package/lib/context/internal/AgenticaOperationComposer.spec.js +0 -2
  13. package/lib/context/internal/AgenticaOperationComposer.spec.js.map +1 -1
  14. package/lib/events/AgenticaEventBase.d.ts +9 -0
  15. package/lib/factory/events.d.ts +1 -2
  16. package/lib/factory/events.js +71 -7
  17. package/lib/factory/events.js.map +1 -1
  18. package/lib/factory/histories.js +27 -9
  19. package/lib/factory/histories.js.map +1 -1
  20. package/lib/histories/AgenticaCancelHistory.d.ts +1 -2
  21. package/lib/histories/AgenticaExecuteHistory.d.ts +0 -4
  22. package/lib/histories/AgenticaHistoryBase.d.ts +9 -0
  23. package/lib/histories/AgenticaSelectHistory.d.ts +1 -2
  24. package/lib/index.mjs +413 -380
  25. package/lib/index.mjs.map +1 -1
  26. package/lib/json/IAgenticaEventJson.d.ts +9 -1
  27. package/lib/json/IAgenticaHistoryJson.d.ts +11 -14
  28. package/lib/orchestrate/call.d.ts +2 -2
  29. package/lib/orchestrate/call.js +41 -70
  30. package/lib/orchestrate/call.js.map +1 -1
  31. package/lib/orchestrate/cancel.d.ts +1 -2
  32. package/lib/orchestrate/cancel.js +13 -38
  33. package/lib/orchestrate/cancel.js.map +1 -1
  34. package/lib/orchestrate/describe.d.ts +1 -2
  35. package/lib/orchestrate/describe.js +5 -17
  36. package/lib/orchestrate/describe.js.map +1 -1
  37. package/lib/orchestrate/execute.d.ts +1 -2
  38. package/lib/orchestrate/execute.js +9 -13
  39. package/lib/orchestrate/execute.js.map +1 -1
  40. package/lib/orchestrate/initialize.d.ts +1 -2
  41. package/lib/orchestrate/initialize.js +3 -11
  42. package/lib/orchestrate/initialize.js.map +1 -1
  43. package/lib/orchestrate/internal/{cancelFunction.js → cancelFunctionFromContext.js} +7 -7
  44. package/lib/orchestrate/internal/cancelFunctionFromContext.js.map +1 -0
  45. package/lib/orchestrate/internal/selectFunctionFromContext.js +24 -0
  46. package/lib/orchestrate/internal/selectFunctionFromContext.js.map +1 -0
  47. package/lib/orchestrate/select.d.ts +1 -2
  48. package/lib/orchestrate/select.js +20 -51
  49. package/lib/orchestrate/select.js.map +1 -1
  50. package/lib/structures/IAgenticaConfig.d.ts +1 -3
  51. package/lib/structures/IAgenticaExecutor.d.ts +6 -7
  52. package/lib/structures/IAgenticaVendor.d.ts +14 -0
  53. package/lib/structures/IMicroAgenticaExecutor.d.ts +2 -3
  54. package/lib/transformers/transformHistory.js +13 -8
  55. package/lib/transformers/transformHistory.js.map +1 -1
  56. package/lib/utils/ChatGptCompletionMessageUtil.js +3 -3
  57. package/package.json +9 -7
  58. package/src/Agentica.ts +127 -87
  59. package/src/MicroAgentica.ts +118 -81
  60. package/src/constants/AgenticaDefaultPrompt.ts +3 -20
  61. package/src/context/AgenticaContext.ts +1 -1
  62. package/src/context/MicroAgenticaContext.ts +2 -2
  63. package/src/context/internal/AgenticaOperationComposer.spec.ts +1 -2
  64. package/src/events/AgenticaEventBase.ts +12 -0
  65. package/src/factory/events.ts +78 -8
  66. package/src/factory/histories.ts +41 -11
  67. package/src/histories/AgenticaCancelHistory.ts +1 -2
  68. package/src/histories/AgenticaExecuteHistory.ts +0 -5
  69. package/src/histories/AgenticaHistoryBase.ts +12 -0
  70. package/src/histories/AgenticaSelectHistory.ts +1 -2
  71. package/src/json/IAgenticaEventJson.ts +11 -1
  72. package/src/json/IAgenticaHistoryJson.ts +14 -17
  73. package/src/orchestrate/call.ts +57 -107
  74. package/src/orchestrate/cancel.ts +76 -99
  75. package/src/orchestrate/describe.ts +16 -36
  76. package/src/orchestrate/execute.ts +17 -37
  77. package/src/orchestrate/initialize.ts +36 -49
  78. package/src/orchestrate/internal/{cancelFunction.ts → cancelFunctionFromContext.ts} +11 -11
  79. package/src/orchestrate/internal/{selectFunction.ts → selectFunctionFromContext.ts} +18 -13
  80. package/src/orchestrate/select.ts +112 -151
  81. package/src/structures/IAgenticaConfig.ts +1 -3
  82. package/src/structures/IAgenticaExecutor.ts +10 -8
  83. package/src/structures/IAgenticaVendor.ts +15 -0
  84. package/src/structures/IMicroAgenticaExecutor.ts +2 -3
  85. package/src/transformers/transformHistory.ts +19 -20
  86. package/lib/orchestrate/internal/cancelFunction.js.map +0 -1
  87. package/lib/orchestrate/internal/selectFunction.js +0 -35
  88. package/lib/orchestrate/internal/selectFunction.js.map +0 -1
  89. package/lib/orchestrate/internal/{cancelFunction.d.ts → cancelFunctionFromContext.d.ts} +0 -0
  90. package/lib/orchestrate/internal/{selectFunction.d.ts → selectFunctionFromContext.d.ts} +0 -0
package/lib/utils/ChatGptCompletionMessageUtil.js CHANGED
@@ -44,7 +44,7 @@ const ChatGptTokenUsageAggregator_1 = require("./ChatGptTokenUsageAggregator");
  function transformCompletionChunk(source) {
      const str = source instanceof Uint8Array ? ByteArrayUtil_1.ByteArrayUtil.toUtf8(source) : source;
      const result = JSON.parse(str);
-     const valid = (() => { const _io0 = input => "string" === typeof input.id && (Array.isArray(input.choices) && input.choices.every(elem => "object" === typeof elem && null !== elem && _io1(elem))) && "number" === typeof input.created && "string" === typeof input.model && "chat.completion.chunk" === input.object && (null === input.service_tier || undefined === input.service_tier || "scale" === input.service_tier || "default" === input.service_tier) && (undefined === input.system_fingerprint || "string" === typeof input.system_fingerprint) && (null === input.usage || undefined === input.usage || "object" === typeof input.usage && null !== input.usage && _io9(input.usage)); const _io1 = input => "object" === typeof input.delta && null !== input.delta && false === Array.isArray(input.delta) && _io2(input.delta) && (null === input.finish_reason || "length" === input.finish_reason || "function_call" === input.finish_reason || "stop" === input.finish_reason || "tool_calls" === input.finish_reason || "content_filter" === input.finish_reason) && "number" === typeof input.index && (null === input.logprobs || undefined === input.logprobs || "object" === typeof input.logprobs && null !== input.logprobs && _io6(input.logprobs)); const _io2 = input => (null === input.content || undefined === input.content || "string" === typeof input.content) && (undefined === input.function_call || "object" === typeof input.function_call && null !== input.function_call && false === Array.isArray(input.function_call) && _io3(input.function_call)) && (null === input.refusal || undefined === input.refusal || "string" === typeof input.refusal) && (undefined === input.role || "user" === input.role || "developer" === input.role || "system" === input.role || "assistant" === input.role || "tool" === input.role) && (undefined === input.tool_calls || Array.isArray(input.tool_calls) && input.tool_calls.every(elem => "object" === typeof elem && null !== elem && _io4(elem))); const _io3 = input => (undefined === input.arguments || "string" === typeof input.arguments) && (undefined === input.name || "string" === typeof input.name); const _io4 = input => "number" === typeof input.index && (undefined === input.id || "string" === typeof input.id) && (undefined === input["function"] || "object" === typeof input["function"] && null !== input["function"] && false === Array.isArray(input["function"]) && _io5(input["function"])) && (undefined === input.type || "function" === input.type); const _io5 = input => (undefined === input.arguments || "string" === typeof input.arguments) && (undefined === input.name || "string" === typeof input.name); const _io6 = input => (null === input.content || Array.isArray(input.content) && input.content.every(elem => "object" === typeof elem && null !== elem && _io7(elem))) && (null === input.refusal || Array.isArray(input.refusal) && input.refusal.every(elem => "object" === typeof elem && null !== elem && _io7(elem))); const _io7 = input => "string" === typeof input.token && (null === input.bytes || Array.isArray(input.bytes) && input.bytes.every(elem => "number" === typeof elem)) && "number" === typeof input.logprob && (Array.isArray(input.top_logprobs) && input.top_logprobs.every(elem => "object" === typeof elem && null !== elem && _io8(elem))); const _io8 = input => "string" === typeof input.token && (null === input.bytes || Array.isArray(input.bytes) && input.bytes.every(elem => "number" === typeof elem)) && "number" === typeof input.logprob; const _io9 = input => "number" === typeof input.completion_tokens && "number" === typeof input.prompt_tokens && "number" === typeof input.total_tokens && (undefined === input.completion_tokens_details || "object" === typeof input.completion_tokens_details && null !== input.completion_tokens_details && false === Array.isArray(input.completion_tokens_details) && _io10(input.completion_tokens_details)) && (undefined === input.prompt_tokens_details || "object" === typeof input.prompt_tokens_details && null !== input.prompt_tokens_details && false === Array.isArray(input.prompt_tokens_details) && _io11(input.prompt_tokens_details)); const _io10 = input => (undefined === input.accepted_prediction_tokens || "number" === typeof input.accepted_prediction_tokens) && (undefined === input.audio_tokens || "number" === typeof input.audio_tokens) && (undefined === input.reasoning_tokens || "number" === typeof input.reasoning_tokens) && (undefined === input.rejected_prediction_tokens || "number" === typeof input.rejected_prediction_tokens); const _io11 = input => (undefined === input.audio_tokens || "number" === typeof input.audio_tokens) && (undefined === input.cached_tokens || "number" === typeof input.cached_tokens); const _vo0 = (input, _path, _exceptionable = true) => ["string" === typeof input.id || _report(_exceptionable, {
+     const valid = (() => { const _io0 = input => "string" === typeof input.id && (Array.isArray(input.choices) && input.choices.every(elem => "object" === typeof elem && null !== elem && _io1(elem))) && "number" === typeof input.created && "string" === typeof input.model && "chat.completion.chunk" === input.object && (null === input.service_tier || undefined === input.service_tier || "auto" === input.service_tier || "default" === input.service_tier || "flex" === input.service_tier) && (undefined === input.system_fingerprint || "string" === typeof input.system_fingerprint) && (null === input.usage || undefined === input.usage || "object" === typeof input.usage && null !== input.usage && _io9(input.usage)); const _io1 = input => "object" === typeof input.delta && null !== input.delta && false === Array.isArray(input.delta) && _io2(input.delta) && (null === input.finish_reason || "length" === input.finish_reason || "function_call" === input.finish_reason || "stop" === input.finish_reason || "tool_calls" === input.finish_reason || "content_filter" === input.finish_reason) && "number" === typeof input.index && (null === input.logprobs || undefined === input.logprobs || "object" === typeof input.logprobs && null !== input.logprobs && _io6(input.logprobs)); const _io2 = input => (null === input.content || undefined === input.content || "string" === typeof input.content) && (undefined === input.function_call || "object" === typeof input.function_call && null !== input.function_call && false === Array.isArray(input.function_call) && _io3(input.function_call)) && (null === input.refusal || undefined === input.refusal || "string" === typeof input.refusal) && (undefined === input.role || "user" === input.role || "developer" === input.role || "system" === input.role || "assistant" === input.role || "tool" === input.role) && (undefined === input.tool_calls || Array.isArray(input.tool_calls) && input.tool_calls.every(elem => "object" === typeof elem && null !== elem && _io4(elem))); const _io3 = input => (undefined === input.arguments || "string" === typeof input.arguments) && (undefined === input.name || "string" === typeof input.name); const _io4 = input => "number" === typeof input.index && (undefined === input.id || "string" === typeof input.id) && (undefined === input["function"] || "object" === typeof input["function"] && null !== input["function"] && false === Array.isArray(input["function"]) && _io5(input["function"])) && (undefined === input.type || "function" === input.type); const _io5 = input => (undefined === input.arguments || "string" === typeof input.arguments) && (undefined === input.name || "string" === typeof input.name); const _io6 = input => (null === input.content || Array.isArray(input.content) && input.content.every(elem => "object" === typeof elem && null !== elem && _io7(elem))) && (null === input.refusal || Array.isArray(input.refusal) && input.refusal.every(elem => "object" === typeof elem && null !== elem && _io7(elem))); const _io7 = input => "string" === typeof input.token && (null === input.bytes || Array.isArray(input.bytes) && input.bytes.every(elem => "number" === typeof elem)) && "number" === typeof input.logprob && (Array.isArray(input.top_logprobs) && input.top_logprobs.every(elem => "object" === typeof elem && null !== elem && _io8(elem))); const _io8 = input => "string" === typeof input.token && (null === input.bytes || Array.isArray(input.bytes) && input.bytes.every(elem => "number" === typeof elem)) && "number" === typeof input.logprob; const _io9 = input => "number" === typeof input.completion_tokens && "number" === typeof input.prompt_tokens && "number" === typeof input.total_tokens && (undefined === input.completion_tokens_details || "object" === typeof input.completion_tokens_details && null !== input.completion_tokens_details && false === Array.isArray(input.completion_tokens_details) && _io10(input.completion_tokens_details)) && (undefined === input.prompt_tokens_details || "object" === typeof input.prompt_tokens_details && null !== input.prompt_tokens_details && false === Array.isArray(input.prompt_tokens_details) && _io11(input.prompt_tokens_details)); const _io10 = input => (undefined === input.accepted_prediction_tokens || "number" === typeof input.accepted_prediction_tokens) && (undefined === input.audio_tokens || "number" === typeof input.audio_tokens) && (undefined === input.reasoning_tokens || "number" === typeof input.reasoning_tokens) && (undefined === input.rejected_prediction_tokens || "number" === typeof input.rejected_prediction_tokens); const _io11 = input => (undefined === input.audio_tokens || "number" === typeof input.audio_tokens) && (undefined === input.cached_tokens || "number" === typeof input.cached_tokens); const _vo0 = (input, _path, _exceptionable = true) => ["string" === typeof input.id || _report(_exceptionable, {
      path: _path + ".id",
      expected: "string",
      value: input.id
@@ -76,9 +76,9 @@ function transformCompletionChunk(source) {
      path: _path + ".object",
      expected: "\"chat.completion.chunk\"",
      value: input.object
-     }), null === input.service_tier || undefined === input.service_tier || "scale" === input.service_tier || "default" === input.service_tier || _report(_exceptionable, {
+     }), null === input.service_tier || undefined === input.service_tier || "auto" === input.service_tier || "default" === input.service_tier || "flex" === input.service_tier || _report(_exceptionable, {
      path: _path + ".service_tier",
-     expected: "(\"default\" | \"scale\" | null | undefined)",
+     expected: "(\"auto\" | \"default\" | \"flex\" | null | undefined)",
      value: input.service_tier
      }), undefined === input.system_fingerprint || "string" === typeof input.system_fingerprint || _report(_exceptionable, {
      path: _path + ".system_fingerprint",
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@agentica/core",
-   "version": "0.27.3",
+   "version": "0.28.0",
    "description": "Agentic AI Library specialized in LLM Function Calling",
    "author": "Wrtn Technologies",
    "license": "MIT",
@@ -36,29 +36,31 @@
      "access": "public"
    },
    "peerDependencies": {
-     "@samchon/openapi": "^4.3.1",
-     "openai": "^4.80.0",
+     "@samchon/openapi": "^4.3.3",
+     "openai": "^5.2.0",
      "typia": "^9.3.1"
    },
    "dependencies": {
-     "@samchon/openapi": "^4.3.1",
+     "@samchon/openapi": "^4.3.3",
+     "tstl": "^3.0.0",
      "typia": "^9.3.1",
      "uuid": "^11.0.4"
    },
    "devDependencies": {
-     "@modelcontextprotocol/sdk": "^1.9.0",
+     "@modelcontextprotocol/sdk": "^1.12.0",
      "@nestia/e2e": "^6.0.1",
      "@rollup/plugin-terser": "^0.4.4",
      "@rollup/plugin-typescript": "^12.1.2",
-     "@ryoppippi/unplugin-typia": "^2.6.2",
+     "@ryoppippi/unplugin-typia": "^2.6.4",
      "@types/node": "^22.13.9",
      "@types/uuid": "^10.0.0",
      "@wrtnlabs/calculator-mcp": "^0.2.1",
-     "openai": "^4.80.0",
+     "openai": "^5.2.0",
      "rimraf": "^6.0.1",
      "rollup": "^4.34.8",
      "ts-node": "^10.9.2",
      "ts-patch": "^3.3.0",
+     "tstl": "^3.0.0",
      "typedoc": "^0.27.7",
      "typescript": "~5.8.3",
      "vitest": "^3.0.9"
package/src/Agentica.ts CHANGED
@@ -1,11 +1,17 @@
  import type { ILlmSchema } from "@samchon/openapi";
+ import type OpenAI from "openai";
+
+ import { Semaphore } from "tstl";
+ import { v4 } from "uuid";

  import type { AgenticaContext } from "./context/AgenticaContext";
  import type { AgenticaOperation } from "./context/AgenticaOperation";
  import type { AgenticaOperationCollection } from "./context/AgenticaOperationCollection";
  import type { AgenticaOperationSelection } from "./context/AgenticaOperationSelection";
+ import type { AgenticaEventSource } from "./events";
  import type { AgenticaEvent } from "./events/AgenticaEvent";
  import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";
+ import type { AgenticaUserMessageEvent } from "./events/AgenticaUserMessageEvent";
  import type { AgenticaUserMessageContent } from "./histories";
  import type { AgenticaHistory } from "./histories/AgenticaHistory";
  import type { AgenticaUserMessageHistory } from "./histories/AgenticaUserMessageHistory";
@@ -17,7 +23,6 @@ import type { IAgenticaVendor } from "./structures/IAgenticaVendor";
  import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
  import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
  import { AgenticaTokenUsageAggregator } from "./context/internal/AgenticaTokenUsageAggregator";
- import { createUserMessageHistory } from "./factory";
  import { createInitializeEvent, createRequestEvent, createUserMessageEvent } from "./factory/events";
  import { execute } from "./orchestrate/execute";
  import { transformHistory } from "./transformers/transformHistory";
@@ -62,11 +67,10 @@ export class Agentica<Model extends ILlmSchema.Model> {
    private readonly listeners_: Map<string, Set<(event: AgenticaEvent<Model>) => Promise<void> | void>>;

    // STATUS
+   private readonly executor_: (ctx: AgenticaContext<Model>) => Promise<void>;
+   private readonly semaphore_: Semaphore | null;
    private readonly token_usage_: AgenticaTokenUsage;
    private ready_: boolean;
-   private readonly executor_: (
-     ctx: AgenticaContext<Model>,
-   ) => Promise<AgenticaHistory<Model>[]>;

    /* -----------------------------------------------------------
      CONSTRUCTOR
@@ -83,7 +87,7 @@ export class Agentica<Model extends ILlmSchema.Model> {
      config: props.config,
    });

-   // STATUS
+   // STACK
    this.stack_ = [];
    this.listeners_ = new Map();
    this.histories_ = (props.histories ?? []).map(input =>
@@ -94,16 +98,21 @@ export class Agentica<Model extends ILlmSchema.Model> {
    );

    // STATUS
+   this.executor_
+     = typeof props.config?.executor === "function"
+       ? props.config.executor
+       : execute(props.config?.executor ?? null);
+   this.semaphore_ = props.vendor.semaphore != null
+     ? typeof props.vendor.semaphore === "object"
+       ? props.vendor.semaphore
+       : new Semaphore(props.vendor.semaphore)
+     : null;
    this.token_usage_ = this.props.tokenUsage !== undefined
      ? this.props.tokenUsage instanceof AgenticaTokenUsage
        ? this.props.tokenUsage
        : new AgenticaTokenUsage(this.props.tokenUsage)
      : AgenticaTokenUsage.zero();
    this.ready_ = false;
-   this.executor_
-     = typeof props.config?.executor === "function"
-       ? props.config.executor
-       : execute(props.config?.executor ?? null);
  }

  /**
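
Both constructors normalize `vendor.semaphore`, which may be a plain concurrency count, a ready-made `tstl` `Semaphore`, or absent. The same logic restated standalone (the function name is ours, for illustration):

    import { Semaphore } from "tstl";

    // Accept a count or an existing Semaphore; null means "no request gating".
    function normalizeSemaphore(
      semaphore: number | Semaphore | undefined,
    ): Semaphore | null {
      if (semaphore == null)
        return null;
      return typeof semaphore === "object"
        ? semaphore // caller-supplied instance, possibly shared across agents
        : new Semaphore(semaphore); // build one with the given permit count
    }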
@@ -140,7 +149,23 @@ export class Agentica<Model extends ILlmSchema.Model> {
      abortSignal?: AbortSignal;
    } = {},
  ): Promise<AgenticaHistory<Model>[]> {
-   const prompt: AgenticaUserMessageHistory = createUserMessageHistory({
+   const historyGetters: Array<() => Promise<AgenticaHistory<Model>>> = [];
+   const dispatch = (event: AgenticaEvent<Model>): void => {
+     this.dispatch(event).catch(() => {});
+     if ("toHistory" in event) {
+       if ("join" in event) {
+         historyGetters.push(async () => {
+           await event.join();
+           return event.toHistory();
+         });
+       }
+       else {
+         historyGetters.push(async () => event.toHistory());
+       }
+     }
+   };
+
+   const prompt: AgenticaUserMessageEvent = createUserMessageEvent({
      contents: Array.isArray(content)
        ? content
        : typeof content === "string"
@@ -150,22 +175,22 @@
          }]
        : [content],
    });
+   dispatch(prompt);

-   this.dispatch(
-     createUserMessageEvent({
-       contents: prompt.contents,
-     }),
-   ).catch(() => {});
-
-   const newbie: AgenticaHistory<Model>[] = await this.executor_(
+   await this.executor_(
      this.getContext({
-       prompt,
+       dispatch,
+       prompt: prompt.toHistory(),
        abortSignal: options.abortSignal,
        usage: this.token_usage_,
      }),
    );
-   this.histories_.push(prompt, ...newbie);
-   return [prompt, ...newbie];
+
+   const completed: AgenticaHistory<Model>[] = await Promise.all(
+     historyGetters.map(async h => h()),
+   );
+   this.histories_.push(...completed);
+   return completed;
  }

  /**
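
`conversate()` no longer collects histories from the executor's return value. Instead the local `dispatch` wrapper inspects every event structurally: anything exposing `toHistory()` yields a history, and events that also expose `join()` (the streamed ones) are awaited first so the history is complete before `Promise.all` resolves. The pattern in isolation, with hypothetical minimal shapes (the real types live in `src/events` and `src/histories`):

    interface History { type: string }
    interface EventLike {
      toHistory?: () => History;
      join?: () => Promise<unknown>;
    }

    const getters: Array<() => Promise<History>> = [];

    function collect(event: EventLike): void {
      if (event.toHistory === undefined)
        return; // not a history-producing event
      getters.push(async () => {
        if (event.join !== undefined)
          await event.join(); // streamed event: wait until it has fully settled
        return event.toHistory!();
      });
    }

    // After the executor finishes:
    // const histories = await Promise.all(getters.map(g => g()));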
@@ -233,9 +258,78 @@ export class Agentica<Model extends ILlmSchema.Model> {
  public getContext(props: {
    prompt: AgenticaUserMessageHistory;
    usage: AgenticaTokenUsage;
+   dispatch: (event: AgenticaEvent<Model>) => void;
    abortSignal?: AbortSignal;
  }): AgenticaContext<Model> {
-   const dispatch = async (event: AgenticaEvent<Model>) => this.dispatch(event);
+   const request = async (
+     source: AgenticaEventSource,
+     body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
+   ): Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>> => {
+     const event: AgenticaRequestEvent = createRequestEvent({
+       source,
+       body: {
+         ...body,
+         model: this.props.vendor.model,
+         stream: true,
+         stream_options: {
+           include_usage: true,
+         },
+       },
+       options: {
+         ...this.props.vendor.options,
+         signal: props.abortSignal,
+       },
+     });
+     props.dispatch(event);
+
+     // completion
+     const completion = await this.props.vendor.api.chat.completions.create(
+       event.body,
+       event.options,
+     );
+
+     const [streamForEvent, temporaryStream] = StreamUtil.transform(
+       completion.toReadableStream() as ReadableStream<Uint8Array>,
+       value =>
+         ChatGptCompletionMessageUtil.transformCompletionChunk(value),
+     ).tee();
+
+     const [streamForAggregate, streamForReturn] = temporaryStream.tee();
+
+     (async () => {
+       const reader = streamForAggregate.getReader();
+       while (true) {
+         const chunk = await reader.read();
+         if (chunk.done) {
+           break;
+         }
+         if (chunk.value.usage != null) {
+           AgenticaTokenUsageAggregator.aggregate({
+             kind: source,
+             completionUsage: chunk.value.usage,
+             usage: props.usage,
+           });
+         }
+       }
+     })().catch(() => {});
+
+     const [streamForStream, streamForJoin] = streamForEvent.tee();
+     props.dispatch({
+       id: v4(),
+       type: "response",
+       source,
+       stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
+       body: event.body,
+       options: event.options,
+       join: async () => {
+         const chunks = await StreamUtil.readAll(streamForJoin);
+         return ChatGptCompletionMessageUtil.merge(chunks);
+       },
+       created_at: new Date().toISOString(),
+     });
+     return streamForReturn;
+   };
+
    return {
      // APPLICATION
      operations: this.operations_,
@@ -249,75 +343,21 @@
      abortSignal: props.abortSignal,

      // HANDLERS
-     dispatch: async event => this.dispatch(event),
-     request: async (source, body) => {
-       // request information
-       const event: AgenticaRequestEvent = createRequestEvent({
-         source,
-         body: {
-           ...body,
-           model: this.props.vendor.model,
-           stream: true,
-           stream_options: {
-             include_usage: true,
-           },
-         },
-         options: {
-           ...this.props.vendor.options,
-           signal: props.abortSignal,
-         },
-       });
-       await dispatch(event);
-
-       // completion
-       const completion = await this.props.vendor.api.chat.completions.create(
-         event.body,
-         event.options,
-       );
-
-       const [streamForEvent, temporaryStream] = StreamUtil.transform(
-         completion.toReadableStream() as ReadableStream<Uint8Array>,
-         value =>
-           ChatGptCompletionMessageUtil.transformCompletionChunk(value),
-       ).tee();
-
-       const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-
-       (async () => {
-         const reader = streamForAggregate.getReader();
-         while (true) {
-           const chunk = await reader.read();
-           if (chunk.done) {
-             break;
-           }
-           if (chunk.value.usage != null) {
-             AgenticaTokenUsageAggregator.aggregate({
-               kind: source,
-               completionUsage: chunk.value.usage,
-               usage: props.usage,
-             });
-           }
+     dispatch: props.dispatch,
+     request: this.semaphore_ === null
+       ? request
+       : async (source, body) => {
+           await this.semaphore_!.acquire();
+           try {
+             return await request(source, body);
          }
-       })().catch(() => {});
-
-       const [streamForStream, streamForJoin] = streamForEvent.tee();
-       await dispatch({
-         type: "response",
-         source,
-         stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-         body: event.body,
-         options: event.options,
-         join: async () => {
-           const chunks = await StreamUtil.readAll(streamForJoin);
-           return ChatGptCompletionMessageUtil.merge(chunks);
-         },
-       });
-
-       return streamForReturn;
-     },
+           finally {
+             void this.semaphore_!.release().catch(() => {});
+           }
+         },
      initialize: async () => {
        this.ready_ = true;
-       await dispatch(createInitializeEvent());
+       props.dispatch(createInitializeEvent());
      },
    };
  }
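
When a semaphore is configured, the context's `request` handler is wrapped so a permit is acquired before each completion call and released in `finally`, meaning a thrown error cannot leak permits. The wrapper's generic shape (the helper name is ours):

    import { Semaphore } from "tstl";

    // Gate any async task behind a tstl Semaphore.
    function gate<Args extends unknown[], R>(
      semaphore: Semaphore,
      task: (...args: Args) => Promise<R>,
    ): (...args: Args) => Promise<R> {
      return async (...args: Args): Promise<R> => {
        await semaphore.acquire(); // wait for a free permit
        try {
          return await task(...args);
        }
        finally {
          await semaphore.release(); // always hand the permit back
        }
      };
    }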
package/src/MicroAgentica.ts CHANGED
@@ -1,13 +1,17 @@
  import type { ILlmSchema } from "@samchon/openapi";
+ import type OpenAI from "openai";
+
+ import { Semaphore } from "tstl";
+ import { v4 } from "uuid";

  import type { AgenticaOperation } from "./context/AgenticaOperation";
  import type { AgenticaOperationCollection } from "./context/AgenticaOperationCollection";
  import type { MicroAgenticaContext } from "./context/MicroAgenticaContext";
+ import type { AgenticaUserMessageEvent } from "./events";
  import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";
  import type { MicroAgenticaEvent } from "./events/MicroAgenticaEvent";
  import type { AgenticaUserMessageContent } from "./histories";
  import type { AgenticaExecuteHistory } from "./histories/AgenticaExecuteHistory";
- import type { AgenticaUserMessageHistory } from "./histories/AgenticaUserMessageHistory";
  import type { MicroAgenticaHistory } from "./histories/MicroAgenticaHistory";
  import type { IAgenticaController } from "./structures/IAgenticaController";
  import type { IAgenticaVendor } from "./structures/IAgenticaVendor";
@@ -17,7 +21,6 @@ import type { IMicroAgenticaProps } from "./structures/IMicroAgenticaProps";
  import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
  import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
  import { AgenticaTokenUsageAggregator } from "./context/internal/AgenticaTokenUsageAggregator";
- import { createUserMessageHistory } from "./factory";
  import { createRequestEvent, createUserMessageEvent } from "./factory/events";
  import { call, describe } from "./orchestrate";
  import { transformHistory } from "./transformers/transformHistory";
@@ -64,6 +67,8 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
      Set<(event: MicroAgenticaEvent<Model>) => Promise<void>>
    >;

+   private readonly semaphore_: Semaphore | null;
+
    /* -----------------------------------------------------------
      CONSTRUCTOR
    ----------------------------------------------------------- */
@@ -89,6 +94,11 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
        : new AgenticaTokenUsage(this.props.tokenUsage)
      : AgenticaTokenUsage.zero();
    this.listeners_ = new Map();
+   this.semaphore_ = props.vendor.semaphore != null
+     ? typeof props.vendor.semaphore === "object"
+       ? props.vendor.semaphore
+       : new Semaphore(props.vendor.semaphore)
+     : null;
  }

  /**
@@ -119,7 +129,23 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
  public async conversate(
    content: string | AgenticaUserMessageContent | Array<AgenticaUserMessageContent>,
  ): Promise<MicroAgenticaHistory<Model>[]> {
-   const talk = createUserMessageHistory({
+   const histories: Array<() => Promise<MicroAgenticaHistory<Model>>> = [];
+   const dispatch = (event: MicroAgenticaEvent<Model>): void => {
+     this.dispatch(event).catch(() => {});
+     if ("toHistory" in event) {
+       if ("join" in event) {
+         histories.push(async () => {
+           await event.join();
+           return event.toHistory();
+         });
+       }
+       else {
+         histories.push(async () => event.toHistory());
+       }
+     }
+   };
+
+   const prompt: AgenticaUserMessageEvent = createUserMessageEvent({
      contents: Array.isArray(content)
        ? content
        : typeof content === "string"
@@ -129,29 +155,26 @@
          }]
        : [content],
    });
-   this.dispatch(
-     createUserMessageEvent({
-       contents: talk.contents,
-     }),
-   ).catch(() => {});
+   dispatch(prompt);

    const ctx: MicroAgenticaContext<Model> = this.getContext({
-     prompt: talk,
+     prompt,
+     dispatch,
      usage: this.token_usage_,
    });
-   const histories: MicroAgenticaHistory<Model>[] = await call(
+   const executes: AgenticaExecuteHistory<Model>[] = await call(
      ctx,
      this.operations_.array,
-   ) as MicroAgenticaHistory<Model>[];
-   const executes: AgenticaExecuteHistory<Model>[] = histories.filter(p => p.type === "execute");
-   if (executes.length
-     && ctx.config?.executor?.describe !== null
-     && ctx.config?.executor?.describe !== false) {
-     histories.push(...await describe(ctx, executes));
+   );
+   if (executes.length) {
+     await describe(ctx, executes);
    }

-   this.histories_.push(talk, ...histories);
-   return histories;
+   const completed: MicroAgenticaHistory<Model>[] = await Promise.all(
+     histories.map(async h => h()),
+   );
+   this.histories_.push(...completed);
+   return completed;
  }

  /**
@@ -217,79 +240,93 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
    * @internal
    */
  public getContext(props: {
-   prompt: AgenticaUserMessageHistory;
+   prompt: AgenticaUserMessageEvent;
    usage: AgenticaTokenUsage;
+   dispatch: (event: MicroAgenticaEvent<Model>) => void;
  }): MicroAgenticaContext<Model> {
-   const dispatch = this.dispatch.bind(this);
-   return {
-     operations: this.operations_,
-     config: this.props.config,
-
-     histories: this.histories_,
-     prompt: props.prompt,
-     dispatch,
-     request: async (source, body) => {
-       // request information
-       const event: AgenticaRequestEvent = createRequestEvent({
-         source,
-         body: {
-           ...body,
-           model: this.props.vendor.model,
-           stream: true,
-           stream_options: {
-             include_usage: true,
-           },
+   const request = async (
+     source: MicroAgenticaEvent.Source,
+     body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
+   ): Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>> => {
+     const event: AgenticaRequestEvent = createRequestEvent({
+       source,
+       body: {
+         ...body,
+         model: this.props.vendor.model,
+         stream: true,
+         stream_options: {
+           include_usage: true,
        },
-       options: this.props.vendor.options,
-     });
-     await dispatch(event);
+       },
+       options: this.props.vendor.options,
+     });
+     props.dispatch(event);

-     // completion
-     const completion = await this.props.vendor.api.chat.completions.create(
-       event.body,
-       event.options,
-     );
+     // completion
+     const completion = await this.props.vendor.api.chat.completions.create(
+       event.body,
+       event.options,
+     );

-     const [streamForEvent, temporaryStream] = StreamUtil.transform(
-       completion.toReadableStream() as ReadableStream<Uint8Array>,
-       value =>
-         ChatGptCompletionMessageUtil.transformCompletionChunk(value),
-     ).tee();
+     const [streamForEvent, temporaryStream] = StreamUtil.transform(
+       completion.toReadableStream() as ReadableStream<Uint8Array>,
+       value =>
+         ChatGptCompletionMessageUtil.transformCompletionChunk(value),
+     ).tee();

-     const [streamForAggregate, streamForReturn] = temporaryStream.tee();
+     const [streamForAggregate, streamForReturn] = temporaryStream.tee();

-     void (async () => {
-       const reader = streamForAggregate.getReader();
-       while (true) {
-         const chunk = await reader.read();
-         if (chunk.done) {
-           break;
-         }
-         if (chunk.value.usage != null) {
-           AgenticaTokenUsageAggregator.aggregate({
-             kind: source,
-             completionUsage: chunk.value.usage,
-             usage: props.usage,
-           });
-         }
+     void (async () => {
+       const reader = streamForAggregate.getReader();
+       while (true) {
+         const chunk = await reader.read();
+         if (chunk.done) {
+           break;
        }
-     })();
+         if (chunk.value.usage != null) {
+           AgenticaTokenUsageAggregator.aggregate({
+             kind: source,
+             completionUsage: chunk.value.usage,
+             usage: props.usage,
+           });
+         }
+       }
+     })().catch(() => {});

-     const [streamForStream, streamForJoin] = streamForEvent.tee();
-     await dispatch({
-       type: "response",
-       source,
-       stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-       body: event.body,
-       options: event.options,
-       join: async () => {
-         const chunks = await StreamUtil.readAll(streamForJoin);
-         return ChatGptCompletionMessageUtil.merge(chunks);
-       },
-     });
+     const [streamForStream, streamForJoin] = streamForEvent.tee();
+     props.dispatch({
+       id: v4(),
+       type: "response",
+       source,
+       stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
+       body: event.body,
+       options: event.options,
+       join: async () => {
+         const chunks = await StreamUtil.readAll(streamForJoin);
+         return ChatGptCompletionMessageUtil.merge(chunks);
+       },
+       created_at: new Date().toISOString(),
+     });
+     return streamForReturn;
+   };
+   return {
+     operations: this.operations_,
+     config: this.props.config,

-     return streamForReturn;
-   },
+     histories: this.histories_,
+     prompt: props.prompt,
+     dispatch: props.dispatch,
+     request: this.semaphore_ === null
+       ? request
+       : async (source, body) => {
+           await this.semaphore_!.acquire();
+           try {
+             return await request(source, body);
+           }
+           finally {
+             void this.semaphore_!.release().catch(() => {});
+           }
+         },
    };
  }
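
A closing note on the stream plumbing both classes now share: a single completion stream feeds three consumers — the `response` event's async generator, the background token-usage aggregator, and the stream returned to the orchestrator. Since `ReadableStream.tee()` only yields pairs, the code tees twice; the same trick standalone (helper name is ours; web-standard streams only, so Node 18+ or any browser):

    function teeThree<T>(
      source: ReadableStream<T>,
    ): [ReadableStream<T>, ReadableStream<T>, ReadableStream<T>] {
      const [a, rest] = source.tee();
      const [b, c] = rest.tee();
      return [a, b, c];
    }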