@agentica/core 0.32.3-dev.3 → 0.32.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/lib/Agentica.js +7 -64
  2. package/lib/Agentica.js.map +1 -1
  3. package/lib/MicroAgentica.d.ts +3 -1
  4. package/lib/MicroAgentica.js +10 -66
  5. package/lib/MicroAgentica.js.map +1 -1
  6. package/lib/context/MicroAgenticaContext.d.ts +4 -0
  7. package/lib/index.mjs +137 -181
  8. package/lib/index.mjs.map +1 -1
  9. package/lib/orchestrate/call.js +1 -0
  10. package/lib/orchestrate/call.js.map +1 -1
  11. package/lib/orchestrate/cancel.js +1 -1
  12. package/lib/orchestrate/cancel.js.map +1 -1
  13. package/lib/orchestrate/describe.js +1 -1
  14. package/lib/orchestrate/describe.js.map +1 -1
  15. package/lib/orchestrate/initialize.js +1 -1
  16. package/lib/orchestrate/initialize.js.map +1 -1
  17. package/lib/orchestrate/select.js +1 -1
  18. package/lib/orchestrate/select.js.map +1 -1
  19. package/lib/utils/ChatGptCompletionStreamingUtil.d.ts +1 -1
  20. package/lib/utils/ChatGptCompletionStreamingUtil.js +2 -2
  21. package/lib/utils/ChatGptCompletionStreamingUtil.js.map +1 -1
  22. package/lib/utils/StreamUtil.d.ts +7 -4
  23. package/lib/utils/StreamUtil.js +17 -14
  24. package/lib/utils/StreamUtil.js.map +1 -1
  25. package/lib/utils/StreamUtil.spec.js +12 -12
  26. package/lib/utils/StreamUtil.spec.js.map +1 -1
  27. package/lib/utils/request.d.ts +12 -0
  28. package/lib/utils/request.js +81 -0
  29. package/lib/utils/request.js.map +1 -0
  30. package/package.json +1 -1
  31. package/src/Agentica.ts +9 -91
  32. package/src/MicroAgentica.ts +14 -87
  33. package/src/context/MicroAgenticaContext.ts +4 -0
  34. package/src/orchestrate/call.ts +1 -0
  35. package/src/orchestrate/cancel.ts +1 -1
  36. package/src/orchestrate/describe.ts +1 -1
  37. package/src/orchestrate/initialize.ts +1 -1
  38. package/src/orchestrate/select.ts +1 -1
  39. package/src/utils/ChatGptCompletionStreamingUtil.ts +2 -2
  40. package/src/utils/StreamUtil.spec.ts +12 -9
  41. package/src/utils/StreamUtil.ts +15 -11
  42. package/src/utils/request.ts +101 -0
package/src/Agentica.ts CHANGED
@@ -1,16 +1,12 @@
  import type { ILlmSchema } from "@samchon/openapi";
- import type OpenAI from "openai";

  import { Semaphore } from "tstl";
- import { v4 } from "uuid";

  import type { AgenticaContext } from "./context/AgenticaContext";
  import type { AgenticaOperation } from "./context/AgenticaOperation";
  import type { AgenticaOperationCollection } from "./context/AgenticaOperationCollection";
  import type { AgenticaOperationSelection } from "./context/AgenticaOperationSelection";
- import type { AgenticaEventSource } from "./events";
  import type { AgenticaEvent } from "./events/AgenticaEvent";
- import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";
  import type { AgenticaUserMessageEvent } from "./events/AgenticaUserMessageEvent";
  import type { AgenticaUserMessageContent } from "./histories";
  import type { AgenticaHistory } from "./histories/AgenticaHistory";
@@ -22,13 +18,11 @@ import type { IAgenticaVendor } from "./structures/IAgenticaVendor";

  import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
  import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
- import { AgenticaTokenUsageAggregator } from "./context/internal/AgenticaTokenUsageAggregator";
- import { createInitializeEvent, createRequestEvent, createUserMessageEvent } from "./factory/events";
+ import { createInitializeEvent, createUserMessageEvent } from "./factory/events";
  import { execute } from "./orchestrate/execute";
  import { transformHistory } from "./transformers/transformHistory";
  import { __map_take } from "./utils/__map_take";
- import { ChatGptCompletionMessageUtil } from "./utils/ChatGptCompletionMessageUtil";
- import { streamDefaultReaderToAsyncGenerator, StreamUtil } from "./utils/StreamUtil";
+ import { getChatCompletionWithStreamingFunction } from "./utils/request";

  /**
   * Agentica AI chatbot agent.
@@ -264,89 +258,13 @@ export class Agentica<Model extends ILlmSchema.Model> {
    dispatch: (event: AgenticaEvent<Model>) => Promise<void>;
    abortSignal?: AbortSignal;
  }): AgenticaContext<Model> {
-   const request = async (
-     source: AgenticaEventSource,
-     body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
-   ): Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>> => {
-     const event: AgenticaRequestEvent = createRequestEvent({
-       source,
-       body: {
-         ...body,
-         model: this.props.vendor.model,
-         stream: true,
-         stream_options: {
-           include_usage: true,
-         },
-       },
-       options: {
-         ...this.props.vendor.options,
-         signal: props.abortSignal,
-       },
-     });
-     await props.dispatch(event);
-
-     // completion
-     const backoffStrategy = this.props.config?.backoffStrategy ?? ((props) => {
-       throw props.error;
-     });
-     const completion = await (async () => {
-       let count = 0;
-       while (true) {
-         try {
-           return await this.props.vendor.api.chat.completions.create(
-             event.body,
-             event.options,
-           );
-         }
-         catch (error) {
-           const waiting = backoffStrategy({ count, error });
-           await new Promise(resolve => setTimeout(resolve, waiting));
-           count++;
-         }
-       }
-     })();
-
-     const [streamForEvent, temporaryStream] = StreamUtil.transform(
-       completion.toReadableStream() as ReadableStream<Uint8Array>,
-       value =>
-         ChatGptCompletionMessageUtil.transformCompletionChunk(value),
-     ).tee();
-
-     const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-
-     (async () => {
-       const reader = streamForAggregate.getReader();
-       while (true) {
-         const chunk = await reader.read();
-         if (chunk.done) {
-           break;
-         }
-         if (chunk.value.usage != null) {
-           AgenticaTokenUsageAggregator.aggregate({
-             kind: source,
-             completionUsage: chunk.value.usage,
-             usage: props.usage,
-           });
-         }
-       }
-     })().catch(() => {});
-
-     const [streamForStream, streamForJoin] = streamForEvent.tee();
-     void props.dispatch({
-       id: v4(),
-       type: "response",
-       source,
-       stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-       body: event.body,
-       options: event.options,
-       join: async () => {
-         const chunks = await StreamUtil.readAll(streamForJoin);
-         return ChatGptCompletionMessageUtil.merge(chunks);
-       },
-       created_at: new Date().toISOString(),
-     }).catch(() => {});
-     return streamForReturn;
-   };
+   const request = getChatCompletionWithStreamingFunction<Model>({
+     vendor: this.props.vendor,
+     config: this.props.config,
+     dispatch: props.dispatch,
+     abortSignal: props.abortSignal,
+     usage: this.token_usage_,
+   });

    return {
      // APPLICATION
package/src/MicroAgentica.ts CHANGED
@@ -1,14 +1,11 @@
  import type { ILlmSchema } from "@samchon/openapi";
- import type OpenAI from "openai";

  import { Semaphore } from "tstl";
- import { v4 } from "uuid";

  import type { AgenticaOperation } from "./context/AgenticaOperation";
  import type { AgenticaOperationCollection } from "./context/AgenticaOperationCollection";
  import type { MicroAgenticaContext } from "./context/MicroAgenticaContext";
  import type { AgenticaUserMessageEvent } from "./events";
- import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";
  import type { MicroAgenticaEvent } from "./events/MicroAgenticaEvent";
  import type { AgenticaUserMessageContent } from "./histories";
  import type { AgenticaExecuteHistory } from "./histories/AgenticaExecuteHistory";
@@ -20,13 +17,11 @@ import type { IMicroAgenticaProps } from "./structures/IMicroAgenticaProps";

  import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
  import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
- import { AgenticaTokenUsageAggregator } from "./context/internal/AgenticaTokenUsageAggregator";
- import { createRequestEvent, createUserMessageEvent } from "./factory/events";
+ import { createUserMessageEvent } from "./factory/events";
  import { call, describe } from "./orchestrate";
  import { transformHistory } from "./transformers/transformHistory";
  import { __map_take } from "./utils/__map_take";
- import { ChatGptCompletionMessageUtil } from "./utils/ChatGptCompletionMessageUtil";
- import { streamDefaultReaderToAsyncGenerator, StreamUtil } from "./utils/StreamUtil";
+ import { getChatCompletionWithStreamingFunction } from "./utils/request";

  /**
   * Micro AI chatbot.
@@ -128,6 +123,9 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
   */
  public async conversate(
    content: string | AgenticaUserMessageContent | Array<AgenticaUserMessageContent>,
+   options: {
+     abortSignal?: AbortSignal;
+   } = {},
  ): Promise<MicroAgenticaHistory<Model>[]> {
    const histories: Array<() => Promise<MicroAgenticaHistory<Model>>> = [];
    const dispatch = async (event: MicroAgenticaEvent<Model>): Promise<void> => {
@@ -164,6 +162,7 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
      prompt,
      dispatch,
      usage: this.token_usage_,
+     abortSignal: options.abortSignal,
    });
    const executes: AgenticaExecuteHistory<Model>[] = await call(
      ctx,
@@ -248,87 +247,15 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
    prompt: AgenticaUserMessageEvent;
    usage: AgenticaTokenUsage;
    dispatch: (event: MicroAgenticaEvent<Model>) => Promise<void>;
+   abortSignal?: AbortSignal;
  }): MicroAgenticaContext<Model> {
-   const request = async (
-     source: MicroAgenticaEvent.Source,
-     body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
-   ): Promise<ReadableStream<OpenAI.Chat.Completions.ChatCompletionChunk>> => {
-     const event: AgenticaRequestEvent = createRequestEvent({
-       source,
-       body: {
-         ...body,
-         model: this.props.vendor.model,
-         stream: true,
-         stream_options: {
-           include_usage: true,
-         },
-       },
-       options: this.props.vendor.options,
-     });
-     await props.dispatch(event);
-
-     // completion
-     const backoffStrategy = this.props.config?.backoffStrategy ?? ((props) => {
-       throw props.error;
-     });
-     const completion = await (async () => {
-       let count = 0;
-       while (true) {
-         try {
-           return await this.props.vendor.api.chat.completions.create(
-             event.body,
-             event.options,
-           );
-         }
-         catch (error) {
-           const waiting = backoffStrategy({ count, error });
-           await new Promise(resolve => setTimeout(resolve, waiting));
-           count++;
-         }
-       }
-     })();
-
-     const [streamForEvent, temporaryStream] = StreamUtil.transform(
-       completion.toReadableStream() as ReadableStream<Uint8Array>,
-       value =>
-         ChatGptCompletionMessageUtil.transformCompletionChunk(value),
-     ).tee();
-
-     const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-
-     void (async () => {
-       const reader = streamForAggregate.getReader();
-       while (true) {
-         const chunk = await reader.read();
-         if (chunk.done) {
-           break;
-         }
-         if (chunk.value.usage != null) {
-           AgenticaTokenUsageAggregator.aggregate({
-             kind: source,
-             completionUsage: chunk.value.usage,
-             usage: props.usage,
-           });
-         }
-       }
-     })().catch(() => {});
-
-     const [streamForStream, streamForJoin] = streamForEvent.tee();
-     void props.dispatch({
-       id: v4(),
-       type: "response",
-       source,
-       stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-       body: event.body,
-       options: event.options,
-       join: async () => {
-         const chunks = await StreamUtil.readAll(streamForJoin);
-         return ChatGptCompletionMessageUtil.merge(chunks);
-       },
-       created_at: new Date().toISOString(),
-     }).catch(() => {});
-     return streamForReturn;
-   };
+   const request = getChatCompletionWithStreamingFunction<Model>({
+     vendor: this.props.vendor,
+     config: this.props.config,
+     dispatch: props.dispatch,
+     abortSignal: props.abortSignal,
+     usage: this.token_usage_,
+   });
    return {
      operations: this.operations_,
      config: this.props.config,
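With this change, MicroAgentica.conversate() accepts an optional second argument whose abortSignal is threaded through the context into every vendor request. A minimal usage sketch in TypeScript (the constructor props shown are typical setup for illustration, not taken from this diff):

import { MicroAgentica } from "@agentica/core";
import OpenAI from "openai";

// Hypothetical agent setup; exact props depend on your application.
const agent = new MicroAgentica({
  model: "chatgpt",
  vendor: {
    api: new OpenAI({ apiKey: "********" }),
    model: "gpt-4o-mini",
  },
  controllers: [],
});

// Cancel the conversation if it runs longer than 10 seconds.
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 10_000);
const histories = await agent.conversate("Hello", {
  abortSignal: controller.signal,
});
clearTimeout(timeout);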
package/src/context/MicroAgenticaContext.ts CHANGED
@@ -68,6 +68,10 @@ export interface MicroAgenticaContext<Model extends ILlmSchema.Model> {
   */
  prompt: AgenticaUserMessageHistory;

+ /**
+  * Abort signal.
+  */
+ abortSignal?: AbortSignal;
  // ----
  // HANDLERS
  // ----
package/src/orchestrate/call.ts CHANGED
@@ -111,6 +111,7 @@ export async function call<Model extends ILlmSchema.Model>(
  const completion = await reduceStreamingWithDispatch(stream, (props) => {
    const event: AgenticaAssistantMessageEvent = createAssistantMessageEvent(props);
    void ctx.dispatch(event).catch(() => {});
+   ctx.abortSignal
  });

  const allAssistantMessagesEmpty = completion.choices.every(v => v.message.tool_calls == null && v.message.content === "");
package/src/orchestrate/cancel.ts CHANGED
@@ -171,7 +171,7 @@ async function step<Model extends ILlmSchema.Model>(
    // parallel_tool_calls: true,
  });

- const chunks = await StreamUtil.readAll(completionStream);
+ const chunks = await StreamUtil.readAll(completionStream, ctx.abortSignal);
  const completion = ChatGptCompletionMessageUtil.merge(chunks);

  // ----
package/src/orchestrate/describe.ts CHANGED
@@ -45,7 +45,7 @@ export async function describe<Model extends ILlmSchema.Model>(
      ...props,
    });
    ctx.dispatch(event);
- });
+ }, ctx.abortSignal);
  }

  export const ChatGptDescribeFunctionAgent = {
package/src/orchestrate/initialize.ts CHANGED
@@ -66,7 +66,7 @@ export async function initialize<Model extends ILlmSchema.Model>(ctx: AgenticaCo
  const completion = await reduceStreamingWithDispatch(completionStream, (props) => {
    const event: AgenticaAssistantMessageEvent = createAssistantMessageEvent(props);
    ctx.dispatch(event);
- });
+ }, ctx.abortSignal);

  if (completion === null) {
    throw new Error("No completion received");
package/src/orchestrate/select.ts CHANGED
@@ -194,7 +194,7 @@ async function step<Model extends ILlmSchema.Model>(
  const completion = await reduceStreamingWithDispatch(stream, (props) => {
    const event: AgenticaAssistantMessageEvent = createAssistantMessageEvent(props);
    void ctx.dispatch(event).catch(() => {});
- });
+ }, ctx.abortSignal);
  const allAssistantMessagesEmpty = completion.choices.every(v => v.message.tool_calls == null && v.message.content === "");
  if (allAssistantMessagesEmpty) {
    const firstChoice = completion.choices.at(0);
package/src/utils/ChatGptCompletionStreamingUtil.ts CHANGED
@@ -7,7 +7,7 @@ async function reduceStreamingWithDispatch(stream: ReadableStream<ChatCompletion
    done: () => boolean;
    get: () => string;
    join: () => Promise<string>;
- }) => void) {
+ }) => void, abortSignal?: AbortSignal) {
    const streamContext = new Map<number, { content: string; mpsc: MPSC<string> }>();

    const nullableCompletion = await StreamUtil.reduce<ChatCompletionChunk, Promise<ChatCompletion>>(stream, async (accPromise, chunk) => {
@@ -59,7 +59,7 @@ async function reduceStreamingWithDispatch(stream: ReadableStream<ChatCompletion
      }
      registerContext(chunk.choices);
      return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
-   });
+   }, { abortSignal });

    if (nullableCompletion == null) {
      throw new Error(
@@ -149,7 +149,7 @@ describe("streamUtil", () => {
149
149
  const stringResult = await StreamUtil.reduce<number, string>(
150
150
  stringStream,
151
151
  (acc, cur) => acc + cur.toString(),
152
- "",
152
+ { initial: ""},
153
153
  );
154
154
 
155
155
  expect(stringResult).toBe("123");
@@ -160,7 +160,7 @@ describe("streamUtil", () => {
160
160
  const sumResult = await StreamUtil.reduce<number, number>(
161
161
  sumStream,
162
162
  (acc, cur) => acc + cur,
163
- 0,
163
+ { initial: 0 },
164
164
  );
165
165
 
166
166
  expect(sumResult).toBe(15);
@@ -171,6 +171,7 @@ describe("streamUtil", () => {
171
171
  const noInitialResult = await StreamUtil.reduce<number>(
172
172
  noInitialStream,
173
173
  (acc, cur) => acc + cur,
174
+ { initial: 0 },
174
175
  );
175
176
 
176
177
  expect(noInitialResult).toBe(10);
@@ -181,7 +182,7 @@ describe("streamUtil", () => {
181
182
  const emptyResult = await StreamUtil.reduce<number, string>(
182
183
  emptyStream,
183
184
  (acc, cur) => acc + cur.toString(),
184
- "initial value",
185
+ { initial: "initial value" },
185
186
  );
186
187
 
187
188
  expect(emptyResult).toBe("initial value");
@@ -192,6 +193,7 @@ describe("streamUtil", () => {
192
193
  const emptyNoInitialResult = await StreamUtil.reduce<number>(
193
194
  emptyNoInitialStream,
194
195
  (acc, cur) => acc + cur,
196
+ { initial: 0 },
195
197
  );
196
198
 
197
199
  expect(emptyNoInitialResult).toBeNull();
@@ -202,7 +204,7 @@ describe("streamUtil", () => {
202
204
  const stringResult = await StreamUtil.reduce<number, string>(
203
205
  stringStream,
204
206
  (acc, cur) => acc + cur.toString(),
205
- "",
207
+ { initial: "" },
206
208
  );
207
209
 
208
210
  expect(stringResult).toBe("123");
@@ -213,6 +215,7 @@ describe("streamUtil", () => {
213
215
  const noInitialResult = await StreamUtil.reduce<number>(
214
216
  noInitialStream,
215
217
  (acc, cur) => acc + cur,
218
+ { initial: 0 },
216
219
  );
217
220
 
218
221
  expect(noInitialResult).toBe(10);
@@ -229,7 +232,7 @@ describe("streamUtil", () => {
229
232
  }
230
233
  return [...acc, `item${cur}`];
231
234
  },
232
- [],
235
+ { initial: [] },
233
236
  );
234
237
 
235
238
  expect(transformResult).toEqual(["item1", "item2", "item3"]);
@@ -240,7 +243,7 @@ describe("streamUtil", () => {
240
243
  const emptyResult = await StreamUtil.reduce<number, string>(
241
244
  emptyStream,
242
245
  (acc, cur) => acc + cur.toString(),
243
- "initial",
246
+ { initial: "initial" },
244
247
  );
245
248
 
246
249
  expect(emptyResult).toBe("initial");
@@ -256,7 +259,7 @@ describe("streamUtil", () => {
256
259
  const delayResult = await StreamUtil.reduce<number, number>(
257
260
  delayStream,
258
261
  (acc, cur) => acc + cur,
259
- 0,
262
+ { initial: 0 },
260
263
  );
261
264
 
262
265
  expect(delayResult).toBe(6);
@@ -274,7 +277,7 @@ describe("streamUtil", () => {
274
277
  }
275
278
  return acc + cur;
276
279
  },
277
- 0,
280
+ { initial: 0 },
278
281
  ),
279
282
  ).rejects.toThrow("Test error");
280
283
  });
@@ -285,7 +288,7 @@ describe("streamUtil", () => {
285
288
  const result = await StreamUtil.reduce<number | null | undefined, number>(
286
289
  stream,
287
290
  (acc, cur) => (acc ?? 0) + (cur ?? 0),
288
- 0,
291
+ { initial: 0 },
289
292
  );
290
293
 
291
294
  expect(result).toBe(9); // 1 + 0 + 3 + 0 + 5 = 9
package/src/utils/StreamUtil.ts CHANGED
@@ -4,12 +4,12 @@
   * Utility functions for streams.
   */

- async function readAll<T>(stream: ReadableStream<T>): Promise<T[]> {
+ async function readAll<T>(stream: ReadableStream<T>, abortSignal?: AbortSignal): Promise<T[]> {
    const reader = stream.getReader();
    const result: T[] = [];
    while (true) {
      const { done, value } = await reader.read();
-     if (done) {
+     if (done || abortSignal?.aborted === true) {
        break;
      }
      result.push(value);
@@ -17,12 +17,16 @@ async function readAll<T>(stream: ReadableStream<T>): Promise<T[]> {
    return result;
  }

- async function reduce<T, R = T>(stream: ReadableStream<T>, reducer: (acc: T | R, cur: T) => R, initial?: R): Promise<R | null> {
+ async function reduce<T, R = T>(stream: ReadableStream<T>, reducer: (acc: T | R, cur: T) => R, options: { initial?: R, abortSignal?: AbortSignal }): Promise<R | null> {
    const reader = stream.getReader();
    const iterator = streamDefaultReaderToAsyncGenerator(reader);
-   let acc = (initial ?? null) as R | null | T;
+   let acc = (options.initial ?? null) as R | null | T;

    for await (const value of iterator) {
+     if (options.abortSignal?.aborted === true) {
+       break;
+     }
+
      if (acc === null) {
        acc = value;
        continue;
@@ -49,28 +53,28 @@ export async function* toAsyncGenerator<T>(value: T): AsyncGenerator<T, undefine
    yield value;
  }

- export async function* streamDefaultReaderToAsyncGenerator<T>(reader: ReadableStreamDefaultReader<T>): AsyncGenerator<Awaited<T>, undefined, undefined> {
+ export async function* streamDefaultReaderToAsyncGenerator<T>(reader: ReadableStreamDefaultReader<T>, abortSignal?: AbortSignal): AsyncGenerator<Awaited<T>, undefined, undefined> {
    while (true) {
      const { done, value } = await reader.read();
-     if (done) {
+     if (done || abortSignal?.aborted === true) {
        break;
      }
      yield value;
    }
  }

- function transform<T, R>(stream: ReadableStream<T>, transformer: (value: T) => R): ReadableStream<R> {
+ function transform<T, R>(stream: ReadableStream<T>, transformer: (value: T) => R, abortSignal?: AbortSignal): ReadableStream<R> {
    const reader = stream.getReader();

    return new ReadableStream<R>({
      pull: async (controller) => {
        const { done, value } = await reader.read();
-       if (!done) {
-         controller.enqueue(transformer(value));
-       }
-       else {
+       if (done === true || abortSignal?.aborted === true) {
          controller.close();
+         return;
        }
+
+       controller.enqueue(transformer(value));
      },
    });
  }
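Note that reduce now takes a required options object instead of a positional initial value, so existing third-argument callers must be updated (as the spec changes above show), and the same object can carry an abortSignal that stops accumulation mid-stream. A short sketch of the new call shape, assuming direct access to this internal module (StreamUtil is not necessarily re-exported from the package root):

import { StreamUtil } from "@agentica/core/lib/utils/StreamUtil";

// A small finite stream for illustration.
const numbers = new ReadableStream<number>({
  start(controller) {
    [1, 2, 3, 4, 5].forEach(n => controller.enqueue(n));
    controller.close();
  },
});

// Before 0.32.4: StreamUtil.reduce(numbers, (acc, cur) => acc + cur, 0)
const aborter = new AbortController();
const sum = await StreamUtil.reduce<number, number>(
  numbers,
  (acc, cur) => acc + cur,
  { initial: 0, abortSignal: aborter.signal },
);
// If aborter.abort() fires mid-stream, reduce (like readAll) stops
// reading and resolves with whatever has been accumulated so far.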
package/src/utils/request.ts ADDED
@@ -0,0 +1,101 @@
+ import OpenAI from "openai";
+ import { AgenticaEventSource, AgenticaRequestEvent, AgenticaResponseEvent } from "../events";
+ import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
+ import { streamDefaultReaderToAsyncGenerator, StreamUtil } from "./StreamUtil";
+ import { createRequestEvent } from "../factory";
+ import { IAgenticaConfig, IAgenticaVendor, IMicroAgenticaConfig } from "../structures";
+ import { ILlmSchema } from "@samchon/openapi";
+ import { AgenticaTokenUsageAggregator } from "../context/internal/AgenticaTokenUsageAggregator";
+ import { AgenticaTokenUsage } from "../context/AgenticaTokenUsage";
+ import { v4 } from "uuid";
+
+ export const getChatCompletionWithStreamingFunction = <Model extends ILlmSchema.Model>(props: {
+   vendor: IAgenticaVendor;
+   config?: IAgenticaConfig<Model> | IMicroAgenticaConfig<Model>;
+   dispatch: (event: AgenticaRequestEvent | AgenticaResponseEvent) => Promise<void>;
+   abortSignal?: AbortSignal;
+   usage: AgenticaTokenUsage;
+ }) => async (
+   source: AgenticaEventSource,
+   body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
+ ) => {
+   const event: AgenticaRequestEvent = createRequestEvent({
+     source,
+     body: {
+       ...body,
+       model: props.vendor.model,
+       stream: true,
+       stream_options: {
+         include_usage: true,
+       },
+     },
+     options: {
+       ...props.vendor.options,
+       signal: props.abortSignal,
+     },
+   });
+   await props.dispatch(event);
+
+   // completion
+   const backoffStrategy = props.config?.backoffStrategy ?? ((props) => {
+     throw props.error;
+   });
+   const completion = await (async () => {
+     let count = 0;
+     while (true) {
+       try {
+         return await props.vendor.api.chat.completions.create(
+           event.body,
+           event.options,
+         );
+       }
+       catch (error) {
+         const waiting = backoffStrategy({ count, error });
+         await new Promise(resolve => setTimeout(resolve, waiting));
+         count++;
+       }
+     }
+   })();
+
+   const [streamForEvent, temporaryStream] = StreamUtil.transform(
+     completion.toReadableStream() as ReadableStream<Uint8Array>,
+     value =>
+       ChatGptCompletionMessageUtil.transformCompletionChunk(value),
+     props.abortSignal,
+   ).tee();
+
+   const [streamForAggregate, streamForReturn] = temporaryStream.tee();
+
+   (async () => {
+     const reader = streamForAggregate.getReader();
+     while (true) {
+       const chunk = await reader.read();
+       if (chunk.done || props.abortSignal?.aborted === true) {
+         break;
+       }
+       if (chunk.value.usage != null) {
+         AgenticaTokenUsageAggregator.aggregate({
+           kind: source,
+           completionUsage: chunk.value.usage,
+           usage: props.usage,
+         });
+       }
+     }
+   })().catch(() => {});
+
+   const [streamForStream, streamForJoin] = streamForEvent.tee();
+   void props.dispatch({
+     id: v4(),
+     type: "response",
+     source,
+     stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader(), props.abortSignal),
+     body: event.body,
+     options: event.options,
+     join: async () => {
+       const chunks = await StreamUtil.readAll(streamForJoin, props.abortSignal);
+       return ChatGptCompletionMessageUtil.merge(chunks);
+     },
+     created_at: new Date().toISOString(),
+   }).catch(() => {});
+   return streamForReturn;
+ };
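The retry loop in this new module delegates to config.backoffStrategy, which receives the attempt count and the thrown error and returns how many milliseconds to wait before retrying (or rethrows to give up); the default rethrows immediately. A hedged sketch of supplying one (the retry cap and base delay are illustrative choices, not library defaults):

import { Agentica } from "@agentica/core";
import OpenAI from "openai";

// Illustrative strategy: exponential backoff, giving up after 4 retries.
const agent = new Agentica({
  model: "chatgpt",
  vendor: { api: new OpenAI({ apiKey: "********" }), model: "gpt-4o-mini" },
  controllers: [],
  config: {
    backoffStrategy: ({ count, error }) => {
      if (count >= 4) {
        throw error; // exhausted retries: propagate the vendor error
      }
      return 1_000 * 2 ** count; // wait 1s, 2s, 4s, 8s between attempts
    },
  },
});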