@agentica/core 0.32.3-dev.1 → 0.32.3-dev.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/lib/Agentica.js +20 -17
  2. package/lib/Agentica.js.map +1 -1
  3. package/lib/MicroAgentica.js +19 -16
  4. package/lib/MicroAgentica.js.map +1 -1
  5. package/lib/context/AgenticaContext.d.ts +1 -1
  6. package/lib/context/MicroAgenticaContext.d.ts +1 -1
  7. package/lib/index.mjs +97 -77
  8. package/lib/index.mjs.map +1 -1
  9. package/lib/orchestrate/call.js +13 -11
  10. package/lib/orchestrate/call.js.map +1 -1
  11. package/lib/orchestrate/cancel.js +5 -4
  12. package/lib/orchestrate/cancel.js.map +1 -1
  13. package/lib/orchestrate/select.js +15 -12
  14. package/lib/orchestrate/select.js.map +1 -1
  15. package/lib/utils/ChatGptCompletionStreamingUtil.js +41 -29
  16. package/lib/utils/ChatGptCompletionStreamingUtil.js.map +1 -1
  17. package/lib/utils/ChatGptCompletionStreamingUtil.spec.d.ts +1 -0
  18. package/lib/utils/ChatGptCompletionStreamingUtil.spec.js +855 -0
  19. package/lib/utils/ChatGptCompletionStreamingUtil.spec.js.map +1 -0
  20. package/lib/utils/MPSC.js +8 -6
  21. package/lib/utils/MPSC.js.map +1 -1
  22. package/lib/utils/StreamUtil.d.ts +1 -1
  23. package/lib/utils/StreamUtil.js +2 -2
  24. package/lib/utils/StreamUtil.js.map +1 -1
  25. package/package.json +1 -1
  26. package/src/Agentica.ts +20 -17
  27. package/src/MicroAgentica.ts +19 -16
  28. package/src/context/AgenticaContext.ts +1 -1
  29. package/src/context/MicroAgenticaContext.ts +1 -1
  30. package/src/orchestrate/call.ts +9 -7
  31. package/src/orchestrate/cancel.ts +4 -3
  32. package/src/orchestrate/select.ts +10 -7
  33. package/src/utils/ChatGptCompletionStreamingUtil.spec.ts +908 -0
  34. package/src/utils/ChatGptCompletionStreamingUtil.ts +45 -36
  35. package/src/utils/MPSC.ts +8 -6
  36. package/src/utils/StreamUtil.ts +2 -2
@@ -1,6 +1,6 @@
1
1
  import type { ChatCompletion, ChatCompletionChunk } from "openai/resources";
2
2
 
3
- import { ChatGptCompletionMessageUtil, MPSC, streamDefaultReaderToAsyncGenerator, StreamUtil } from ".";
3
+ import { ChatGptCompletionMessageUtil, MPSC, streamDefaultReaderToAsyncGenerator, StreamUtil, toAsyncGenerator } from ".";
4
4
 
5
5
  async function reduceStreamingWithDispatch(stream: ReadableStream<ChatCompletionChunk>, eventProcessor: (props: {
6
6
  stream: AsyncGenerator<string, undefined, undefined>;
@@ -12,51 +12,45 @@ async function reduceStreamingWithDispatch(stream: ReadableStream<ChatCompletion
12
12
 
13
13
  const nullableCompletion = await StreamUtil.reduce<ChatCompletionChunk, Promise<ChatCompletion>>(stream, async (accPromise, chunk) => {
14
14
  const acc = await accPromise;
15
-
16
15
  const registerContext = (
17
16
  choices: ChatCompletionChunk.Choice[],
18
17
  ) => {
19
18
  for (const choice of choices) {
20
- /**
21
- * @TODO fix it
22
- * Sometimes, the complete message arrives along with a finish reason.
23
- */
19
+ // Handle content first, even if finish_reason is present
20
+ if (choice.delta.content != null && choice.delta.content !== "") {
21
+ // Process content logic (moved up from below)
22
+ if (streamContext.has(choice.index)) {
23
+ const context = streamContext.get(choice.index)!;
24
+ context.content += choice.delta.content;
25
+ context.mpsc.produce(choice.delta.content);
26
+ } else {
27
+ const mpsc = new MPSC<string>();
28
+
29
+ streamContext.set(choice.index, {
30
+ content: choice.delta.content,
31
+ mpsc,
32
+ });
33
+ mpsc.produce(choice.delta.content);
34
+
35
+ eventProcessor({
36
+ stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
37
+ done: () => mpsc.done(),
38
+ get: () => streamContext.get(choice.index)?.content ?? "",
39
+ join: async () => {
40
+ await mpsc.waitClosed();
41
+ return streamContext.get(choice.index)!.content;
42
+ },
43
+ });
44
+ }
45
+ }
46
+
47
+ // Handle finish_reason after content processing
24
48
  if (choice.finish_reason != null) {
25
49
  const context = streamContext.get(choice.index);
26
50
  if (context != null) {
27
51
  context.mpsc.close();
28
52
  }
29
- continue;
30
53
  }
31
-
32
- if (choice.delta.content == null || choice.delta.content === "") {
33
- continue;
34
- }
35
-
36
- if (streamContext.has(choice.index)) {
37
- const context = streamContext.get(choice.index)!;
38
- context.content += choice.delta.content;
39
- context.mpsc.produce(choice.delta.content);
40
- continue;
41
- }
42
-
43
- const mpsc = new MPSC<string>();
44
-
45
- streamContext.set(choice.index, {
46
- content: choice.delta.content,
47
- mpsc,
48
- });
49
- mpsc.produce(choice.delta.content);
50
-
51
- eventProcessor({
52
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
53
- done: () => mpsc.done(),
54
- get: () => streamContext.get(choice.index)?.content ?? "",
55
- join: async () => {
56
- await mpsc.waitClosed();
57
- return streamContext.get(choice.index)!.content;
58
- },
59
- });
60
54
  }
61
55
  };
62
56
  if (acc.object === "chat.completion.chunk") {
@@ -75,6 +69,21 @@ async function reduceStreamingWithDispatch(stream: ReadableStream<ChatCompletion
75
69
  + `Stream locked: ${stream.locked}.`,
76
70
  );
77
71
  }
72
+
73
+ if((nullableCompletion.object as string) === "chat.completion.chunk") {
74
+ const completion = ChatGptCompletionMessageUtil.merge([nullableCompletion as unknown as ChatCompletionChunk]);
75
+ completion.choices.forEach((choice) => {
76
+ if(choice.message.content != null && choice.message.content !== "") {
77
+ eventProcessor({
78
+ stream: toAsyncGenerator(choice.message.content),
79
+ done: () => true,
80
+ get: () => choice.message.content!,
81
+ join: async () => choice.message.content!,
82
+ });
83
+ }
84
+ });
85
+ return completion;
86
+ }
78
87
  return nullableCompletion;
79
88
  }
80
89
 
package/src/utils/MPSC.ts CHANGED
@@ -7,13 +7,15 @@ export class MPSC<T> {
7
7
  public constructor() {
8
8
  this.queue = new AsyncQueue<T>();
9
9
  this.consumer = new ReadableStream<T>({
10
- pull: async (controller) => {
11
- const { value, done } = await this.queue.dequeue();
12
- if (done === true) {
13
- controller.close();
14
- return;
10
+ start: async (controller) => {
11
+ while (true) {
12
+ const { value, done } = await this.queue.dequeue();
13
+ if (done === true) {
14
+ controller.close();
15
+ return;
16
+ }
17
+ controller.enqueue(value);
15
18
  }
16
- controller.enqueue(value);
17
19
  },
18
20
  });
19
21
  }
@@ -34,10 +34,10 @@ async function reduce<T, R = T>(stream: ReadableStream<T>, reducer: (acc: T | R,
34
34
  return acc as R;
35
35
  }
36
36
 
37
- function from<T>(value: T): ReadableStream<T> {
37
+ function from<T>(...value: T[]): ReadableStream<T> {
38
38
  const stream = new ReadableStream<T>({
39
39
  start: (controller) => {
40
- controller.enqueue(value);
40
+ value.forEach(v => controller.enqueue(v));
41
41
  controller.close();
42
42
  },
43
43
  });