@agentica/core 0.32.2 → 0.32.3-dev.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.mjs CHANGED
@@ -808,93 +808,30 @@ function isAgenticaContext(ctx) {
  return typeof ctx.initialize === "function";
  }

- class AsyncQueueClosedError extends Error {
- constructor(message) {
- super(message);
- this.name = "AsyncQueueClosedError";
- }
+ function __get_retry(limit) {
+ const retryFn = async (fn, prevError, attempt = 0) => {
+ try {
+ return await fn(prevError);
+ } catch (error) {
+ if (attempt >= limit - 1) {
+ throw error;
+ }
+ return retryFn(fn, error, attempt + 1);
+ }
+ };
+ return retryFn;
  }

- class AsyncQueue {
+ class AssistantMessageEmptyError extends Error {
  constructor() {
- this.queue = [];
- this.resolvers = [];
- this.closeResolvers = [];
- this.emptyResolvers = [];
- this.closed = false;
- }
- enqueue(item) {
- if (this.closed) {
- console.error(new AsyncQueueClosedError("Cannot enqueue item: queue is closed."));
- return;
- }
- this.queue.push(item);
- if (this.resolvers.length > 0) {
- this.resolvers.shift()?.({
- value: this.queue.shift(),
- done: false
- });
- }
- }
- async dequeue() {
- const item = (() => {
- if (!this.isEmpty()) {
- return {
- value: this.queue.shift(),
- done: false
- };
- }
- if (this.isClosed()) {
- return {
- value: undefined,
- done: true
- };
- }
- return null;
- })();
- if (this.isEmpty() && this.emptyResolvers.length !== 0) {
- this.emptyResolvers.forEach((resolve => resolve()));
- this.emptyResolvers = [];
- }
- if (item !== null) {
- return item;
- }
- return new Promise((resolve => this.resolvers.push(resolve)));
+ super();
  }
- isEmpty() {
- return this.queue.length === 0;
- }
- isClosed() {
- return this.closed;
- }
- done() {
- return this.isClosed() && this.isEmpty();
- }
- close() {
- this.closed = true;
- while (this.resolvers.length > 0) {
- this.resolvers.shift()?.({
- value: undefined,
- done: true
- });
- }
- this.closeResolvers.forEach((resolve => resolve()));
- }
- async waitUntilEmpty() {
- if (this.isEmpty()) {
- return Promise.resolve();
- }
- return new Promise((resolve => {
- this.emptyResolvers.push(resolve);
- }));
- }
- async waitClosed() {
- if (this.isClosed()) {
- return Promise.resolve();
- }
- return new Promise((resolve => {
- this.closeResolvers.push(resolve);
- }));
+ }
+
+ class AssistantMessageEmptyWithReasoningError extends AssistantMessageEmptyError {
+ constructor(reasoning) {
+ super();
+ this.reasoning = reasoning;
  }
  }

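The new __get_retry(limit) helper returns a retrying wrapper: it invokes an async callback, passing in the error thrown by the previous attempt, and gives up after limit attempts by re-throwing the last error. A minimal usage sketch, assuming an illustrative fetchOnce operation and a limit of 3 (neither is part of the package):

// Illustrative only: how the retry wrapper produced by __get_retry behaves.
const retry = __get_retry(3); // at most 3 attempts in total
const result = await retry(async (prevError) => {
  // prevError is undefined on the first attempt; afterwards it holds the
  // error thrown by the previous attempt, so the callback can adapt.
  if (prevError !== undefined) {
    console.warn("retrying after:", prevError);
  }
  return await fetchOnce(); // hypothetical async operation that may throw
});
// If all 3 attempts throw, the last error propagates to the caller.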
@@ -1100,6 +1037,96 @@ const ChatGptCompletionMessageUtil = {
  mergeToolCalls
  };

+ class AsyncQueueClosedError extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "AsyncQueueClosedError";
+ }
+ }
+
+ class AsyncQueue {
+ constructor() {
+ this.queue = [];
+ this.resolvers = [];
+ this.closeResolvers = [];
+ this.emptyResolvers = [];
+ this.closed = false;
+ }
+ enqueue(item) {
+ if (this.closed) {
+ console.error(new AsyncQueueClosedError("Cannot enqueue item: queue is closed."));
+ return;
+ }
+ this.queue.push(item);
+ if (this.resolvers.length > 0) {
+ this.resolvers.shift()?.({
+ value: this.queue.shift(),
+ done: false
+ });
+ }
+ }
+ async dequeue() {
+ const item = (() => {
+ if (!this.isEmpty()) {
+ return {
+ value: this.queue.shift(),
+ done: false
+ };
+ }
+ if (this.isClosed()) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+ return null;
+ })();
+ if (this.isEmpty() && this.emptyResolvers.length !== 0) {
+ this.emptyResolvers.forEach((resolve => resolve()));
+ this.emptyResolvers = [];
+ }
+ if (item !== null) {
+ return item;
+ }
+ return new Promise((resolve => this.resolvers.push(resolve)));
+ }
+ isEmpty() {
+ return this.queue.length === 0;
+ }
+ isClosed() {
+ return this.closed;
+ }
+ done() {
+ return this.isClosed() && this.isEmpty();
+ }
+ close() {
+ this.closed = true;
+ while (this.resolvers.length > 0) {
+ this.resolvers.shift()?.({
+ value: undefined,
+ done: true
+ });
+ }
+ this.closeResolvers.forEach((resolve => resolve()));
+ }
+ async waitUntilEmpty() {
+ if (this.isEmpty()) {
+ return Promise.resolve();
+ }
+ return new Promise((resolve => {
+ this.emptyResolvers.push(resolve);
+ }));
+ }
+ async waitClosed() {
+ if (this.isClosed()) {
+ return Promise.resolve();
+ }
+ return new Promise((resolve => {
+ this.closeResolvers.push(resolve);
+ }));
+ }
+ }
+
  class MPSC {
  constructor() {
  this.queue = new AsyncQueue;
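AsyncQueueClosedError and AsyncQueue are unchanged; the bundler has only relocated them below ChatGptCompletionMessageUtil so they sit next to MPSC, their consumer. For orientation, a minimal sketch of the queue's contract as it can be read from this hunk (an internal class of the bundle, not an exported API):

// Illustrative only: basic AsyncQueue producer/consumer behavior.
const queue = new AsyncQueue();
queue.enqueue("a");
queue.enqueue("b");
queue.close(); // later enqueue() calls only log an AsyncQueueClosedError

console.log(await queue.dequeue()); // { value: "a", done: false }
console.log(await queue.dequeue()); // { value: "b", done: false }
console.log(await queue.dequeue()); // { value: undefined, done: true }
console.log(queue.done());          // true: closed and drained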
@@ -1214,94 +1241,43 @@ var index$2 = Object.freeze({
  toAsyncGenerator
  });

- function createOperationSelection(props) {
- return {
- operation: props.operation,
- reason: props.reason,
- toJSON: () => ({
- operation: props.operation.toJSON(),
- reason: props.reason
- })
- };
- }
-
- function cancelFunctionFromContext(ctx, reference) {
- const index = ctx.stack.findIndex((item => item.operation.name === reference.name));
- if (index === -1) {
- return;
- }
- const item = ctx.stack[index];
- ctx.stack.splice(index, 1);
- const event = createCancelEvent({
- selection: createOperationSelection({
- operation: item.operation,
- reason: reference.reason
- })
- });
- ctx.dispatch(event);
- }
-
- async function call(ctx, operations) {
- const stream = await ctx.request("call", {
- messages: [ {
- role: "system",
- content: AgenticaDefaultPrompt.write(ctx.config)
- }, ...ctx.histories.map(decodeHistory).flat(), {
- role: "user",
- content: ctx.prompt.contents.map(decodeUserMessageContent)
- }, ...ctx.config?.systemPrompt?.execute === null ? [] : [ {
- role: "system",
- content: ctx.config?.systemPrompt?.execute?.(ctx.histories) ?? AgenticaSystemPrompt.EXECUTE
- } ] ],
- tools: operations.map((s => ({
- type: "function",
- function: {
- name: s.name,
- description: s.function.description,
- parameters: "separated" in s.function && s.function.separated !== undefined ? s.function.separated.llm ?? {
- type: "object",
- properties: {},
- required: [],
- additionalProperties: false,
- $defs: {}
- } : s.function.parameters
- }
- }))),
- tool_choice: "auto"
- });
- const selectContext = [];
+ async function reduceStreamingWithDispatch(stream, eventProcessor) {
+ const streamContext = new Map;
  const nullableCompletion = await StreamUtil.reduce(stream, (async (accPromise, chunk) => {
  const acc = await accPromise;
  const registerContext = choices => {
  for (const choice of choices) {
  if (choice.finish_reason != null) {
- selectContext[choice.index]?.mpsc.close();
+ const context = streamContext.get(choice.index);
+ if (context != null) {
+ context.mpsc.close();
+ }
  continue;
  }
  if (choice.delta.content == null || choice.delta.content === "") {
  continue;
  }
- if (selectContext[choice.index] != null) {
- selectContext[choice.index].content += choice.delta.content;
- selectContext[choice.index].mpsc.produce(choice.delta.content);
+ if (streamContext.has(choice.index)) {
+ const context = streamContext.get(choice.index);
+ context.content += choice.delta.content;
+ context.mpsc.produce(choice.delta.content);
  continue;
  }
  const mpsc = new MPSC;
- selectContext[choice.index] = {
+ streamContext.set(choice.index, {
  content: choice.delta.content,
  mpsc
- };
+ });
  mpsc.produce(choice.delta.content);
- const event = createAssistantMessageEvent({
+ eventProcessor({
  stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
  done: () => mpsc.done(),
- get: () => selectContext[choice.index]?.content ?? "",
+ get: () => streamContext.get(choice.index)?.content ?? "",
  join: async () => {
  await mpsc.waitClosed();
- return selectContext[choice.index].content;
+ return streamContext.get(choice.index).content;
  }
  });
- ctx.dispatch(event);
  }
  };
  if (acc.object === "chat.completion.chunk") {
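The chunk-accumulation loop that was previously copied into call, describe, initialize, and select is factored out here as reduceStreamingWithDispatch(stream, eventProcessor): it reduces the ChatCompletionChunk stream into a single completion and, for each streamed choice, hands the eventProcessor an object exposing stream, done, get, and join. A minimal sketch of the callback shape, assuming stream is a chunk stream obtained from ctx.request and using console output in place of the real event dispatch:

// Illustrative only: what the eventProcessor receives for each streamed choice.
const completion = await reduceStreamingWithDispatch(stream, (props) => {
  void (async () => {
    for await (const delta of props.stream) {
      process.stdout.write(delta); // content deltas as they arrive
    }
    // props.done() reports whether the choice has finished streaming,
    // props.get() returns the text accumulated so far,
    // props.join() resolves to the full text once the stream closes.
    console.log("\nfull text:", await props.join());
  })();
});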
@@ -1311,18 +1287,100 @@ async function call(ctx, operations) {
  registerContext(chunk.choices);
  return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
  }));
- const completion = nullableCompletion;
- const emptyAssistantMessages = completion.choices.filter((v => v.message.tool_calls == null && v.message.content === ""));
- if (emptyAssistantMessages.length > 0) {
- emptyAssistantMessages.forEach((v => {
- const event = createAssistantMessageEvent({
- stream: toAsyncGenerator(v.message.content ?? ""),
- done: () => true,
- get: () => v.message.content ?? "",
- join: async () => v.message.content ?? ""
- });
+ if (nullableCompletion == null) {
+ throw new Error("StreamUtil.reduce did not produce a ChatCompletion. Possible causes: the input stream was empty, invalid, or closed prematurely. " + "To debug: check that the stream is properly initialized and contains valid ChatCompletionChunk data. " + "You may also enable verbose logging upstream to inspect the stream contents. " + `Stream locked: ${stream.locked}.`);
+ }
+ return nullableCompletion;
+ }
+
+ function createOperationSelection(props) {
+ return {
+ operation: props.operation,
+ reason: props.reason,
+ toJSON: () => ({
+ operation: props.operation.toJSON(),
+ reason: props.reason
+ })
+ };
+ }
+
+ function cancelFunctionFromContext(ctx, reference) {
+ const index = ctx.stack.findIndex((item => item.operation.name === reference.name));
+ if (index === -1) {
+ return;
+ }
+ const item = ctx.stack[index];
+ ctx.stack.splice(index, 1);
+ const event = createCancelEvent({
+ selection: createOperationSelection({
+ operation: item.operation,
+ reason: reference.reason
+ })
+ });
+ ctx.dispatch(event);
+ }
+
+ async function call(ctx, operations) {
+ const _retryFn = __get_retry(ctx.config?.retry ?? AgenticaConstant.RETRY);
+ const retryFn = async fn => _retryFn(fn).catch((e => {
+ if (e instanceof AssistantMessageEmptyError) {
+ return Symbol("emptyAssistantMessage");
+ }
+ throw e;
+ }));
+ const completion = await retryFn((async prevError => {
+ const stream = await ctx.request("call", {
+ messages: [ {
+ role: "system",
+ content: AgenticaDefaultPrompt.write(ctx.config)
+ }, ...ctx.histories.map(decodeHistory).flat(), {
+ role: "user",
+ content: ctx.prompt.contents.map(decodeUserMessageContent)
+ }, ...prevError instanceof AssistantMessageEmptyWithReasoningError ? [ {
+ role: "assistant",
+ content: prevError.reasoning
+ } ] : [], ...ctx.config?.systemPrompt?.execute === null ? [] : [ {
+ role: "system",
+ content: ctx.config?.systemPrompt?.execute?.(ctx.histories) ?? AgenticaSystemPrompt.EXECUTE
+ } ] ],
+ tools: operations.map((s => ({
+ type: "function",
+ function: {
+ name: s.name,
+ description: s.function.description,
+ parameters: "separated" in s.function && s.function.separated !== undefined ? s.function.separated.llm ?? {
+ type: "object",
+ properties: {},
+ required: [],
+ additionalProperties: false,
+ $defs: {}
+ } : s.function.parameters
+ }
+ }))),
+ tool_choice: "auto"
+ });
+ const completion = await reduceStreamingWithDispatch(stream, (props => {
+ const event = createAssistantMessageEvent(props);
  ctx.dispatch(event);
  }));
+ const allAssistantMessagesEmpty = completion.choices.every((v => v.message.tool_calls == null && v.message.content === ""));
+ if (allAssistantMessagesEmpty) {
+ const firstChoice = completion.choices.at(0);
+ if (firstChoice?.message?.reasoning != null) {
+ throw new AssistantMessageEmptyWithReasoningError(firstChoice?.message?.reasoning ?? "");
+ }
+ throw new AssistantMessageEmptyError;
+ }
+ return completion;
+ }));
+ if (typeof completion === "symbol") {
+ const event = createAssistantMessageEvent({
+ stream: toAsyncGenerator(""),
+ done: () => true,
+ get: () => "",
+ join: async () => ""
+ });
+ ctx.dispatch(event);
  return [];
  }
  const executes = [];
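call() now wraps the whole request in the retry helper: a completion whose choices carry neither tool calls nor content throws AssistantMessageEmptyError (or the WithReasoning variant, whose reasoning is replayed as an assistant message on the next attempt), and once the retries are exhausted that error is converted into a Symbol sentinel so an empty assistant-message event is dispatched instead of failing. A condensed sketch of that control flow, with the request and streaming details folded into a hypothetical requestAndStream helper:

// Illustrative only: condensed retry flow of call() after this change.
const retry = __get_retry(ctx.config?.retry ?? AgenticaConstant.RETRY);
const completion = await retry(async (prevError) => {
  // prevError, when it is an AssistantMessageEmptyWithReasoningError, is
  // appended to the prompt as an assistant message before retrying.
  const attempt = await requestAndStream(prevError); // hypothetical helper
  const allEmpty = attempt.choices.every(
    (c) => c.message.tool_calls == null && c.message.content === "",
  );
  if (allEmpty) {
    throw new AssistantMessageEmptyError();
  }
  return attempt;
}).catch((e) => {
  if (e instanceof AssistantMessageEmptyError) {
    return Symbol("emptyAssistantMessage"); // signal: dispatch an empty message instead of failing
  }
  throw e;
});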
@@ -1872,48 +1930,12 @@ async function describe(ctx, histories) {
  content: ctx.config?.systemPrompt?.describe?.(histories) ?? AgenticaSystemPrompt.DESCRIBE
  } ]
  });
- const describeContext = [];
- await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- describeContext[choice.index].mpsc.close();
- continue;
- }
- if (choice.delta.content == null) {
- continue;
- }
- if (describeContext[choice.index] != null) {
- describeContext[choice.index].content += choice.delta.content;
- describeContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- describeContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- const event = createDescribeEvent({
- executes: histories,
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
- done: () => mpsc.done(),
- get: () => describeContext[choice.index]?.content ?? "",
- join: async () => {
- await mpsc.waitClosed();
- return describeContext[choice.index].content;
- }
- });
- ctx.dispatch(event);
- }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
- }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
+ await reduceStreamingWithDispatch(completionStream, (props => {
+ const event = createDescribeEvent({
+ executes: histories,
+ ...props
+ });
+ ctx.dispatch(event);
  }));
  }

@@ -2578,47 +2600,9 @@ async function initialize(ctx) {
  } ],
  tool_choice: "auto"
  });
- const textContext = [];
- const completion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- textContext[choice.index]?.mpsc.close();
- continue;
- }
- if (choice.delta.content == null || choice.delta.content.length === 0) {
- continue;
- }
- if (textContext[choice.index] != null) {
- textContext[choice.index].content += choice.delta.content;
- textContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- textContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- const event = createAssistantMessageEvent({
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
- done: () => mpsc.done(),
- get: () => textContext[choice.index].content,
- join: async () => {
- await mpsc.waitClosed();
- return textContext[choice.index].content;
- }
- });
- ctx.dispatch(event);
- }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
- }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
+ const completion = await reduceStreamingWithDispatch(completionStream, (props => {
+ const event = createAssistantMessageEvent(props);
+ ctx.dispatch(event);
  }));
  if (completion === null) {
  throw new Error("No completion received");
@@ -2766,103 +2750,83 @@ async function select(ctx) {
  }

  async function step(ctx, operations, retry, failures) {
- const completionStream = await ctx.request("select", {
- messages: [ {
- role: "system",
- content: AgenticaDefaultPrompt.write(ctx.config)
- }, {
- role: "assistant",
- tool_calls: [ {
+ const _retryFn = __get_retry(ctx.config?.retry ?? AgenticaConstant.RETRY);
+ const retryFn = async fn => _retryFn(fn).catch((e => {
+ if (e instanceof AssistantMessageEmptyError) {
+ return Symbol("emptyAssistantMessage");
+ }
+ throw e;
+ }));
+ const completion = await retryFn((async prevError => {
+ const stream = await ctx.request("select", {
+ messages: [ {
+ role: "system",
+ content: AgenticaDefaultPrompt.write(ctx.config)
+ }, {
+ role: "assistant",
+ tool_calls: [ {
+ type: "function",
+ id: "getApiFunctions",
+ function: {
+ name: "getApiFunctions",
+ arguments: JSON.stringify({})
+ }
+ } ]
+ }, {
+ role: "tool",
+ tool_call_id: "getApiFunctions",
+ content: JSON.stringify(operations.map((op => ({
+ name: op.name,
+ description: op.function.description,
+ ...op.protocol === "http" ? {
+ method: op.function.method,
+ path: op.function.path,
+ tags: op.function.tags
+ } : {}
+ }))))
+ }, ...ctx.histories.map(decodeHistory).flat(), {
+ role: "user",
+ content: ctx.prompt.contents.map(decodeUserMessageContent)
+ }, ...prevError instanceof AssistantMessageEmptyWithReasoningError ? [ {
+ role: "assistant",
+ content: prevError.reasoning
+ } ] : [], {
+ role: "system",
+ content: ctx.config?.systemPrompt?.select?.(ctx.histories) ?? AgenticaSystemPrompt.SELECT
+ }, ...emendMessages(failures ?? []) ],
+ tools: [ {
  type: "function",
- id: "getApiFunctions",
  function: {
- name: "getApiFunctions",
- arguments: JSON.stringify({})
+ name: CONTAINER.functions[0].name,
+ description: CONTAINER.functions[0].description,
+ parameters: CONTAINER.functions[0].parameters
  }
- } ]
- }, {
- role: "tool",
- tool_call_id: "getApiFunctions",
- content: JSON.stringify(operations.map((op => ({
- name: op.name,
- description: op.function.description,
- ...op.protocol === "http" ? {
- method: op.function.method,
- path: op.function.path,
- tags: op.function.tags
- } : {}
- }))))
- }, ...ctx.histories.map(decodeHistory).flat(), {
- role: "user",
- content: ctx.prompt.contents.map(decodeUserMessageContent)
- }, {
- role: "system",
- content: ctx.config?.systemPrompt?.select?.(ctx.histories) ?? AgenticaSystemPrompt.SELECT
- }, ...emendMessages(failures ?? []) ],
- tools: [ {
- type: "function",
- function: {
- name: CONTAINER.functions[0].name,
- description: CONTAINER.functions[0].description,
- parameters: CONTAINER.functions[0].parameters
- }
- } ],
- tool_choice: retry === 0 ? "auto" : "required"
- });
- const selectContext = [];
- const nullableCompletion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- selectContext[choice.index]?.mpsc.close();
- continue;
- }
- if (choice.delta.content == null || choice.delta.content === "") {
- continue;
- }
- if (selectContext[choice.index] != null) {
- selectContext[choice.index].content += choice.delta.content;
- selectContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- selectContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- const event = createAssistantMessageEvent({
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
- done: () => mpsc.done(),
- get: () => selectContext[choice.index]?.content ?? "",
- join: async () => {
- await mpsc.waitClosed();
- return selectContext[choice.index].content;
- }
- });
- ctx.dispatch(event);
+ } ],
+ tool_choice: retry === 0 ? "auto" : "required"
+ });
+ const completion = await reduceStreamingWithDispatch(stream, (props => {
+ const event = createAssistantMessageEvent(props);
+ ctx.dispatch(event);
+ }));
+ const allAssistantMessagesEmpty = completion.choices.every((v => v.message.tool_calls == null && v.message.content === ""));
+ if (allAssistantMessagesEmpty) {
+ const firstChoice = completion.choices.at(0);
+ if (firstChoice?.message?.reasoning != null) {
+ throw new AssistantMessageEmptyWithReasoningError(firstChoice?.message?.reasoning ?? "");
  }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
+ throw new AssistantMessageEmptyError;
  }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
+ return completion;
  }));
- const completion = nullableCompletion;
- const emptyAssistantMessages = completion.choices.filter((v => v.message.tool_calls == null && v.message.content === ""));
- if (emptyAssistantMessages.length > 0) {
- emptyAssistantMessages.forEach((v => {
- const event = createAssistantMessageEvent({
- stream: toAsyncGenerator(v.message.content ?? ""),
- done: () => true,
- get: () => v.message.content ?? "",
- join: async () => v.message.content ?? ""
- });
- ctx.dispatch(event);
- }));
+ if (typeof completion === "symbol") {
+ const event = createAssistantMessageEvent({
+ stream: toAsyncGenerator(""),
+ done: () => true,
+ get: () => "",
+ join: async () => ""
+ });
+ ctx.dispatch(event);
+ return;
  }
  if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
  const failures = [];