@agentica/core 0.32.1 → 0.32.3-dev.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/lib/index.mjs +308 -334
  2. package/lib/index.mjs.map +1 -1
  3. package/lib/orchestrate/call.js +87 -107
  4. package/lib/orchestrate/call.js.map +1 -1
  5. package/lib/orchestrate/describe.js +5 -50
  6. package/lib/orchestrate/describe.js.map +1 -1
  7. package/lib/orchestrate/initialize.js +5 -50
  8. package/lib/orchestrate/initialize.js.map +1 -1
  9. package/lib/orchestrate/select.js +107 -126
  10. package/lib/orchestrate/select.js.map +1 -1
  11. package/lib/utils/AssistantMessageEmptyError.d.ts +7 -0
  12. package/lib/utils/AssistantMessageEmptyError.js +17 -0
  13. package/lib/utils/AssistantMessageEmptyError.js.map +1 -0
  14. package/lib/utils/ChatGptCompletionMessageUtil.js +15 -8
  15. package/lib/utils/ChatGptCompletionMessageUtil.js.map +1 -1
  16. package/lib/utils/ChatGptCompletionStreamingUtil.d.ts +8 -0
  17. package/lib/utils/ChatGptCompletionStreamingUtil.js +74 -0
  18. package/lib/utils/ChatGptCompletionStreamingUtil.js.map +1 -0
  19. package/lib/utils/__retry.d.ts +1 -0
  20. package/lib/utils/__retry.js +30 -0
  21. package/lib/utils/__retry.js.map +1 -0
  22. package/lib/utils/__retry.spec.d.ts +1 -0
  23. package/lib/utils/__retry.spec.js +172 -0
  24. package/lib/utils/__retry.spec.js.map +1 -0
  25. package/package.json +1 -1
  26. package/src/orchestrate/call.ts +88 -114
  27. package/src/orchestrate/describe.ts +7 -65
  28. package/src/orchestrate/initialize.ts +4 -64
  29. package/src/orchestrate/select.ts +111 -138
  30. package/src/utils/AssistantMessageEmptyError.ts +13 -0
  31. package/src/utils/ChatGptCompletionMessageUtil.ts +14 -0
  32. package/src/utils/ChatGptCompletionStreamingUtil.ts +81 -0
  33. package/src/utils/__retry.spec.ts +198 -0
  34. package/src/utils/__retry.ts +18 -0
package/lib/index.mjs CHANGED
@@ -808,93 +808,30 @@ function isAgenticaContext(ctx) {
  return typeof ctx.initialize === "function";
  }

- class AsyncQueueClosedError extends Error {
- constructor(message) {
- super(message);
- this.name = "AsyncQueueClosedError";
- }
+ function __get_retry(limit) {
+ const retryFn = async (fn, prevError, attempt = 0) => {
+ try {
+ return await fn(prevError);
+ } catch (error) {
+ if (attempt >= limit - 1) {
+ throw error;
+ }
+ return retryFn(fn, error, attempt + 1);
+ }
+ };
+ return retryFn;
  }

- class AsyncQueue {
+ class AssistantMessageEmptyError extends Error {
  constructor() {
- this.queue = [];
- this.resolvers = [];
- this.closeResolvers = [];
- this.emptyResolvers = [];
- this.closed = false;
- }
- enqueue(item) {
- if (this.closed) {
- console.error(new AsyncQueueClosedError("Cannot enqueue item: queue is closed."));
- return;
- }
- this.queue.push(item);
- if (this.resolvers.length > 0) {
- this.resolvers.shift()?.({
- value: this.queue.shift(),
- done: false
- });
- }
- }
- async dequeue() {
- const item = (() => {
- if (!this.isEmpty()) {
- return {
- value: this.queue.shift(),
- done: false
- };
- }
- if (this.isClosed()) {
- return {
- value: undefined,
- done: true
- };
- }
- return null;
- })();
- if (this.isEmpty() && this.emptyResolvers.length !== 0) {
- this.emptyResolvers.forEach((resolve => resolve()));
- this.emptyResolvers = [];
- }
- if (item !== null) {
- return item;
- }
- return new Promise((resolve => this.resolvers.push(resolve)));
+ super();
  }
- isEmpty() {
- return this.queue.length === 0;
- }
- isClosed() {
- return this.closed;
- }
- done() {
- return this.isClosed() && this.isEmpty();
- }
- close() {
- this.closed = true;
- while (this.resolvers.length > 0) {
- this.resolvers.shift()?.({
- value: undefined,
- done: true
- });
- }
- this.closeResolvers.forEach((resolve => resolve()));
- }
- async waitUntilEmpty() {
- if (this.isEmpty()) {
- return Promise.resolve();
- }
- return new Promise((resolve => {
- this.emptyResolvers.push(resolve);
- }));
- }
- async waitClosed() {
- if (this.isClosed()) {
- return Promise.resolve();
- }
- return new Promise((resolve => {
- this.closeResolvers.push(resolve);
- }));
+ }
+
+ class AssistantMessageEmptyWithReasoningError extends AssistantMessageEmptyError {
+ constructor(reasoning) {
+ super();
+ this.reasoning = reasoning;
  }
  }

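Note, not part of the diff: the new __get_retry helper retries a callback up to `limit` times and hands the previous attempt's error to the next attempt. The following TypeScript sketch mirrors the bundled JavaScript above with illustrative typings and a hypothetical usage; none of it is the package's published API.

    // Illustrative typing of the __get_retry helper shown in the diff above.
    type RetryTask<T> = (prevError?: unknown) => Promise<T>;

    function __get_retry(limit: number) {
      const retryFn = async <T>(fn: RetryTask<T>, prevError?: unknown, attempt = 0): Promise<T> => {
        try {
          return await fn(prevError); // the previous failure is visible to the next attempt
        } catch (error) {
          if (attempt >= limit - 1) {
            throw error; // out of attempts: surface the last error
          }
          return retryFn(fn, error, attempt + 1);
        }
      };
      return retryFn;
    }

    // Hypothetical usage: succeed on the third attempt.
    void (async () => {
      let calls = 0;
      const retry = __get_retry(3);
      const value = await retry(async (prevError) => {
        calls += 1;
        if (prevError !== undefined) console.warn("retrying after:", prevError);
        if (calls < 3) throw new Error(`attempt ${calls} failed`);
        return "ok";
      });
      console.log(value); // "ok" after two failed attempts
    })();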
@@ -984,7 +921,10 @@ function accumulate(origin, chunk) {
  }), []) : undefined,
  content: choice.delta.content ?? null,
  refusal: choice.delta.refusal ?? null,
- role: "assistant"
+ role: "assistant",
+ ...{
+ reasoning: choice.delta.reasoning ?? null
+ }
  }
  };
  }));
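Note, not part of the diff: spreading `{ reasoning: ... }` rather than writing the property directly is likely used because `reasoning` is not part of the upstream chat-completion message type, and TypeScript's excess property check rejects unknown keys in object literals but not in spreads. A minimal sketch with simplified stand-in types (not the official openai typings):

    // Simplified stand-in type; the real message type comes from the openai package.
    interface AssistantMessage {
      role: "assistant";
      content: string | null;
    }

    declare const reasoningDelta: string | undefined;

    // const direct: AssistantMessage = { role: "assistant", content: null, reasoning: reasoningDelta ?? null };
    // ^ rejected by the compiler: "reasoning" is not a known property of AssistantMessage.

    const viaSpread: AssistantMessage = {
      role: "assistant",
      content: null,
      ...{ reasoning: reasoningDelta ?? null }, // accepted: spreads bypass excess property checking
    };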
@@ -1051,6 +991,13 @@ function mergeChoice(acc, cur) {
  acc.message.refusal += cur.delta.refusal;
  }
  }
+ if (cur.delta.reasoning != null) {
+ if (acc.message.reasoning == null) {
+ acc.message.reasoning = cur.delta.reasoning;
+ } else {
+ acc.message.reasoning += cur.delta.reasoning;
+ }
+ }
  if (cur.delta.tool_calls != null) {
  (_a = acc.message).tool_calls ?? (_a.tool_calls = []);
  const toolCalls = acc.message.tool_calls;
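Note, not part of the diff: `reasoning` follows the same delta-merging pattern already used for `content` and `refusal`: the first non-null delta seeds the field, later deltas are concatenated. A small sketch with simplified types, only the field names come from the diff:

    interface Acc { message: { reasoning: string | null } }
    interface Chunk { delta: { reasoning?: string | null } }

    function mergeReasoning(acc: Acc, cur: Chunk): void {
      if (cur.delta.reasoning != null) {
        if (acc.message.reasoning == null) {
          acc.message.reasoning = cur.delta.reasoning; // first chunk seeds the field
        } else {
          acc.message.reasoning += cur.delta.reasoning; // later chunks append
        }
      }
    }

    // Example: three streamed chunks produce one joined reasoning string.
    const acc: Acc = { message: { reasoning: null } };
    for (const piece of ["Let me think", " about the tools", " to call."]) {
      mergeReasoning(acc, { delta: { reasoning: piece } });
    }
    // acc.message.reasoning === "Let me think about the tools to call."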
@@ -1090,6 +1037,96 @@ const ChatGptCompletionMessageUtil = {
  mergeToolCalls
  };

+ class AsyncQueueClosedError extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "AsyncQueueClosedError";
+ }
+ }
+
+ class AsyncQueue {
+ constructor() {
+ this.queue = [];
+ this.resolvers = [];
+ this.closeResolvers = [];
+ this.emptyResolvers = [];
+ this.closed = false;
+ }
+ enqueue(item) {
+ if (this.closed) {
+ console.error(new AsyncQueueClosedError("Cannot enqueue item: queue is closed."));
+ return;
+ }
+ this.queue.push(item);
+ if (this.resolvers.length > 0) {
+ this.resolvers.shift()?.({
+ value: this.queue.shift(),
+ done: false
+ });
+ }
+ }
+ async dequeue() {
+ const item = (() => {
+ if (!this.isEmpty()) {
+ return {
+ value: this.queue.shift(),
+ done: false
+ };
+ }
+ if (this.isClosed()) {
+ return {
+ value: undefined,
+ done: true
+ };
+ }
+ return null;
+ })();
+ if (this.isEmpty() && this.emptyResolvers.length !== 0) {
+ this.emptyResolvers.forEach((resolve => resolve()));
+ this.emptyResolvers = [];
+ }
+ if (item !== null) {
+ return item;
+ }
+ return new Promise((resolve => this.resolvers.push(resolve)));
+ }
+ isEmpty() {
+ return this.queue.length === 0;
+ }
+ isClosed() {
+ return this.closed;
+ }
+ done() {
+ return this.isClosed() && this.isEmpty();
+ }
+ close() {
+ this.closed = true;
+ while (this.resolvers.length > 0) {
+ this.resolvers.shift()?.({
+ value: undefined,
+ done: true
+ });
+ }
+ this.closeResolvers.forEach((resolve => resolve()));
+ }
+ async waitUntilEmpty() {
+ if (this.isEmpty()) {
+ return Promise.resolve();
+ }
+ return new Promise((resolve => {
+ this.emptyResolvers.push(resolve);
+ }));
+ }
+ async waitClosed() {
+ if (this.isClosed()) {
+ return Promise.resolve();
+ }
+ return new Promise((resolve => {
+ this.closeResolvers.push(resolve);
+ }));
+ }
+ }
+
  class MPSC {
  constructor() {
  this.queue = new AsyncQueue;
@@ -1204,94 +1241,43 @@ var index$2 = Object.freeze({
  toAsyncGenerator
  });

- function createOperationSelection(props) {
- return {
- operation: props.operation,
- reason: props.reason,
- toJSON: () => ({
- operation: props.operation.toJSON(),
- reason: props.reason
- })
- };
- }
-
- function cancelFunctionFromContext(ctx, reference) {
- const index = ctx.stack.findIndex((item => item.operation.name === reference.name));
- if (index === -1) {
- return;
- }
- const item = ctx.stack[index];
- ctx.stack.splice(index, 1);
- const event = createCancelEvent({
- selection: createOperationSelection({
- operation: item.operation,
- reason: reference.reason
- })
- });
- ctx.dispatch(event);
- }
-
- async function call(ctx, operations) {
- const stream = await ctx.request("call", {
- messages: [ {
- role: "system",
- content: AgenticaDefaultPrompt.write(ctx.config)
- }, ...ctx.histories.map(decodeHistory).flat(), {
- role: "user",
- content: ctx.prompt.contents.map(decodeUserMessageContent)
- }, ...ctx.config?.systemPrompt?.execute === null ? [] : [ {
- role: "system",
- content: ctx.config?.systemPrompt?.execute?.(ctx.histories) ?? AgenticaSystemPrompt.EXECUTE
- } ] ],
- tools: operations.map((s => ({
- type: "function",
- function: {
- name: s.name,
- description: s.function.description,
- parameters: "separated" in s.function && s.function.separated !== undefined ? s.function.separated.llm ?? {
- type: "object",
- properties: {},
- required: [],
- additionalProperties: false,
- $defs: {}
- } : s.function.parameters
- }
- }))),
- tool_choice: "auto"
- });
- const selectContext = [];
+ async function reduceStreamingWithDispatch(stream, eventProcessor) {
+ const streamContext = new Map;
  const nullableCompletion = await StreamUtil.reduce(stream, (async (accPromise, chunk) => {
  const acc = await accPromise;
  const registerContext = choices => {
  for (const choice of choices) {
  if (choice.finish_reason != null) {
- selectContext[choice.index]?.mpsc.close();
+ const context = streamContext.get(choice.index);
+ if (context != null) {
+ context.mpsc.close();
+ }
  continue;
  }
  if (choice.delta.content == null || choice.delta.content === "") {
  continue;
  }
- if (selectContext[choice.index] != null) {
- selectContext[choice.index].content += choice.delta.content;
- selectContext[choice.index].mpsc.produce(choice.delta.content);
+ if (streamContext.has(choice.index)) {
+ const context = streamContext.get(choice.index);
+ context.content += choice.delta.content;
+ context.mpsc.produce(choice.delta.content);
  continue;
  }
  const mpsc = new MPSC;
- selectContext[choice.index] = {
+ streamContext.set(choice.index, {
  content: choice.delta.content,
  mpsc
- };
+ });
  mpsc.produce(choice.delta.content);
- const event = createAssistantMessageEvent({
+ eventProcessor({
  stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
  done: () => mpsc.done(),
- get: () => selectContext[choice.index]?.content ?? "",
+ get: () => streamContext.get(choice.index)?.content ?? "",
  join: async () => {
  await mpsc.waitClosed();
- return selectContext[choice.index].content;
+ return streamContext.get(choice.index).content;
  }
  });
- ctx.dispatch(event);
  }
  };
  if (acc.object === "chat.completion.chunk") {
@@ -1301,18 +1287,100 @@ async function call(ctx, operations) {
  registerContext(chunk.choices);
  return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
  }));
- const completion = nullableCompletion;
- const emptyAssistantMessages = completion.choices.filter((v => v.message.tool_calls == null && v.message.content === ""));
- if (emptyAssistantMessages.length > 0) {
- emptyAssistantMessages.forEach((v => {
- const event = createAssistantMessageEvent({
- stream: toAsyncGenerator(v.message.content ?? ""),
- done: () => true,
- get: () => v.message.content ?? "",
- join: async () => v.message.content ?? ""
- });
+ if (nullableCompletion == null) {
+ throw new Error("StreamUtil.reduce did not produce a ChatCompletion. Possible causes: the input stream was empty, invalid, or closed prematurely. " + "To debug: check that the stream is properly initialized and contains valid ChatCompletionChunk data. " + "You may also enable verbose logging upstream to inspect the stream contents. " + `Stream locked: ${stream.locked}.`);
+ }
+ return nullableCompletion;
+ }
+
+ function createOperationSelection(props) {
+ return {
+ operation: props.operation,
+ reason: props.reason,
+ toJSON: () => ({
+ operation: props.operation.toJSON(),
+ reason: props.reason
+ })
+ };
+ }
+
+ function cancelFunctionFromContext(ctx, reference) {
+ const index = ctx.stack.findIndex((item => item.operation.name === reference.name));
+ if (index === -1) {
+ return;
+ }
+ const item = ctx.stack[index];
+ ctx.stack.splice(index, 1);
+ const event = createCancelEvent({
+ selection: createOperationSelection({
+ operation: item.operation,
+ reason: reference.reason
+ })
+ });
+ ctx.dispatch(event);
+ }
+
+ async function call(ctx, operations) {
+ const _retryFn = __get_retry(ctx.config?.retry ?? AgenticaConstant.RETRY);
+ const retryFn = async fn => _retryFn(fn).catch((e => {
+ if (e instanceof AssistantMessageEmptyError) {
+ return Symbol("emptyAssistantMessage");
+ }
+ throw e;
+ }));
+ const completion = await retryFn((async prevError => {
+ const stream = await ctx.request("call", {
+ messages: [ {
+ role: "system",
+ content: AgenticaDefaultPrompt.write(ctx.config)
+ }, ...ctx.histories.map(decodeHistory).flat(), {
+ role: "user",
+ content: ctx.prompt.contents.map(decodeUserMessageContent)
+ }, ...prevError instanceof AssistantMessageEmptyWithReasoningError ? [ {
+ role: "assistant",
+ content: prevError.reasoning
+ } ] : [], ...ctx.config?.systemPrompt?.execute === null ? [] : [ {
+ role: "system",
+ content: ctx.config?.systemPrompt?.execute?.(ctx.histories) ?? AgenticaSystemPrompt.EXECUTE
+ } ] ],
+ tools: operations.map((s => ({
+ type: "function",
+ function: {
+ name: s.name,
+ description: s.function.description,
+ parameters: "separated" in s.function && s.function.separated !== undefined ? s.function.separated.llm ?? {
+ type: "object",
+ properties: {},
+ required: [],
+ additionalProperties: false,
+ $defs: {}
+ } : s.function.parameters
+ }
+ }))),
+ tool_choice: "auto"
+ });
+ const completion = await reduceStreamingWithDispatch(stream, (props => {
+ const event = createAssistantMessageEvent(props);
  ctx.dispatch(event);
  }));
+ const allAssistantMessagesEmpty = completion.choices.every((v => v.message.tool_calls == null && v.message.content === ""));
+ if (allAssistantMessagesEmpty) {
+ const firstChoice = completion.choices.at(0);
+ if (firstChoice?.message?.reasoning != null) {
+ throw new AssistantMessageEmptyWithReasoningError(firstChoice?.message?.reasoning ?? "");
+ }
+ throw new AssistantMessageEmptyError;
+ }
+ return completion;
+ }));
+ if (typeof completion === "symbol") {
+ const event = createAssistantMessageEvent({
+ stream: toAsyncGenerator(""),
+ done: () => true,
+ get: () => "",
+ join: async () => ""
+ });
+ ctx.dispatch(event);
  return [];
  }
  const executes = [];
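Note, not part of the diff: the hunk above wraps the model request in the retry helper, turns an all-empty assistant answer into a typed error, and, when that failed attempt carried `reasoning`, replays the reasoning as an assistant message on the next attempt; exhausted retries are mapped to a sentinel Symbol and dispatched as an empty assistant message. A simplified TypeScript sketch of that flow follows; the stubbed request and the two-attempt driver at the bottom are hypothetical.

    class AssistantMessageEmptyError extends Error {}
    class AssistantMessageEmptyWithReasoningError extends AssistantMessageEmptyError {
      constructor(public readonly reasoning: string) { super(); }
    }

    type Choice = { message: { content: string; tool_calls?: unknown[] | null; reasoning?: string | null } };

    // Hypothetical stand-in for ctx.request("call", ...): the first answer is empty
    // but carries reasoning, the second answer has real content.
    let attempt = 0;
    async function fakeRequest(extra: { role: string; content: string }[]): Promise<Choice[]> {
      attempt += 1;
      return attempt === 1
        ? [{ message: { content: "", tool_calls: null, reasoning: "I should call getApiFunctions." } }]
        : [{ message: { content: `Continuing from: ${extra[0]?.content ?? "(none)"}` } }];
    }

    async function callOnce(prevError?: unknown): Promise<Choice[]> {
      // Feed the previous attempt's reasoning back into the prompt, as the diff does.
      const extra = prevError instanceof AssistantMessageEmptyWithReasoningError
        ? [{ role: "assistant", content: prevError.reasoning }]
        : [];
      const choices = await fakeRequest(extra);
      const allEmpty = choices.every((c) => c.message.tool_calls == null && c.message.content === "");
      if (allEmpty) {
        const reasoning = choices.at(0)?.message.reasoning;
        throw reasoning != null
          ? new AssistantMessageEmptyWithReasoningError(reasoning)
          : new AssistantMessageEmptyError();
      }
      return choices;
    }

    // Two attempts by hand, mimicking __get_retry(2): the retry sees the first error.
    void (async () => {
      try {
        console.log(await callOnce());
      } catch (e) {
        console.log(await callOnce(e)); // second attempt continues from the reasoning
      }
    })();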
@@ -1862,48 +1930,12 @@ async function describe(ctx, histories) {
  content: ctx.config?.systemPrompt?.describe?.(histories) ?? AgenticaSystemPrompt.DESCRIBE
  } ]
  });
- const describeContext = [];
- await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- describeContext[choice.index].mpsc.close();
- continue;
- }
- if (choice.delta.content == null) {
- continue;
- }
- if (describeContext[choice.index] != null) {
- describeContext[choice.index].content += choice.delta.content;
- describeContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- describeContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- const event = createDescribeEvent({
- executes: histories,
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
- done: () => mpsc.done(),
- get: () => describeContext[choice.index]?.content ?? "",
- join: async () => {
- await mpsc.waitClosed();
- return describeContext[choice.index].content;
- }
- });
- ctx.dispatch(event);
- }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
- }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
+ await reduceStreamingWithDispatch(completionStream, (props => {
+ const event = createDescribeEvent({
+ executes: histories,
+ ...props
+ });
+ ctx.dispatch(event);
  }));
  }

@@ -2568,47 +2600,9 @@ async function initialize(ctx) {
  } ],
  tool_choice: "auto"
  });
- const textContext = [];
- const completion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- textContext[choice.index]?.mpsc.close();
- continue;
- }
- if (choice.delta.content == null || choice.delta.content.length === 0) {
- continue;
- }
- if (textContext[choice.index] != null) {
- textContext[choice.index].content += choice.delta.content;
- textContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- textContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- const event = createAssistantMessageEvent({
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
- done: () => mpsc.done(),
- get: () => textContext[choice.index].content,
- join: async () => {
- await mpsc.waitClosed();
- return textContext[choice.index].content;
- }
- });
- ctx.dispatch(event);
- }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
- }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
+ const completion = await reduceStreamingWithDispatch(completionStream, (props => {
+ const event = createAssistantMessageEvent(props);
+ ctx.dispatch(event);
  }));
  if (completion === null) {
  throw new Error("No completion received");
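Note, not part of the diff: the streaming accumulation that was previously duplicated across call, describe, initialize, and select is now the shared reduceStreamingWithDispatch(stream, eventProcessor); the callback receives the same { stream, done, get, join } props the event factories expect, so each orchestrator only decides which event to dispatch. A sketch of that contract with illustrative types, only the property names come from the diff:

    interface StreamingProps {
      stream: AsyncGenerator<string>;   // token-by-token text for this choice
      done: () => boolean;              // has this choice finished streaming?
      get: () => string;                // text accumulated so far
      join: () => Promise<string>;      // resolves with the full text once closed
    }

    type EventProcessor = (props: StreamingProps) => void;

    // describe()-style wiring, simplified: only the event wrapping differs per orchestrator.
    function makeDescribeProcessor(dispatch: (event: unknown) => void, executes: unknown[]): EventProcessor {
      return (props) => {
        dispatch({ type: "describe", executes, ...props });
      };
    }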
@@ -2756,103 +2750,83 @@ async function select(ctx) {
  }

  async function step(ctx, operations, retry, failures) {
- const completionStream = await ctx.request("select", {
- messages: [ {
- role: "system",
- content: AgenticaDefaultPrompt.write(ctx.config)
- }, {
- role: "assistant",
- tool_calls: [ {
+ const _retryFn = __get_retry(ctx.config?.retry ?? AgenticaConstant.RETRY);
+ const retryFn = async fn => _retryFn(fn).catch((e => {
+ if (e instanceof AssistantMessageEmptyError) {
+ return Symbol("emptyAssistantMessage");
+ }
+ throw e;
+ }));
+ const completion = await retryFn((async prevError => {
+ const stream = await ctx.request("select", {
+ messages: [ {
+ role: "system",
+ content: AgenticaDefaultPrompt.write(ctx.config)
+ }, {
+ role: "assistant",
+ tool_calls: [ {
+ type: "function",
+ id: "getApiFunctions",
+ function: {
+ name: "getApiFunctions",
+ arguments: JSON.stringify({})
+ }
+ } ]
+ }, {
+ role: "tool",
+ tool_call_id: "getApiFunctions",
+ content: JSON.stringify(operations.map((op => ({
+ name: op.name,
+ description: op.function.description,
+ ...op.protocol === "http" ? {
+ method: op.function.method,
+ path: op.function.path,
+ tags: op.function.tags
+ } : {}
+ }))))
+ }, ...ctx.histories.map(decodeHistory).flat(), {
+ role: "user",
+ content: ctx.prompt.contents.map(decodeUserMessageContent)
+ }, ...prevError instanceof AssistantMessageEmptyWithReasoningError ? [ {
+ role: "assistant",
+ content: prevError.reasoning
+ } ] : [], {
+ role: "system",
+ content: ctx.config?.systemPrompt?.select?.(ctx.histories) ?? AgenticaSystemPrompt.SELECT
+ }, ...emendMessages(failures ?? []) ],
+ tools: [ {
  type: "function",
- id: "getApiFunctions",
  function: {
- name: "getApiFunctions",
- arguments: JSON.stringify({})
+ name: CONTAINER.functions[0].name,
+ description: CONTAINER.functions[0].description,
+ parameters: CONTAINER.functions[0].parameters
  }
- } ]
- }, {
- role: "tool",
- tool_call_id: "getApiFunctions",
- content: JSON.stringify(operations.map((op => ({
- name: op.name,
- description: op.function.description,
- ...op.protocol === "http" ? {
- method: op.function.method,
- path: op.function.path,
- tags: op.function.tags
- } : {}
- }))))
- }, ...ctx.histories.map(decodeHistory).flat(), {
- role: "user",
- content: ctx.prompt.contents.map(decodeUserMessageContent)
- }, {
- role: "system",
- content: ctx.config?.systemPrompt?.select?.(ctx.histories) ?? AgenticaSystemPrompt.SELECT
- }, ...emendMessages(failures ?? []) ],
- tools: [ {
- type: "function",
- function: {
- name: CONTAINER.functions[0].name,
- description: CONTAINER.functions[0].description,
- parameters: CONTAINER.functions[0].parameters
- }
- } ],
- tool_choice: retry === 0 ? "auto" : "required"
- });
- const selectContext = [];
- const nullableCompletion = await StreamUtil.reduce(completionStream, (async (accPromise, chunk) => {
- const acc = await accPromise;
- const registerContext = choices => {
- for (const choice of choices) {
- if (choice.finish_reason != null) {
- selectContext[choice.index]?.mpsc.close();
- continue;
- }
- if (choice.delta.content == null || choice.delta.content === "") {
- continue;
- }
- if (selectContext[choice.index] != null) {
- selectContext[choice.index].content += choice.delta.content;
- selectContext[choice.index].mpsc.produce(choice.delta.content);
- continue;
- }
- const mpsc = new MPSC;
- selectContext[choice.index] = {
- content: choice.delta.content,
- mpsc
- };
- mpsc.produce(choice.delta.content);
- const event = createAssistantMessageEvent({
- stream: streamDefaultReaderToAsyncGenerator(mpsc.consumer.getReader()),
- done: () => mpsc.done(),
- get: () => selectContext[choice.index]?.content ?? "",
- join: async () => {
- await mpsc.waitClosed();
- return selectContext[choice.index].content;
- }
- });
- ctx.dispatch(event);
+ } ],
+ tool_choice: retry === 0 ? "auto" : "required"
+ });
+ const completion = await reduceStreamingWithDispatch(stream, (props => {
+ const event = createAssistantMessageEvent(props);
+ ctx.dispatch(event);
+ }));
+ const allAssistantMessagesEmpty = completion.choices.every((v => v.message.tool_calls == null && v.message.content === ""));
+ if (allAssistantMessagesEmpty) {
+ const firstChoice = completion.choices.at(0);
+ if (firstChoice?.message?.reasoning != null) {
+ throw new AssistantMessageEmptyWithReasoningError(firstChoice?.message?.reasoning ?? "");
  }
- };
- if (acc.object === "chat.completion.chunk") {
- registerContext([ acc, chunk ].flatMap((v => v.choices)));
- return ChatGptCompletionMessageUtil.merge([ acc, chunk ]);
+ throw new AssistantMessageEmptyError;
  }
- registerContext(chunk.choices);
- return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
+ return completion;
  }));
- const completion = nullableCompletion;
- const emptyAssistantMessages = completion.choices.filter((v => v.message.tool_calls == null && v.message.content === ""));
- if (emptyAssistantMessages.length > 0) {
- emptyAssistantMessages.forEach((v => {
- const event = createAssistantMessageEvent({
- stream: toAsyncGenerator(v.message.content ?? ""),
- done: () => true,
- get: () => v.message.content ?? "",
- join: async () => v.message.content ?? ""
- });
- ctx.dispatch(event);
- }));
+ if (typeof completion === "symbol") {
+ const event = createAssistantMessageEvent({
+ stream: toAsyncGenerator(""),
+ done: () => true,
+ get: () => "",
+ join: async () => ""
+ });
+ ctx.dispatch(event);
+ return;
  }
  if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
  const failures = [];