@agentica/core 0.32.3-dev.3 → 0.32.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/Agentica.js +7 -64
- package/lib/Agentica.js.map +1 -1
- package/lib/MicroAgentica.d.ts +3 -1
- package/lib/MicroAgentica.js +10 -66
- package/lib/MicroAgentica.js.map +1 -1
- package/lib/context/MicroAgenticaContext.d.ts +4 -0
- package/lib/index.mjs +137 -181
- package/lib/index.mjs.map +1 -1
- package/lib/orchestrate/call.js +1 -0
- package/lib/orchestrate/call.js.map +1 -1
- package/lib/orchestrate/cancel.js +1 -1
- package/lib/orchestrate/cancel.js.map +1 -1
- package/lib/orchestrate/describe.js +1 -1
- package/lib/orchestrate/describe.js.map +1 -1
- package/lib/orchestrate/initialize.js +1 -1
- package/lib/orchestrate/initialize.js.map +1 -1
- package/lib/orchestrate/select.js +1 -1
- package/lib/orchestrate/select.js.map +1 -1
- package/lib/utils/ChatGptCompletionStreamingUtil.d.ts +1 -1
- package/lib/utils/ChatGptCompletionStreamingUtil.js +2 -2
- package/lib/utils/ChatGptCompletionStreamingUtil.js.map +1 -1
- package/lib/utils/StreamUtil.d.ts +7 -4
- package/lib/utils/StreamUtil.js +17 -14
- package/lib/utils/StreamUtil.js.map +1 -1
- package/lib/utils/StreamUtil.spec.js +12 -12
- package/lib/utils/StreamUtil.spec.js.map +1 -1
- package/lib/utils/request.d.ts +12 -0
- package/lib/utils/request.js +81 -0
- package/lib/utils/request.js.map +1 -0
- package/package.json +1 -1
- package/src/Agentica.ts +9 -91
- package/src/MicroAgentica.ts +14 -87
- package/src/context/MicroAgenticaContext.ts +4 -0
- package/src/orchestrate/call.ts +1 -0
- package/src/orchestrate/cancel.ts +1 -1
- package/src/orchestrate/describe.ts +1 -1
- package/src/orchestrate/initialize.ts +1 -1
- package/src/orchestrate/select.ts +1 -1
- package/src/utils/ChatGptCompletionStreamingUtil.ts +2 -2
- package/src/utils/StreamUtil.spec.ts +12 -9
- package/src/utils/StreamUtil.ts +15 -11
- package/src/utils/request.ts +101 -0
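
The common thread in these changes is cancellation support: `conversate` gains an `options` argument whose `abortSignal` is forwarded into the vendor request (as `signal`) and into every `StreamUtil` helper, and the streaming-request logic duplicated between `Agentica` and `MicroAgentica` is extracted into a shared `getChatCompletionWithStreamingFunction` in the new `utils/request` module. A minimal usage sketch of the new option follows; the vendor setup, model names, and controller list are illustrative placeholders, not taken from this diff:

```ts
import { MicroAgentica } from "@agentica/core";
import OpenAI from "openai";

// Illustrative setup; adjust vendor/model/controllers to your application.
const agent = new MicroAgentica({
  model: "chatgpt",
  vendor: {
    api: new OpenAI({ apiKey: process.env.OPENAI_API_KEY }),
    model: "gpt-4o-mini",
  },
  controllers: [],
});

// New in 0.32.4: conversate(content, options) accepts an AbortSignal.
const controller = new AbortController();
setTimeout(() => controller.abort(), 10_000); // cancel after 10 seconds

await agent.conversate("Summarize today's schedule.", {
  abortSignal: controller.signal,
});
```

Note that aborting does not reject the call; as the stream utilities in the diff below show, the signal simply short-circuits the read loops, so the conversation ends with whatever output had been produced so far.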
package/lib/index.mjs
CHANGED
@@ -1,9 +1,9 @@
 import { is_node, Semaphore } from "tstl";
 
-import { v4 } from "uuid";
-
 import "typia";
 
+import { v4 } from "uuid";
+
 import { HttpLlm, OpenApi, McpLlm } from "@samchon/openapi";
 
 import * as __typia_transform__validateReport from "typia/lib/internal/_validateReport.js";
@@ -1160,12 +1160,12 @@ class MPSC {
     }
 }
 
-async function readAll(stream) {
+async function readAll(stream, abortSignal) {
     const reader = stream.getReader();
     const result = [];
     while (true) {
         const {done, value} = await reader.read();
-        if (done) {
+        if (done || abortSignal?.aborted === true) {
             break;
         }
         result.push(value);
@@ -1173,11 +1173,14 @@ async function readAll(stream) {
     return result;
 }
 
-async function reduce(stream, reducer, initial) {
+async function reduce(stream, reducer, options) {
    const reader = stream.getReader();
    const iterator = streamDefaultReaderToAsyncGenerator(reader);
-    let acc = initial ?? null;
+    let acc = options.initial ?? null;
    for await (const value of iterator) {
+        if (options.abortSignal?.aborted === true) {
+            break;
+        }
        if (acc === null) {
            acc = value;
            continue;
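
`StreamUtil.reduce` previously took the seed as a positional `initial` parameter; it now takes a required options bag (the compiled code dereferences `options.initial` without optional chaining) and checks the signal on every iteration. A sketch of the new call shape, assuming `StreamUtil` is importable from the package internals (the import path is not shown in this diff) and using an illustrative number stream:

```ts
// Illustrative input stream of numbers.
const numbers = new ReadableStream<number>({
  start(controller) {
    [1, 2, 3, 4].forEach((n) => controller.enqueue(n));
    controller.close();
  },
});

const abort = new AbortController();
const sum = await StreamUtil.reduce(
  numbers,
  (acc: number, value: number) => acc + value,
  { initial: 0, abortSignal: abort.signal }, // previously: a bare positional `initial`
);
// sum === 10, unless abort.signal fired mid-stream
```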
@@ -1201,26 +1204,26 @@ async function* toAsyncGenerator(value) {
     yield value;
 }
 
-async function* streamDefaultReaderToAsyncGenerator(reader) {
+async function* streamDefaultReaderToAsyncGenerator(reader, abortSignal) {
     while (true) {
         const {done, value} = await reader.read();
-        if (done) {
+        if (done || abortSignal?.aborted === true) {
             break;
         }
         yield value;
     }
 }
 
-function transform(stream, transformer) {
+function transform(stream, transformer, abortSignal) {
     const reader = stream.getReader();
     return new ReadableStream({
         pull: async controller => {
             const {done, value} = await reader.read();
-            if (done === false) {
-                controller.enqueue(transformer(value));
-            } else {
+            if (done === true || abortSignal?.aborted === true) {
                 controller.close();
+                return;
             }
+            controller.enqueue(transformer(value));
         }
     });
 }
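
`readAll`, `streamDefaultReaderToAsyncGenerator`, and `transform` all gain an optional trailing `abortSignal` with the same per-read check: once `aborted` is true, the loop breaks (or, in `transform`, the output stream is closed) rather than throwing. A sketch of the composed behavior, under the same `StreamUtil` import assumption as above:

```ts
// `numbers2` is a fresh ReadableStream<number>, built as in the previous sketch.
const abort = new AbortController();

// Maps each chunk; once aborted, transform() closes its output instead of enqueuing.
const doubled = StreamUtil.transform(numbers2, (n: number) => n * 2, abort.signal);

// Returns whatever was read before done/abort; aborting never rejects.
const chunks: number[] = await StreamUtil.readAll(doubled, abort.signal);
```

Since the signal is only consulted between reads, chunks already enqueued are still delivered; cancellation is graceful rather than abrupt.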
@@ -1243,7 +1246,7 @@ var index$2 = Object.freeze({
     toAsyncGenerator
 });
 
-async function reduceStreamingWithDispatch(stream, eventProcessor) {
+async function reduceStreamingWithDispatch(stream, eventProcessor, abortSignal) {
     const streamContext = new Map;
     const nullableCompletion = await StreamUtil.reduce(stream, (async (accPromise, chunk) => {
         const acc = await accPromise;
@@ -1286,7 +1289,9 @@ async function reduceStreamingWithDispatch(stream, eventProcessor) {
         }
         registerContext(chunk.choices);
         return ChatGptCompletionMessageUtil.accumulate(acc, chunk);
-    }));
+    }), {
+        abortSignal
+    });
     if (nullableCompletion == null) {
         throw new Error("StreamUtil.reduce did not produce a ChatCompletion. Possible causes: the input stream was empty, invalid, or closed prematurely. " + "To debug: check that the stream is properly initialized and contains valid ChatCompletionChunk data. " + "You may also enable verbose logging upstream to inspect the stream contents. " + `Stream locked: ${stream.locked}.`);
     }
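
Note that `reduceStreamingWithDispatch` passes `{ abortSignal }` with no `initial`, so the accumulator seeds itself from the first chunk via the `acc === null` branch. A standalone restatement of that seeding rule (the function name is mine; the logic mirrors the compiled `reduce` above):

```ts
// With no `initial`, the first value becomes the accumulator; the signal
// is checked before each step, ending the fold early without throwing.
async function reduceLike<T>(
  values: AsyncIterable<T>,
  reducer: (acc: T, value: T) => T | Promise<T>,
  options: { initial?: T; abortSignal?: AbortSignal },
): Promise<T | null> {
  let acc: T | null = options.initial ?? null;
  for await (const value of values) {
    if (options.abortSignal?.aborted === true) break;
    if (acc === null) {
      acc = value; // seed from the first chunk
      continue;
    }
    acc = await reducer(acc, value);
  }
  return acc;
}
```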
@@ -1376,6 +1381,7 @@ async function call(ctx, operations) {
     const completion = await reduceStreamingWithDispatch(stream, (props => {
         const event = createAssistantMessageEvent(props);
         void ctx.dispatch(event).catch((() => {}));
+        ctx.abortSignal;
     }));
     const allAssistantMessagesEmpty = completion.choices.every((v => v.message.tool_calls == null && v.message.content === ""));
     if (allAssistantMessagesEmpty) {
@@ -1799,7 +1805,7 @@ async function step$1(ctx, operations, retry, failures) {
         } ],
         tool_choice: retry === 0 ? "auto" : "required"
     });
-    const chunks = await StreamUtil.readAll(completionStream);
+    const chunks = await StreamUtil.readAll(completionStream, ctx.abortSignal);
     const completion = ChatGptCompletionMessageUtil.merge(chunks);
     if (retry++ < (ctx.config?.retry ?? AgenticaConstant.RETRY)) {
         const failures = [];
@@ -1951,7 +1957,7 @@ async function describe(ctx, histories) {
            ...props
        });
        ctx.dispatch(event);
-    }));
+    }), ctx.abortSignal);
 }
 
 const ChatGptDescribeFunctionAgent = {
@@ -2618,7 +2624,7 @@ async function initialize(ctx) {
     const completion = await reduceStreamingWithDispatch(completionStream, (props => {
         const event = createAssistantMessageEvent(props);
         ctx.dispatch(event);
-    }));
+    }), ctx.abortSignal);
     if (completion === null) {
         throw new Error("No completion received");
     }
@@ -2823,7 +2829,7 @@ async function step(ctx, operations, retry, failures) {
     const completion = await reduceStreamingWithDispatch(stream, (props => {
         const event = createAssistantMessageEvent(props);
         void ctx.dispatch(event).catch((() => {}));
-    }));
+    }), ctx.abortSignal);
     const allAssistantMessagesEmpty = completion.choices.every((v => v.message.tool_calls == null && v.message.content === ""));
     if (allAssistantMessagesEmpty) {
         const firstChoice = completion.choices.at(0);
@@ -3115,6 +3121,102 @@ function findOperation(props) {
     return found;
 }
 
+var index$1 = Object.freeze({
+    __proto__: null,
+    createAssistantMessageEvent,
+    createAssistantMessageHistory,
+    createCallEvent,
+    createCancelEvent,
+    createCancelHistory,
+    createDescribeEvent,
+    createDescribeHistory,
+    createExecuteEvent,
+    createExecuteHistory,
+    createInitializeEvent,
+    createJsonParseErrorEvent,
+    createOperationSelection,
+    createRequestEvent,
+    createResponseEvent,
+    createSelectEvent,
+    createSelectHistory,
+    createSystemMessageHistory,
+    createUserMessageEvent,
+    createUserMessageHistory,
+    createValidateEvent,
+    decodeHistory,
+    decodeUserMessageContent
+});
+
+const getChatCompletionWithStreamingFunction = props => async (source, body) => {
+    const event = createRequestEvent({
+        source,
+        body: {
+            ...body,
+            model: props.vendor.model,
+            stream: true,
+            stream_options: {
+                include_usage: true
+            }
+        },
+        options: {
+            ...props.vendor.options,
+            signal: props.abortSignal
+        }
+    });
+    await props.dispatch(event);
+    const backoffStrategy = props.config?.backoffStrategy ?? (props => {
+        throw props.error;
+    });
+    const completion = await (async () => {
+        let count = 0;
+        while (true) {
+            try {
+                return await props.vendor.api.chat.completions.create(event.body, event.options);
+            } catch (error) {
+                const waiting = backoffStrategy({
+                    count,
+                    error
+                });
+                await new Promise((resolve => setTimeout(resolve, waiting)));
+                count++;
+            }
+        }
+    })();
+    const [streamForEvent, temporaryStream] = StreamUtil.transform(completion.toReadableStream(), (value => ChatGptCompletionMessageUtil.transformCompletionChunk(value)), props.abortSignal).tee();
+    const [streamForAggregate, streamForReturn] = temporaryStream.tee();
+    (async () => {
+        const reader = streamForAggregate.getReader();
+        while (true) {
+            const chunk = await reader.read();
+            if (chunk.done || props.abortSignal?.aborted === true) {
+                break;
+            }
+            if (chunk.value.usage != null) {
+                AgenticaTokenUsageAggregator.aggregate({
+                    kind: source,
+                    completionUsage: chunk.value.usage,
+                    usage: props.usage
+                });
+            }
+        }
+    })().catch((() => {}));
+    const [streamForStream, streamForJoin] = streamForEvent.tee();
+    void props.dispatch({
+        id: v4(),
+        type: "response",
+        source,
+        stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader(), props.abortSignal),
+        body: event.body,
+        options: event.options,
+        join: async () => {
+            const chunks = await StreamUtil.readAll(streamForJoin, props.abortSignal);
+            return ChatGptCompletionMessageUtil.merge(chunks);
+        },
+        created_at: (new Date).toISOString()
+    }).catch((() => {}));
+    return streamForReturn;
+};
+
 
 class Agentica {
     constructor(props) {
         this.props = props;
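
Inside `getChatCompletionWithStreamingFunction`, the completion request is retried in a loop governed by `config.backoffStrategy`, which receives `{ count, error }` and returns a delay in milliseconds (the default rethrows immediately); the resulting stream is then teed three ways, feeding the response event, token-usage aggregation, and the returned stream. A sketch of a custom strategy; the attempt cap and delay values here are arbitrary choices, not defaults from the package:

```ts
const config = {
  // Retry transient vendor errors with capped exponential backoff.
  backoffStrategy: ({ count, error }: { count: number; error: unknown }): number => {
    if (count >= 5) throw error; // exhaust after 5 attempts
    return Math.min(1_000 * 2 ** count, 10_000); // 1s, 2s, 4s, ... capped at 10s
  },
};
```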
@@ -3192,75 +3294,13 @@ class Agentica {
         return this.token_usage_;
     }
     getContext(props) {
-        const request = async (source, body) => {
-            const event = createRequestEvent({
-                source,
-                body: {
-                    ...body,
-                    model: this.props.vendor.model,
-                    stream: true,
-                    stream_options: {
-                        include_usage: true
-                    }
-                },
-                options: {
-                    ...this.props.vendor.options,
-                    signal: props.abortSignal
-                }
-            });
-            await props.dispatch(event);
-            const backoffStrategy = this.props.config?.backoffStrategy ?? (props => {
-                throw props.error;
-            });
-            const completion = await (async () => {
-                let count = 0;
-                while (true) {
-                    try {
-                        return await this.props.vendor.api.chat.completions.create(event.body, event.options);
-                    } catch (error) {
-                        const waiting = backoffStrategy({
-                            count,
-                            error
-                        });
-                        await new Promise((resolve => setTimeout(resolve, waiting)));
-                        count++;
-                    }
-                }
-            })();
-            const [streamForEvent, temporaryStream] = StreamUtil.transform(completion.toReadableStream(), (value => ChatGptCompletionMessageUtil.transformCompletionChunk(value))).tee();
-            const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-            (async () => {
-                const reader = streamForAggregate.getReader();
-                while (true) {
-                    const chunk = await reader.read();
-                    if (chunk.done) {
-                        break;
-                    }
-                    if (chunk.value.usage != null) {
-                        AgenticaTokenUsageAggregator.aggregate({
-                            kind: source,
-                            completionUsage: chunk.value.usage,
-                            usage: props.usage
-                        });
-                    }
-                }
-            })().catch((() => {}));
-            const [streamForStream, streamForJoin] = streamForEvent.tee();
-            void props.dispatch({
-                id: v4(),
-                type: "response",
-                source,
-                stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-                body: event.body,
-                options: event.options,
-                join: async () => {
-                    const chunks = await StreamUtil.readAll(streamForJoin);
-                    return ChatGptCompletionMessageUtil.merge(chunks);
-                },
-                created_at: (new Date).toISOString()
-            }).catch((() => {}));
-            return streamForReturn;
-        };
+        const request = getChatCompletionWithStreamingFunction({
+            vendor: this.props.vendor,
+            config: this.props.config,
+            dispatch: props.dispatch,
+            abortSignal: props.abortSignal,
+            usage: this.token_usage_
+        });
         return {
             operations: this.operations_,
             config: this.props.config,
@@ -3310,32 +3350,6 @@ class Agentica {
     }
 }
 
-var index$1 = Object.freeze({
-    __proto__: null,
-    createAssistantMessageEvent,
-    createAssistantMessageHistory,
-    createCallEvent,
-    createCancelEvent,
-    createCancelHistory,
-    createDescribeEvent,
-    createDescribeHistory,
-    createExecuteEvent,
-    createExecuteHistory,
-    createInitializeEvent,
-    createJsonParseErrorEvent,
-    createOperationSelection,
-    createRequestEvent,
-    createResponseEvent,
-    createSelectEvent,
-    createSelectHistory,
-    createSystemMessageHistory,
-    createUserMessageEvent,
-    createUserMessageHistory,
-    createValidateEvent,
-    decodeHistory,
-    decodeUserMessageContent
-});
-
 function assertHttpController(props) {
     const document = OpenApi.convert((() => {
         const _io0 = input => null !== input.swagger && undefined !== input.swagger && ("2.0" === input.swagger || "string" === typeof input.swagger && RegExp(/^2\.0\.[+-]?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?$/).test(input.swagger)) && (undefined === input.info || "object" === typeof input.info && null !== input.info && _io1(input.info)) && (undefined === input.host || "string" === typeof input.host) && (undefined === input.basePath || "string" === typeof input.basePath) && (undefined === input.consumes || Array.isArray(input.consumes) && input.consumes.every((elem => "string" === typeof elem))) && (undefined === input.produces || Array.isArray(input.produces) && input.produces.every((elem => "string" === typeof elem))) && (undefined === input.definitions || "object" === typeof input.definitions && null !== input.definitions && false === Array.isArray(input.definitions) && _io4(input.definitions)) && (undefined === input.parameters || "object" === typeof input.parameters && null !== input.parameters && false === Array.isArray(input.parameters) && _io16(input.parameters)) && (undefined === input.responses || "object" === typeof input.responses && null !== input.responses && false === Array.isArray(input.responses) && _io29(input.responses)) && (undefined === input.securityDefinitions || "object" === typeof input.securityDefinitions && null !== input.securityDefinitions && false === Array.isArray(input.securityDefinitions) && _io31(input.securityDefinitions)) && (undefined === input.security || Array.isArray(input.security) && input.security.every((elem => "object" === typeof elem && null !== elem && false === Array.isArray(elem) && _io39(elem)))) && (undefined === input.paths || "object" === typeof input.paths && null !== input.paths && false === Array.isArray(input.paths) && _io40(input.paths)) && (undefined === input.tags || Array.isArray(input.tags) && input.tags.every((elem => "object" === typeof elem && null !== elem && _io47(elem))));
@@ -42020,7 +42034,7 @@ class MicroAgentica {
            histories: this.props.histories?.slice()
        });
    }
-    async conversate(content) {
+    async conversate(content, options = {}) {
        const histories = [];
        const dispatch = async event => {
            try {
@@ -42047,7 +42061,8 @@ class MicroAgentica {
        const ctx = this.getContext({
            prompt,
            dispatch,
-            usage: this.token_usage_
+            usage: this.token_usage_,
+            abortSignal: options.abortSignal
        });
        const executes = await call(ctx, this.operations_.array);
        if (executes.length && this.props.config?.executor?.describe !== null && this.props.config?.executor?.describe !== false) {
@@ -42076,72 +42091,13 @@ class MicroAgentica {
        return this.token_usage_;
    }
    getContext(props) {
-        const request = async (source, body) => {
-            const event = createRequestEvent({
-                source,
-                body: {
-                    ...body,
-                    model: this.props.vendor.model,
-                    stream: true,
-                    stream_options: {
-                        include_usage: true
-                    }
-                },
-                options: this.props.vendor.options
-            });
-            await props.dispatch(event);
-            const backoffStrategy = this.props.config?.backoffStrategy ?? (props => {
-                throw props.error;
-            });
-            const completion = await (async () => {
-                let count = 0;
-                while (true) {
-                    try {
-                        return await this.props.vendor.api.chat.completions.create(event.body, event.options);
-                    } catch (error) {
-                        const waiting = backoffStrategy({
-                            count,
-                            error
-                        });
-                        await new Promise((resolve => setTimeout(resolve, waiting)));
-                        count++;
-                    }
-                }
-            })();
-            const [streamForEvent, temporaryStream] = StreamUtil.transform(completion.toReadableStream(), (value => ChatGptCompletionMessageUtil.transformCompletionChunk(value))).tee();
-            const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-            void (async () => {
-                const reader = streamForAggregate.getReader();
-                while (true) {
-                    const chunk = await reader.read();
-                    if (chunk.done) {
-                        break;
-                    }
-                    if (chunk.value.usage != null) {
-                        AgenticaTokenUsageAggregator.aggregate({
-                            kind: source,
-                            completionUsage: chunk.value.usage,
-                            usage: props.usage
-                        });
-                    }
-                }
-            })().catch((() => {}));
-            const [streamForStream, streamForJoin] = streamForEvent.tee();
-            void props.dispatch({
-                id: v4(),
-                type: "response",
-                source,
-                stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-                body: event.body,
-                options: event.options,
-                join: async () => {
-                    const chunks = await StreamUtil.readAll(streamForJoin);
-                    return ChatGptCompletionMessageUtil.merge(chunks);
-                },
-                created_at: (new Date).toISOString()
-            }).catch((() => {}));
-            return streamForReturn;
-        };
+        const request = getChatCompletionWithStreamingFunction({
+            vendor: this.props.vendor,
+            config: this.props.config,
+            dispatch: props.dispatch,
+            abortSignal: props.abortSignal,
+            usage: this.token_usage_
+        });
        return {
            operations: this.operations_,
            config: this.props.config,