@agentica/core 0.32.3-dev.3 → 0.32.3
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- package/lib/Agentica.js +7 -64
- package/lib/Agentica.js.map +1 -1
- package/lib/MicroAgentica.d.ts +3 -1
- package/lib/MicroAgentica.js +10 -66
- package/lib/MicroAgentica.js.map +1 -1
- package/lib/index.mjs +115 -165
- package/lib/index.mjs.map +1 -1
- package/lib/utils/request.d.ts +12 -0
- package/lib/utils/request.js +80 -0
- package/lib/utils/request.js.map +1 -0
- package/package.json +1 -1
- package/src/Agentica.ts +9 -91
- package/src/MicroAgentica.ts +14 -87
- package/src/utils/request.ts +100 -0
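
In short: 0.32.3 extracts the streaming chat-completion request logic that was duplicated inside `Agentica.getContext()` and `MicroAgentica.getContext()` into a shared helper, `getChatCompletionWithStreamingFunction()` in `src/utils/request.ts`, and adds an optional `abortSignal` option to `MicroAgentica.conversate()` that is forwarded to the LLM vendor request. A minimal usage sketch of the new option; the construction of `agent` is assumed and not part of this diff:

    // Sketch only: assumes an already-constructed MicroAgentica instance.
    import { MicroAgentica } from "@agentica/core";
    declare const agent: MicroAgentica<"chatgpt">;

    const controller = new AbortController();
    const pending = agent.conversate("Hello", {
      abortSignal: controller.signal, // new in 0.32.3
    });
    controller.abort(); // cancels the in-flight chat completion request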
package/lib/utils/request.js ADDED
@@ -0,0 +1,80 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getChatCompletionWithStreamingFunction = void 0;
+const ChatGptCompletionMessageUtil_1 = require("./ChatGptCompletionMessageUtil");
+const StreamUtil_1 = require("./StreamUtil");
+const factory_1 = require("../factory");
+const AgenticaTokenUsageAggregator_1 = require("../context/internal/AgenticaTokenUsageAggregator");
+const uuid_1 = require("uuid");
+const getChatCompletionWithStreamingFunction = (props) => (source, body) => __awaiter(void 0, void 0, void 0, function* () {
+    var _a, _b;
+    const event = (0, factory_1.createRequestEvent)({
+        source,
+        body: Object.assign(Object.assign({}, body), { model: props.vendor.model, stream: true, stream_options: {
+                include_usage: true,
+            } }),
+        options: Object.assign(Object.assign({}, props.vendor.options), { signal: props.abortSignal }),
+    });
+    yield props.dispatch(event);
+    // completion
+    const backoffStrategy = (_b = (_a = props.config) === null || _a === void 0 ? void 0 : _a.backoffStrategy) !== null && _b !== void 0 ? _b : ((props) => {
+        throw props.error;
+    });
+    const completion = yield (() => __awaiter(void 0, void 0, void 0, function* () {
+        let count = 0;
+        while (true) {
+            try {
+                return yield props.vendor.api.chat.completions.create(event.body, event.options);
+            }
+            catch (error) {
+                const waiting = backoffStrategy({ count, error });
+                yield new Promise(resolve => setTimeout(resolve, waiting));
+                count++;
+            }
+        }
+    }))();
+    const [streamForEvent, temporaryStream] = StreamUtil_1.StreamUtil.transform(completion.toReadableStream(), value => ChatGptCompletionMessageUtil_1.ChatGptCompletionMessageUtil.transformCompletionChunk(value)).tee();
+    const [streamForAggregate, streamForReturn] = temporaryStream.tee();
+    (() => __awaiter(void 0, void 0, void 0, function* () {
+        const reader = streamForAggregate.getReader();
+        while (true) {
+            const chunk = yield reader.read();
+            if (chunk.done) {
+                break;
+            }
+            if (chunk.value.usage != null) {
+                AgenticaTokenUsageAggregator_1.AgenticaTokenUsageAggregator.aggregate({
+                    kind: source,
+                    completionUsage: chunk.value.usage,
+                    usage: props.usage,
+                });
+            }
+        }
+    }))().catch(() => { });
+    const [streamForStream, streamForJoin] = streamForEvent.tee();
+    void props.dispatch({
+        id: (0, uuid_1.v4)(),
+        type: "response",
+        source,
+        stream: (0, StreamUtil_1.streamDefaultReaderToAsyncGenerator)(streamForStream.getReader()),
+        body: event.body,
+        options: event.options,
+        join: () => __awaiter(void 0, void 0, void 0, function* () {
+            const chunks = yield StreamUtil_1.StreamUtil.readAll(streamForJoin);
+            return ChatGptCompletionMessageUtil_1.ChatGptCompletionMessageUtil.merge(chunks);
+        }),
+        created_at: new Date().toISOString(),
+    }).catch(() => { });
+    return streamForReturn;
+});
+exports.getChatCompletionWithStreamingFunction = getChatCompletionWithStreamingFunction;
+//# sourceMappingURL=request.js.map
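
The file above is the compiled CommonJS output of the new `src/utils/request.ts` (shown in source form at the bottom of this diff). Note the retry loop around `props.vendor.api.chat.completions.create()`: on failure it asks `config.backoffStrategy` for a waiting time, and the default strategy simply rethrows, so no retry happens unless one is configured. A sketch of a custom strategy matching the `{ count, error }` input and millisecond return value used above:

    // Hypothetical backoff strategy: retry up to three times with
    // exponential delays, then give up by rethrowing the error.
    const backoffStrategy = (props: { count: number; error: unknown }): number => {
      if (props.count >= 3)
        throw props.error;
      return 1_000 * 2 ** props.count; // 1s, 2s, 4s
    };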
package/lib/utils/request.js.map ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"request.js","sourceRoot":"","sources":["../../src/utils/request.ts"],"names":[],"mappings":";;;;;;;;;;;;AAEA,iFAA8E;AAC9E,6CAA+E;AAC/E,wCAAgD;AAGhD,mGAAgG;AAEhG,+BAA0B;AAEnB,MAAM,sCAAsC,GAAG,CAAiC,KAMtF,EAAE,EAAE,CAAC,CACJ,MAA2B,EAC3B,IAA0E,EAC1E,EAAE;;IACF,MAAM,KAAK,GAAyB,IAAA,4BAAkB,EAAC;QACrD,MAAM;QACN,IAAI,kCACC,IAAI,KACP,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,KAAK,EACzB,MAAM,EAAE,IAAI,EACZ,cAAc,EAAE;gBACd,aAAa,EAAE,IAAI;aACpB,GACF;QACD,OAAO,kCACF,KAAK,CAAC,MAAM,CAAC,OAAO,KACvB,MAAM,EAAE,KAAK,CAAC,WAAW,GAC1B;KACF,CAAC,CAAC;IACH,MAAM,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAE5B,aAAa;IACb,MAAM,eAAe,GAAG,MAAA,MAAA,KAAK,CAAC,MAAM,0CAAE,eAAe,mCAAI,CAAC,CAAC,KAAK,EAAE,EAAE;QAClE,MAAM,KAAK,CAAC,KAAK,CAAC;IACpB,CAAC,CAAC,CAAC;IACH,MAAM,UAAU,GAAG,MAAM,CAAC,GAAS,EAAE;QACnC,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,CACnD,KAAK,CAAC,IAAI,EACV,KAAK,CAAC,OAAO,CACd,CAAC;YACJ,CAAC;YACD,OAAO,KAAK,EAAE,CAAC;gBACb,MAAM,OAAO,GAAG,eAAe,CAAC,EAAE,KAAK,EAAE,KAAK,EAAE,CAAC,CAAC;gBAClD,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC;gBAC3D,KAAK,EAAE,CAAC;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA,CAAC,EAAE,CAAC;IAEL,MAAM,CAAC,cAAc,EAAE,eAAe,CAAC,GAAG,uBAAU,CAAC,SAAS,CAC5D,UAAU,CAAC,gBAAgB,EAAgC,EAC3D,KAAK,CAAC,EAAE,CACN,2DAA4B,CAAC,wBAAwB,CAAC,KAAK,CAAC,CAC/D,CAAC,GAAG,EAAE,CAAC;IAER,MAAM,CAAC,kBAAkB,EAAE,eAAe,CAAC,GAAG,eAAe,CAAC,GAAG,EAAE,CAAC;IAEpE,CAAC,GAAS,EAAE;QACV,MAAM,MAAM,GAAG,kBAAkB,CAAC,SAAS,EAAE,CAAC;QAC9C,OAAO,IAAI,EAAE,CAAC;YACZ,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;YAClC,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;gBACf,MAAM;YACR,CAAC;YACD,IAAI,KAAK,CAAC,KAAK,CAAC,KAAK,IAAI,IAAI,EAAE,CAAC;gBAC9B,2DAA4B,CAAC,SAAS,CAAC;oBACrC,IAAI,EAAE,MAAM;oBACZ,eAAe,EAAE,KAAK,CAAC,KAAK,CAAC,KAAK;oBAClC,KAAK,EAAE,KAAK,CAAC,KAAK;iBACnB,CAAC,CAAC;YACL,CAAC;QACH,CAAC;IACH,CAAC,CAAA,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;IAErB,MAAM,CAAC,eAAe,EAAE,aAAa,CAAC,GAAG,cAAc,CAAC,GAAG,EAAE,CAAC;IAC9D,KAAK,KAAK,CAAC,QAAQ,CAAC;QAClB,EAAE,EAAE,IAAA,SAAE,GAAE;QACR,IAAI,EAAE,UAAU;QAChB,MAAM;QACN,MAAM,EAAE,IAAA,gDAAmC,EAAC,eAAe,CAAC,SAAS,EAAE,CAAC;QACxE,IAAI,EAAE,KAAK,CAAC,IAAI;QAChB,OAAO,EAAE,KAAK,CAAC,OAAO;QACtB,IAAI,EAAE,GAAS,EAAE;YACf,MAAM,MAAM,GAAG,MAAM,uBAAU,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;YACvD,OAAO,2DAA4B,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;QACpD,CAAC,CAAA;QACD,UAAU,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE;KACrC,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;IACnB,OAAO,eAAe,CAAC;AACzB,CAAC,CAAA,CAAC;AAxFW,QAAA,sCAAsC,0CAwFjD"}
package/package.json CHANGED
package/src/Agentica.ts CHANGED
@@ -1,16 +1,12 @@
 import type { ILlmSchema } from "@samchon/openapi";
-import type OpenAI from "openai";
 
 import { Semaphore } from "tstl";
-import { v4 } from "uuid";
 
 import type { AgenticaContext } from "./context/AgenticaContext";
 import type { AgenticaOperation } from "./context/AgenticaOperation";
 import type { AgenticaOperationCollection } from "./context/AgenticaOperationCollection";
 import type { AgenticaOperationSelection } from "./context/AgenticaOperationSelection";
-import type { AgenticaEventSource } from "./events";
 import type { AgenticaEvent } from "./events/AgenticaEvent";
-import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";
 import type { AgenticaUserMessageEvent } from "./events/AgenticaUserMessageEvent";
 import type { AgenticaUserMessageContent } from "./histories";
 import type { AgenticaHistory } from "./histories/AgenticaHistory";
@@ -22,13 +18,11 @@ import type { IAgenticaVendor } from "./structures/IAgenticaVendor";
 
 import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
 import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
-import {
-import { createInitializeEvent, createRequestEvent, createUserMessageEvent } from "./factory/events";
+import { createInitializeEvent, createUserMessageEvent } from "./factory/events";
 import { execute } from "./orchestrate/execute";
 import { transformHistory } from "./transformers/transformHistory";
 import { __map_take } from "./utils/__map_take";
-import {
-import { streamDefaultReaderToAsyncGenerator, StreamUtil } from "./utils/StreamUtil";
+import { getChatCompletionWithStreamingFunction } from "./utils/request";
 
 /**
  * Agentica AI chatbot agent.
@@ -264,89 +258,13 @@ export class Agentica<Model extends ILlmSchema.Model> {
     dispatch: (event: AgenticaEvent<Model>) => Promise<void>;
     abortSignal?: AbortSignal;
   }): AgenticaContext<Model> {
-    const request = async (
-      source: AgenticaEventSource,
-      body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
-    ) => {
-      const event: AgenticaRequestEvent = createRequestEvent({
-        source,
-        body: {
-          ...body,
-          model: this.props.vendor.model,
-          stream: true,
-          stream_options: {
-            include_usage: true,
-          },
-        },
-        options: {
-          ...this.props.vendor.options,
-          signal: props.abortSignal,
-        },
-      });
-      await props.dispatch(event);
-
-      // completion
-      const backoffStrategy = this.props.config?.backoffStrategy ?? ((props) => {
-        throw props.error;
-      });
-      const completion = await (async () => {
-        let count = 0;
-        while (true) {
-          try {
-            return await this.props.vendor.api.chat.completions.create(
-              event.body,
-              event.options,
-            );
-          }
-          catch (error) {
-            const waiting = backoffStrategy({ count, error });
-            await new Promise(resolve => setTimeout(resolve, waiting));
-            count++;
-          }
-        }
-      })();
-
-      const [streamForEvent, temporaryStream] = StreamUtil.transform(
-        completion.toReadableStream() as ReadableStream<Uint8Array>,
-        value =>
-          ChatGptCompletionMessageUtil.transformCompletionChunk(value),
-      ).tee();
-
-      const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-
-      (async () => {
-        const reader = streamForAggregate.getReader();
-        while (true) {
-          const chunk = await reader.read();
-          if (chunk.done) {
-            break;
-          }
-          if (chunk.value.usage != null) {
-            AgenticaTokenUsageAggregator.aggregate({
-              kind: source,
-              completionUsage: chunk.value.usage,
-              usage: props.usage,
-            });
-          }
-        }
-      })().catch(() => {});
-
-      const [streamForStream, streamForJoin] = streamForEvent.tee();
-      void props.dispatch({
-        id: v4(),
-        type: "response",
-        source,
-        stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-        body: event.body,
-        options: event.options,
-        join: async () => {
-          const chunks = await StreamUtil.readAll(streamForJoin);
-          return ChatGptCompletionMessageUtil.merge(chunks);
-        },
-        created_at: new Date().toISOString(),
-      }).catch(() => {});
-      return streamForReturn;
-    };
+    const request = getChatCompletionWithStreamingFunction<Model>({
+      vendor: this.props.vendor,
+      config: this.props.config,
+      dispatch: props.dispatch,
+      abortSignal: props.abortSignal,
+      usage: this.token_usage_,
+    });
 
     return {
       // APPLICATION
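
Functionally this is a pure extraction: the deleted inline closure and the new shared factory perform the same work. `vendor`, `config`, `dispatch`, `abortSignal`, and the agent's internal token-usage accumulator are now bound once per context, and the returned closure keeps the same `(source, body)` call signature the inline `request` function had before.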
package/src/MicroAgentica.ts CHANGED
@@ -1,14 +1,11 @@
 import type { ILlmSchema } from "@samchon/openapi";
-import type OpenAI from "openai";
 
 import { Semaphore } from "tstl";
-import { v4 } from "uuid";
 
 import type { AgenticaOperation } from "./context/AgenticaOperation";
 import type { AgenticaOperationCollection } from "./context/AgenticaOperationCollection";
 import type { MicroAgenticaContext } from "./context/MicroAgenticaContext";
 import type { AgenticaUserMessageEvent } from "./events";
-import type { AgenticaRequestEvent } from "./events/AgenticaRequestEvent";
 import type { MicroAgenticaEvent } from "./events/MicroAgenticaEvent";
 import type { AgenticaUserMessageContent } from "./histories";
 import type { AgenticaExecuteHistory } from "./histories/AgenticaExecuteHistory";
@@ -20,13 +17,11 @@ import type { IMicroAgenticaProps } from "./structures/IMicroAgenticaProps";
 
 import { AgenticaTokenUsage } from "./context/AgenticaTokenUsage";
 import { AgenticaOperationComposer } from "./context/internal/AgenticaOperationComposer";
-import {
-import { createRequestEvent, createUserMessageEvent } from "./factory/events";
+import { createUserMessageEvent } from "./factory/events";
 import { call, describe } from "./orchestrate";
 import { transformHistory } from "./transformers/transformHistory";
 import { __map_take } from "./utils/__map_take";
-import {
-import { streamDefaultReaderToAsyncGenerator, StreamUtil } from "./utils/StreamUtil";
+import { getChatCompletionWithStreamingFunction } from "./utils/request";
 
 /**
  * Micro AI chatbot.
@@ -128,6 +123,9 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
    */
   public async conversate(
     content: string | AgenticaUserMessageContent | Array<AgenticaUserMessageContent>,
+    options: {
+      abortSignal?: AbortSignal;
+    } = {},
   ): Promise<MicroAgenticaHistory<Model>[]> {
     const histories: Array<() => Promise<MicroAgenticaHistory<Model>>> = [];
     const dispatch = async (event: MicroAgenticaEvent<Model>): Promise<void> => {
@@ -164,6 +162,7 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
       prompt,
       dispatch,
       usage: this.token_usage_,
+      abortSignal: options.abortSignal,
     });
     const executes: AgenticaExecuteHistory<Model>[] = await call(
       ctx,
@@ -248,87 +247,15 @@ export class MicroAgentica<Model extends ILlmSchema.Model> {
     prompt: AgenticaUserMessageEvent;
     usage: AgenticaTokenUsage;
     dispatch: (event: MicroAgenticaEvent<Model>) => Promise<void>;
+    abortSignal?: AbortSignal;
   }): MicroAgenticaContext<Model> {
-    const request = async (
-      source: AgenticaEventSource,
-      body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
-    ) => {
-      const event: AgenticaRequestEvent = createRequestEvent({
-        source,
-        body: {
-          ...body,
-          model: this.props.vendor.model,
-          stream: true,
-          stream_options: {
-            include_usage: true,
-          },
-        },
-        options: this.props.vendor.options,
-      });
-      await props.dispatch(event);
-
-      // completion
-      const backoffStrategy = this.props.config?.backoffStrategy ?? ((props) => {
-        throw props.error;
-      });
-      const completion = await (async () => {
-        let count = 0;
-        while (true) {
-          try {
-            return await this.props.vendor.api.chat.completions.create(
-              event.body,
-              event.options,
-            );
-          }
-          catch (error) {
-            const waiting = backoffStrategy({ count, error });
-            await new Promise(resolve => setTimeout(resolve, waiting));
-            count++;
-          }
-        }
-      })();
-
-      const [streamForEvent, temporaryStream] = StreamUtil.transform(
-        completion.toReadableStream() as ReadableStream<Uint8Array>,
-        value =>
-          ChatGptCompletionMessageUtil.transformCompletionChunk(value),
-      ).tee();
-
-      const [streamForAggregate, streamForReturn] = temporaryStream.tee();
-
-      void (async () => {
-        const reader = streamForAggregate.getReader();
-        while (true) {
-          const chunk = await reader.read();
-          if (chunk.done) {
-            break;
-          }
-          if (chunk.value.usage != null) {
-            AgenticaTokenUsageAggregator.aggregate({
-              kind: source,
-              completionUsage: chunk.value.usage,
-              usage: props.usage,
-            });
-          }
-        }
-      })().catch(() => {});
-
-      const [streamForStream, streamForJoin] = streamForEvent.tee();
-      void props.dispatch({
-        id: v4(),
-        type: "response",
-        source,
-        stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
-        body: event.body,
-        options: event.options,
-        join: async () => {
-          const chunks = await StreamUtil.readAll(streamForJoin);
-          return ChatGptCompletionMessageUtil.merge(chunks);
-        },
-        created_at: new Date().toISOString(),
-      }).catch(() => {});
-      return streamForReturn;
-    };
+    const request = getChatCompletionWithStreamingFunction<Model>({
+      vendor: this.props.vendor,
+      config: this.props.config,
+      dispatch: props.dispatch,
+      abortSignal: props.abortSignal,
+      usage: this.token_usage_,
+    });
 
     return {
       operations: this.operations_,
       config: this.props.config,
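
One behavioral nuance visible above: the deleted MicroAgentica code passed `options: this.props.vendor.options` without any `signal`, so request cancellation is genuinely new for MicroAgentica in this release, whereas Agentica had already been forwarding `props.abortSignal`. The shared helper now wires the signal identically for both classes.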
package/src/utils/request.ts ADDED
@@ -0,0 +1,100 @@
+import OpenAI from "openai";
+import { AgenticaEventSource, AgenticaRequestEvent, AgenticaResponseEvent } from "../events";
+import { ChatGptCompletionMessageUtil } from "./ChatGptCompletionMessageUtil";
+import { streamDefaultReaderToAsyncGenerator, StreamUtil } from "./StreamUtil";
+import { createRequestEvent } from "../factory";
+import { IAgenticaConfig, IAgenticaVendor, IMicroAgenticaConfig } from "../structures";
+import { ILlmSchema } from "@samchon/openapi";
+import { AgenticaTokenUsageAggregator } from "../context/internal/AgenticaTokenUsageAggregator";
+import { AgenticaTokenUsage } from "../context/AgenticaTokenUsage";
+import { v4 } from "uuid";
+
+export const getChatCompletionWithStreamingFunction = <Model extends ILlmSchema.Model>(props: {
+  vendor: IAgenticaVendor;
+  config?: IAgenticaConfig<Model> | IMicroAgenticaConfig<Model>;
+  dispatch: (event: AgenticaRequestEvent | AgenticaResponseEvent) => Promise<void>;
+  abortSignal?: AbortSignal;
+  usage: AgenticaTokenUsage;
+}) => async (
+  source: AgenticaEventSource,
+  body: Omit<OpenAI.ChatCompletionCreateParamsStreaming, "model" | "stream">,
+) => {
+  const event: AgenticaRequestEvent = createRequestEvent({
+    source,
+    body: {
+      ...body,
+      model: props.vendor.model,
+      stream: true,
+      stream_options: {
+        include_usage: true,
+      },
+    },
+    options: {
+      ...props.vendor.options,
+      signal: props.abortSignal,
+    },
+  });
+  await props.dispatch(event);
+
+  // completion
+  const backoffStrategy = props.config?.backoffStrategy ?? ((props) => {
+    throw props.error;
+  });
+  const completion = await (async () => {
+    let count = 0;
+    while (true) {
+      try {
+        return await props.vendor.api.chat.completions.create(
+          event.body,
+          event.options,
+        );
+      }
+      catch (error) {
+        const waiting = backoffStrategy({ count, error });
+        await new Promise(resolve => setTimeout(resolve, waiting));
+        count++;
+      }
+    }
+  })();
+
+  const [streamForEvent, temporaryStream] = StreamUtil.transform(
+    completion.toReadableStream() as ReadableStream<Uint8Array>,
+    value =>
+      ChatGptCompletionMessageUtil.transformCompletionChunk(value),
+  ).tee();
+
+  const [streamForAggregate, streamForReturn] = temporaryStream.tee();
+
+  (async () => {
+    const reader = streamForAggregate.getReader();
+    while (true) {
+      const chunk = await reader.read();
+      if (chunk.done) {
+        break;
+      }
+      if (chunk.value.usage != null) {
+        AgenticaTokenUsageAggregator.aggregate({
+          kind: source,
+          completionUsage: chunk.value.usage,
+          usage: props.usage,
+        });
+      }
+    }
+  })().catch(() => {});
+
+  const [streamForStream, streamForJoin] = streamForEvent.tee();
+  void props.dispatch({
+    id: v4(),
+    type: "response",
+    source,
+    stream: streamDefaultReaderToAsyncGenerator(streamForStream.getReader()),
+    body: event.body,
+    options: event.options,
+    join: async () => {
+      const chunks = await StreamUtil.readAll(streamForJoin);
+      return ChatGptCompletionMessageUtil.merge(chunks);
+    },
+    created_at: new Date().toISOString(),
+  }).catch(() => {});
+  return streamForReturn;
+};
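
The implementation fans the single completion stream out with `ReadableStream.tee()`: one branch feeds the dispatched "response" event, one feeds token-usage aggregation, and one is returned to the caller. A self-contained illustration of that fan-out pattern (standard Web Streams API, not Agentica code):

    // Each tee() branch independently replays every chunk of the source.
    const source = new ReadableStream<number>({
      start(controller) {
        [1, 2, 3].forEach(n => controller.enqueue(n));
        controller.close();
      },
    });
    const [forEvent, rest] = source.tee();
    const [forUsage, forReturn] = rest.tee();
    // forEvent, forUsage, and forReturn each yield 1, 2, 3.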