ai 4.0.0-canary.1 → 4.0.0-canary.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +129 -0
- package/README.md +1 -1
- package/dist/index.d.mts +96 -675
- package/dist/index.d.ts +96 -675
- package/dist/index.js +1219 -1584
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1209 -1562
- package/dist/index.mjs.map +1 -1
- package/package.json +10 -24
- package/react/dist/index.d.ts +0 -17
- package/rsc/dist/index.d.ts +19 -19
- package/rsc/dist/rsc-server.d.mts +19 -19
- package/rsc/dist/rsc-server.mjs +9 -132
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/react/dist/index.server.d.mts +0 -17
- package/react/dist/index.server.d.ts +0 -17
- package/react/dist/index.server.js +0 -50
- package/react/dist/index.server.js.map +0 -1
- package/react/dist/index.server.mjs +0 -23
- package/react/dist/index.server.mjs.map +0 -1
package/dist/index.js
CHANGED
@@ -21,7 +21,6 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var streams_exports = {};
 __export(streams_exports, {
   AISDKError: () => import_provider13.AISDKError,
-  AIStream: () => AIStream,
   APICallError: () => import_provider13.APICallError,
   AssistantResponse: () => AssistantResponse,
   DownloadError: () => DownloadError,
@@ -43,48 +42,35 @@ __export(streams_exports, {
   NoSuchProviderError: () => NoSuchProviderError,
   NoSuchToolError: () => NoSuchToolError,
   RetryError: () => RetryError,
-  StreamData: () =>
-  StreamingTextResponse: () => StreamingTextResponse,
+  StreamData: () => StreamData,
   TypeValidationError: () => import_provider13.TypeValidationError,
   UnsupportedFunctionalityError: () => import_provider13.UnsupportedFunctionalityError,
   convertToCoreMessages: () => convertToCoreMessages,
   cosineSimilarity: () => cosineSimilarity,
-  createCallbacksTransformer: () => createCallbacksTransformer,
-  createEventStreamTransformer: () => createEventStreamTransformer,
   createStreamDataTransformer: () => createStreamDataTransformer,
   embed: () => embed,
   embedMany: () => embedMany,
-  experimental_AssistantResponse: () => experimental_AssistantResponse,
-  experimental_StreamData: () => experimental_StreamData,
-  experimental_createModelRegistry: () => experimental_createModelRegistry,
   experimental_createProviderRegistry: () => experimental_createProviderRegistry,
   experimental_customProvider: () => experimental_customProvider,
-  experimental_generateObject: () => experimental_generateObject,
-  experimental_generateText: () => experimental_generateText,
-  experimental_streamObject: () => experimental_streamObject,
-  experimental_streamText: () => experimental_streamText,
   experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
-  formatStreamPart: () =>
-  generateId: () =>
+  formatStreamPart: () => import_ui_utils10.formatStreamPart,
+  generateId: () => import_provider_utils11.generateId,
   generateObject: () => generateObject,
   generateText: () => generateText,
-  jsonSchema: () =>
-  parseStreamPart: () =>
-  processDataProtocolResponse: () =>
-  readDataStream: () =>
-  readableFromAsyncIterable: () => readableFromAsyncIterable,
+  jsonSchema: () => import_ui_utils7.jsonSchema,
+  parseStreamPart: () => import_ui_utils10.parseStreamPart,
+  processDataProtocolResponse: () => import_ui_utils10.processDataProtocolResponse,
+  readDataStream: () => import_ui_utils10.readDataStream,
   streamObject: () => streamObject,
   streamText: () => streamText,
-
-  tool: () => tool,
-  trimStartOfStreamHelper: () => trimStartOfStreamHelper
+  tool: () => tool
 });
 module.exports = __toCommonJS(streams_exports);
-var
-var
+var import_ui_utils10 = require("@ai-sdk/ui-utils");
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
 
 // core/index.ts
-var
+var import_ui_utils7 = require("@ai-sdk/ui-utils");
 
 // util/retry-with-exponential-backoff.ts
 var import_provider2 = require("@ai-sdk/provider");
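Note on the export changes above: the v3-era aliases (`experimental_generateText`, `experimental_streamObject`, `AIStream`, `readableFromAsyncIterable`, `trimStartOfStreamHelper`, and friends) are dropped, and `formatStreamPart`, `parseStreamPart`, `processDataProtocolResponse`, `readDataStream`, `jsonSchema`, and `generateId` become re-exports from `@ai-sdk/ui-utils` and `@ai-sdk/provider-utils`. A minimal migration sketch (CommonJS, matching this bundle; the call sites are hypothetical):

  // before (ai@3.x):
  // const { experimental_generateText } = require("ai");
  // after (4.0.0-canary.x): only the stable names remain exported
  const { generateText, streamText, generateObject, streamObject } = require("ai");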
@@ -116,24 +102,6 @@ var RetryError = class extends import_provider.AISDKError {
   static isInstance(error) {
     return import_provider.AISDKError.hasMarker(error, marker);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isRetryError(error) {
-    return error instanceof Error && error.name === name && typeof error.reason === "string" && Array.isArray(error.errors);
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      reason: this.reason,
-      lastError: this.lastError,
-      errors: this.errors
-    };
-  }
 };
 _a = symbol;
 
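The deprecated static type guards and `toJSON` serializers are removed from every error class in this release (the same pattern repeats below for `DownloadError`, `InvalidDataContentError`, `InvalidMessageRoleError`, and the rest). A sketch of the surviving check, assuming `err` was caught from some `ai` call:

  const { RetryError } = require("ai");
  try {
    // ... some ai call (hypothetical)
  } catch (err) {
    if (RetryError.isInstance(err)) { // replaces RetryError.isRetryError(err)
      console.error("retries exhausted:", err.reason, err.errors);
    }
  }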
@@ -171,7 +139,7 @@ async function _retryWithExponentialBackoff(f, {
       errors: newErrors
     });
   }
-  if (error instanceof Error && import_provider2.APICallError.
+  if (error instanceof Error && import_provider2.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
     await delay(delayInMs);
     return _retryWithExponentialBackoff(
       f,
@@ -686,25 +654,6 @@ var DownloadError = class extends import_provider3.AISDKError {
   static isInstance(error) {
     return import_provider3.AISDKError.hasMarker(error, marker2);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isDownloadError(error) {
-    return error instanceof Error && error.name === name2 && typeof error.url === "string" && (error.statusCode == null || typeof error.statusCode === "number") && (error.statusText == null || typeof error.statusText === "string");
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      url: this.url,
-      statusCode: this.statusCode,
-      statusText: this.statusText,
-      cause: this.cause
-    };
-  }
 };
 _a2 = symbol2;
 
@@ -774,24 +723,6 @@ var InvalidDataContentError = class extends import_provider4.AISDKError {
   static isInstance(error) {
     return import_provider4.AISDKError.hasMarker(error, marker3);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidDataContentError(error) {
-    return error instanceof Error && error.name === name3 && error.content != null;
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      cause: this.cause,
-      content: this.content
-    };
-  }
 };
 _a3 = symbol3;
 
@@ -865,23 +796,6 @@ var InvalidMessageRoleError = class extends import_provider5.AISDKError {
   static isInstance(error) {
     return import_provider5.AISDKError.hasMarker(error, marker4);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidMessageRoleError(error) {
-    return error instanceof Error && error.name === name4 && typeof error.role === "string";
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      role: this.role
-    };
-  }
 };
 _a4 = symbol4;
 
@@ -1109,21 +1023,6 @@ var InvalidArgumentError = class extends import_provider6.AISDKError {
   static isInstance(error) {
     return import_provider6.AISDKError.hasMarker(error, marker5);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidArgumentError(error) {
-    return error instanceof Error && error.name === name5 && typeof error.parameter === "string" && typeof error.value === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      parameter: this.parameter,
-      value: this.value
-    };
-  }
 };
 _a5 = symbol5;
 
@@ -1556,9 +1455,7 @@ function convertToCoreMessages(messages, options) {
         });
         break;
       }
-      case "
-      case "data":
-      case "tool": {
+      case "data": {
         break;
       }
       default: {
@@ -1646,28 +1543,30 @@ function standardizePrompt({
 }
 
 // core/types/usage.ts
-function calculateLanguageModelUsage(
+function calculateLanguageModelUsage({
+  promptTokens,
+  completionTokens
+}) {
   return {
-    promptTokens
-    completionTokens
-    totalTokens:
+    promptTokens,
+    completionTokens,
+    totalTokens: promptTokens + completionTokens
   };
 }
 
 // core/util/prepare-response-headers.ts
-function prepareResponseHeaders(
+function prepareResponseHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-
-
-
-  headers.set("Content-Type", contentType);
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  if (!responseHeaders.has("Content-Type")) {
+    responseHeaders.set("Content-Type", contentType);
   }
   if (dataStreamVersion !== void 0) {
-
+    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
   }
-  return
+  return responseHeaders;
 }
 
 // core/generate-object/inject-json-instruction.ts
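For orientation, the rewritten helpers behave like this (both are internal to the bundle, so the direct calls below are illustrative only):

  calculateLanguageModelUsage({ promptTokens: 10, completionTokens: 5 });
  // => { promptTokens: 10, completionTokens: 5, totalTokens: 15 }

  // Caller-supplied headers win; Content-Type is only set when absent:
  const h = prepareResponseHeaders(
    { "Content-Type": "text/event-stream" },
    { contentType: "application/json; charset=utf-8" }
  );
  h.get("Content-Type"); // "text/event-stream"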
@@ -1705,23 +1604,6 @@ var NoObjectGeneratedError = class extends import_provider9.AISDKError {
   static isInstance(error) {
     return import_provider9.AISDKError.hasMarker(error, marker7);
   }
-  /**
-   * @deprecated Use isInstance instead.
-   */
-  static isNoObjectGeneratedError(error) {
-    return error instanceof Error && error.name === name7;
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      cause: this.cause,
-      message: this.message,
-      stack: this.stack
-    };
-  }
 };
 _a7 = symbol7;
 
@@ -2253,9 +2135,6 @@ async function generateObject({
           "ai.response.timestamp": responseData.timestamp.toISOString(),
           "ai.usage.promptTokens": result2.usage.promptTokens,
           "ai.usage.completionTokens": result2.usage.completionTokens,
-          // deprecated:
-          "ai.finishReason": result2.finishReason,
-          "ai.result.object": { output: () => result2.text },
           // standardized gen-ai llm span attributes:
           "gen_ai.response.finish_reasons": [result2.finishReason],
           "gen_ai.response.id": responseData.id,
@@ -2360,9 +2239,6 @@ async function generateObject({
           "ai.response.timestamp": responseData.timestamp.toISOString(),
           "ai.usage.promptTokens": result2.usage.promptTokens,
           "ai.usage.completionTokens": result2.usage.completionTokens,
-          // deprecated:
-          "ai.finishReason": result2.finishReason,
-          "ai.result.object": { output: () => objectText },
           // standardized gen-ai llm span attributes:
           "gen_ai.response.finish_reasons": [result2.finishReason],
           "gen_ai.response.id": responseData.id,
@@ -2416,12 +2292,7 @@ async function generateObject({
             output: () => JSON.stringify(validationResult.value)
           },
           "ai.usage.promptTokens": usage.promptTokens,
-          "ai.usage.completionTokens": usage.completionTokens
-          // deprecated:
-          "ai.finishReason": finishReason,
-          "ai.result.object": {
-            output: () => JSON.stringify(validationResult.value)
-          }
+          "ai.usage.completionTokens": usage.completionTokens
         }
       })
     );
@@ -2450,42 +2321,23 @@ var DefaultGenerateObjectResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
     this.response = options.response;
     this.request = options.request;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
   toJsonResponse(init) {
     var _a11;
     return new Response(JSON.stringify(this.object), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "application/json; charset=utf-8"
       })
     });
   }
 };
-var experimental_generateObject = generateObject;
 
 // core/generate-object/stream-object.ts
 var import_provider_utils6 = require("@ai-sdk/provider-utils");
 var import_ui_utils2 = require("@ai-sdk/ui-utils");
 
-// util/create-resolvable-promise.ts
-function createResolvablePromise() {
-  let resolve;
-  let reject;
-  const promise = new Promise((res, rej) => {
-    resolve = res;
-    reject = rej;
-  });
-  return {
-    promise,
-    resolve,
-    reject
-  };
-}
-
 // util/delayed-promise.ts
 var DelayedPromise = class {
   constructor() {
@@ -2531,23 +2383,23 @@ function now() {
 }
 
 // core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(
+function prepareOutgoingHttpHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-  const
-  if (
-  for (const [key, value] of Object.entries(
-
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
     }
   }
-  if (
-
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
   }
   if (dataStreamVersion !== void 0) {
-
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
   }
-  return
+  return outgoingHeaders;
 }
 
 // core/util/write-to-server-response.ts
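Same idea for the Node server-response path: the helper now takes the headers themselves instead of the whole init object and returns a plain object suitable for `ServerResponse.writeHead`. Illustrative only (internal function; the custom header is made up):

  prepareOutgoingHttpHeaders({ "X-Custom": "1" }, {
    contentType: "text/plain; charset=utf-8",
    dataStreamVersion: "v1"
  });
  // => { "X-Custom": "1", "Content-Type": "text/plain; charset=utf-8",
  //      "X-Vercel-AI-Data-Stream": "v1" }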
@@ -2577,9 +2429,91 @@ function writeToServerResponse({
   read();
 }
 
+// util/create-resolvable-promise.ts
+function createResolvablePromise() {
+  let resolve;
+  let reject;
+  const promise = new Promise((res, rej) => {
+    resolve = res;
+    reject = rej;
+  });
+  return {
+    promise,
+    resolve,
+    reject
+  };
+}
+
+// core/util/create-stitchable-stream.ts
+function createStitchableStream() {
+  let innerStreamReaders = [];
+  let controller = null;
+  let isClosed = false;
+  let waitForNewStream = createResolvablePromise();
+  const processPull = async () => {
+    if (isClosed && innerStreamReaders.length === 0) {
+      controller == null ? void 0 : controller.close();
+      return;
+    }
+    if (innerStreamReaders.length === 0) {
+      waitForNewStream = createResolvablePromise();
+      await waitForNewStream.promise;
+      return processPull();
+    }
+    try {
+      const { value, done } = await innerStreamReaders[0].read();
+      if (done) {
+        innerStreamReaders.shift();
+        if (innerStreamReaders.length > 0) {
+          await processPull();
+        } else if (isClosed) {
+          controller == null ? void 0 : controller.close();
+        }
+      } else {
+        controller == null ? void 0 : controller.enqueue(value);
+      }
+    } catch (error) {
+      controller == null ? void 0 : controller.error(error);
+      innerStreamReaders.shift();
+      if (isClosed && innerStreamReaders.length === 0) {
+        controller == null ? void 0 : controller.close();
+      }
+    }
+  };
+  return {
+    stream: new ReadableStream({
+      start(controllerParam) {
+        controller = controllerParam;
+      },
+      pull: processPull,
+      async cancel() {
+        for (const reader of innerStreamReaders) {
+          await reader.cancel();
+        }
+        innerStreamReaders = [];
+        isClosed = true;
+      }
+    }),
+    addStream: (innerStream) => {
+      if (isClosed) {
+        throw new Error("Cannot add inner stream: outer stream is closed");
+      }
+      innerStreamReaders.push(innerStream.getReader());
+      waitForNewStream.resolve();
+    },
+    close: () => {
+      isClosed = true;
+      waitForNewStream.resolve();
+      if (innerStreamReaders.length === 0) {
+        controller == null ? void 0 : controller.close();
+      }
+    }
+  };
+}
+
 // core/generate-object/stream-object.ts
 var originalGenerateId2 = (0, import_provider_utils6.createIdGenerator)({ prefix: "aiobj", size: 24 });
-
+function streamObject({
   model,
   schema: inputSchema,
   schemaName,
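The stitchable stream added above is what lets `streamObject` return synchronously and attach the real model stream (or an error stream) later. A small usage sketch, assuming a runtime such as Node 18+ where ReadableStream is available:

  async function demo() {
    const { stream, addStream, close } = createStitchableStream();
    addStream(new ReadableStream({
      start(c) { c.enqueue(1); c.enqueue(2); c.close(); }
    }));
    addStream(new ReadableStream({
      start(c) { c.enqueue(3); c.close(); }
    }));
    close(); // no further inner streams; outer stream ends once drained
    const reader = stream.getReader();
    for (;;) {
      const { value, done } = await reader.read();
      if (done) break;
      console.log(value); // 1, 2, 3 in order
    }
  }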
@@ -2613,407 +2547,433 @@ async function streamObject({
   if (outputStrategy.type === "no-schema" && mode === void 0) {
     mode = "json";
   }
-
+  return new DefaultStreamObjectResult({
     model,
     telemetry,
     headers,
-    settings
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      "ai.prompt": {
-        input: () => JSON.stringify({ system, prompt, messages })
-      },
-      "ai.schema": outputStrategy.jsonSchema != null ? { input: () => JSON.stringify(outputStrategy.jsonSchema) } : void 0,
-      "ai.schema.name": schemaName,
-      "ai.schema.description": schemaDescription,
-      "ai.settings.output": outputStrategy.type,
-      "ai.settings.mode": mode
-      }
-    }),
-    tracer,
-    endWhenDone: false,
-    fn: async (rootSpan) => {
-      if (mode === "auto" || mode == null) {
-        mode = model.defaultObjectGenerationMode;
-      }
-      let callOptions;
-      let transformer;
-      switch (mode) {
-        case "json": {
-          const standardizedPrompt = standardizePrompt({
-            prompt: {
-              system: outputStrategy.jsonSchema == null ? injectJsonInstruction({ prompt: system }) : model.supportsStructuredOutputs ? system : injectJsonInstruction({
-                prompt: system,
-                schema: outputStrategy.jsonSchema
-              }),
-              prompt,
-              messages
-            },
-            tools: void 0
-          });
-          callOptions = {
-            mode: {
-              type: "object-json",
-              schema: outputStrategy.jsonSchema,
-              name: schemaName,
-              description: schemaDescription
-            },
-            ...prepareCallSettings(settings),
-            inputFormat: standardizedPrompt.type,
-            prompt: await convertToLanguageModelPrompt({
-              prompt: standardizedPrompt,
-              modelSupportsImageUrls: model.supportsImageUrls,
-              modelSupportsUrl: model.supportsUrl
-            }),
-            providerMetadata,
-            abortSignal,
-            headers
-          };
-          transformer = {
-            transform: (chunk, controller) => {
-              switch (chunk.type) {
-                case "text-delta":
-                  controller.enqueue(chunk.textDelta);
-                  break;
-                case "response-metadata":
-                case "finish":
-                case "error":
-                  controller.enqueue(chunk);
-                  break;
-              }
-            }
-          };
-          break;
-        }
-        case "tool": {
-          const standardizedPrompt = standardizePrompt({
-            prompt: { system, prompt, messages },
-            tools: void 0
-          });
-          callOptions = {
-            mode: {
-              type: "object-tool",
-              tool: {
-                type: "function",
-                name: schemaName != null ? schemaName : "json",
-                description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
-                parameters: outputStrategy.jsonSchema
-              }
-            },
-            ...prepareCallSettings(settings),
-            inputFormat: standardizedPrompt.type,
-            prompt: await convertToLanguageModelPrompt({
-              prompt: standardizedPrompt,
-              modelSupportsImageUrls: model.supportsImageUrls,
-              modelSupportsUrl: model.supportsUrl
-            }),
-            providerMetadata,
-            abortSignal,
-            headers
-          };
-          transformer = {
-            transform(chunk, controller) {
-              switch (chunk.type) {
-                case "tool-call-delta":
-                  controller.enqueue(chunk.argsTextDelta);
-                  break;
-                case "response-metadata":
-                case "finish":
-                case "error":
-                  controller.enqueue(chunk);
-                  break;
-              }
-            }
-          };
-          break;
-        }
-        case void 0: {
-          throw new Error(
-            "Model does not have a default object generation mode."
-          );
-        }
-        default: {
-          const _exhaustiveCheck = mode;
-          throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
-        }
-      }
-      const {
-        result: { stream, warnings, rawResponse, request },
-        doStreamSpan,
-        startTimestampMs
-      } = await retry(
-        () => recordSpan({
-          name: "ai.streamObject.doStream",
-          attributes: selectTelemetryAttributes({
-            telemetry,
-            attributes: {
-              ...assembleOperationName({
-                operationId: "ai.streamObject.doStream",
-                telemetry
-              }),
-              ...baseTelemetryAttributes,
-              "ai.prompt.format": {
-                input: () => callOptions.inputFormat
-              },
-              "ai.prompt.messages": {
-                input: () => JSON.stringify(callOptions.prompt)
-              },
-              "ai.settings.mode": mode,
-              // standardized gen-ai llm span attributes:
-              "gen_ai.system": model.provider,
-              "gen_ai.request.model": model.modelId,
-              "gen_ai.request.frequency_penalty": settings.frequencyPenalty,
-              "gen_ai.request.max_tokens": settings.maxTokens,
-              "gen_ai.request.presence_penalty": settings.presencePenalty,
-              "gen_ai.request.temperature": settings.temperature,
-              "gen_ai.request.top_k": settings.topK,
-              "gen_ai.request.top_p": settings.topP
-            }
-          }),
-          tracer,
-          endWhenDone: false,
-          fn: async (doStreamSpan2) => ({
-            startTimestampMs: now2(),
-            doStreamSpan: doStreamSpan2,
-            result: await model.doStream(callOptions)
-          })
-        })
-      );
-      return new DefaultStreamObjectResult({
-        outputStrategy,
-        stream: stream.pipeThrough(new TransformStream(transformer)),
-        warnings,
-        rawResponse,
-        request: request != null ? request : {},
-        onFinish,
-        rootSpan,
-        doStreamSpan,
-        telemetry,
-        startTimestampMs,
-        modelId: model.modelId,
-        now: now2,
-        currentDate,
-        generateId: generateId3
-      });
-    }
+    settings,
+    maxRetries,
+    abortSignal,
+    outputStrategy,
+    system,
+    prompt,
+    messages,
+    schemaName,
+    schemaDescription,
+    inputProviderMetadata: providerMetadata,
+    mode,
+    onFinish,
+    generateId: generateId3,
+    currentDate,
+    now: now2
   });
 }
 var DefaultStreamObjectResult = class {
   constructor({
-
-
-
-
+    model,
+    headers,
+    telemetry,
+    settings,
+    maxRetries,
+    abortSignal,
     outputStrategy,
+    system,
+    prompt,
+    messages,
+    schemaName,
+    schemaDescription,
+    inputProviderMetadata,
+    mode,
     onFinish,
-
-    doStreamSpan,
-    telemetry,
-    startTimestampMs,
-    modelId,
-    now: now2,
+    generateId: generateId3,
    currentDate,
-
+    now: now2
   }) {
-    this.warnings = warnings;
-    this.rawResponse = rawResponse;
-    this.outputStrategy = outputStrategy;
-    this.request = Promise.resolve(request);
     this.objectPromise = new DelayedPromise();
-
-    this.
-
-    this.
-
-
-
-
-
-
-
-
-
-
-    let accumulatedText = "";
-    let textDelta = "";
-    let response = {
-      id: generateId3(),
-      timestamp: currentDate(),
-      modelId
-    };
-    let latestObjectJson = void 0;
-    let latestObject = void 0;
-    let isFirstChunk = true;
-    let isFirstDelta = true;
+    this.usagePromise = new DelayedPromise();
+    this.providerMetadataPromise = new DelayedPromise();
+    this.warningsPromise = new DelayedPromise();
+    this.requestPromise = new DelayedPromise();
+    this.responsePromise = new DelayedPromise();
+    this.stitchableStream = createStitchableStream();
+    const baseTelemetryAttributes = getBaseTelemetryAttributes({
+      model,
+      telemetry,
+      headers,
+      settings: { ...settings, maxRetries }
+    });
+    const tracer = getTracer(telemetry);
+    const retry = retryWithExponentialBackoff({ maxRetries });
     const self = this;
-
-
-
-
-
-
-
-
+    recordSpan({
+      name: "ai.streamObject",
+      attributes: selectTelemetryAttributes({
+        telemetry,
+        attributes: {
+          ...assembleOperationName({
+            operationId: "ai.streamObject",
+            telemetry
+          }),
+          ...baseTelemetryAttributes,
+          // specific settings that only make sense on the outer level:
+          "ai.prompt": {
+            input: () => JSON.stringify({ system, prompt, messages })
+          },
+          "ai.schema": outputStrategy.jsonSchema != null ? { input: () => JSON.stringify(outputStrategy.jsonSchema) } : void 0,
+          "ai.schema.name": schemaName,
+          "ai.schema.description": schemaDescription,
+          "ai.settings.output": outputStrategy.type,
+          "ai.settings.mode": mode
+        }
+      }),
+      tracer,
+      endWhenDone: false,
+      fn: async (rootSpan) => {
+        if (mode === "auto" || mode == null) {
+          mode = model.defaultObjectGenerationMode;
+        }
+        let callOptions;
+        let transformer;
+        switch (mode) {
+          case "json": {
+            const standardizedPrompt = standardizePrompt({
+              prompt: {
+                system: outputStrategy.jsonSchema == null ? injectJsonInstruction({ prompt: system }) : model.supportsStructuredOutputs ? system : injectJsonInstruction({
+                  prompt: system,
+                  schema: outputStrategy.jsonSchema
+                }),
+                prompt,
+                messages
+              },
+              tools: void 0
            });
-
-
+            callOptions = {
+              mode: {
+                type: "object-json",
+                schema: outputStrategy.jsonSchema,
+                name: schemaName,
+                description: schemaDescription
+              },
+              ...prepareCallSettings(settings),
+              inputFormat: standardizedPrompt.type,
+              prompt: await convertToLanguageModelPrompt({
+                prompt: standardizedPrompt,
+                modelSupportsImageUrls: model.supportsImageUrls,
+                modelSupportsUrl: model.supportsUrl
+              }),
+              providerMetadata: inputProviderMetadata,
+              abortSignal,
+              headers
+            };
+            transformer = {
+              transform: (chunk, controller) => {
+                switch (chunk.type) {
+                  case "text-delta":
+                    controller.enqueue(chunk.textDelta);
+                    break;
+                  case "response-metadata":
+                  case "finish":
+                  case "error":
+                    controller.enqueue(chunk);
+                    break;
+                }
+              }
+            };
+            break;
+          }
+          case "tool": {
+            const standardizedPrompt = standardizePrompt({
+              prompt: { system, prompt, messages },
+              tools: void 0
            });
+            callOptions = {
+              mode: {
+                type: "object-tool",
+                tool: {
+                  type: "function",
+                  name: schemaName != null ? schemaName : "json",
+                  description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
+                  parameters: outputStrategy.jsonSchema
+                }
+              },
+              ...prepareCallSettings(settings),
+              inputFormat: standardizedPrompt.type,
+              prompt: await convertToLanguageModelPrompt({
+                prompt: standardizedPrompt,
+                modelSupportsImageUrls: model.supportsImageUrls,
+                modelSupportsUrl: model.supportsUrl
+              }),
+              providerMetadata: inputProviderMetadata,
+              abortSignal,
+              headers
+            };
+            transformer = {
+              transform(chunk, controller) {
+                switch (chunk.type) {
+                  case "tool-call-delta":
+                    controller.enqueue(chunk.argsTextDelta);
+                    break;
+                  case "response-metadata":
+                  case "finish":
+                  case "error":
+                    controller.enqueue(chunk);
+                    break;
+                }
+              }
+            };
+            break;
          }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+          case void 0: {
+            throw new Error(
+              "Model does not have a default object generation mode."
+            );
+          }
+          default: {
+            const _exhaustiveCheck = mode;
+            throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
+          }
+        }
+        const {
+          result: { stream, warnings, rawResponse, request },
+          doStreamSpan,
+          startTimestampMs
+        } = await retry(
+          () => recordSpan({
+            name: "ai.streamObject.doStream",
+            attributes: selectTelemetryAttributes({
+              telemetry,
+              attributes: {
+                ...assembleOperationName({
+                  operationId: "ai.streamObject.doStream",
+                  telemetry
+                }),
+                ...baseTelemetryAttributes,
+                "ai.prompt.format": {
+                  input: () => callOptions.inputFormat
+                },
+                "ai.prompt.messages": {
+                  input: () => JSON.stringify(callOptions.prompt)
+                },
+                "ai.settings.mode": mode,
+                // standardized gen-ai llm span attributes:
+                "gen_ai.system": model.provider,
+                "gen_ai.request.model": model.modelId,
+                "gen_ai.request.frequency_penalty": settings.frequencyPenalty,
+                "gen_ai.request.max_tokens": settings.maxTokens,
+                "gen_ai.request.presence_penalty": settings.presencePenalty,
+                "gen_ai.request.temperature": settings.temperature,
+                "gen_ai.request.top_k": settings.topK,
+                "gen_ai.request.top_p": settings.topP
+              }
+            }),
+            tracer,
+            endWhenDone: false,
+            fn: async (doStreamSpan2) => ({
+              startTimestampMs: now2(),
+              doStreamSpan: doStreamSpan2,
+              result: await model.doStream(callOptions)
+            })
+          })
+        );
+        self.requestPromise.resolve(request != null ? request : {});
+        let usage;
+        let finishReason;
+        let providerMetadata;
+        let object;
+        let error;
+        let accumulatedText = "";
+        let textDelta = "";
+        let response = {
+          id: generateId3(),
+          timestamp: currentDate(),
+          modelId: model.modelId
+        };
+        let latestObjectJson = void 0;
+        let latestObject = void 0;
+        let isFirstChunk = true;
+        let isFirstDelta = true;
+        const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
+          new TransformStream({
+            async transform(chunk, controller) {
+              var _a11, _b, _c;
+              if (isFirstChunk) {
+                const msToFirstChunk = now2() - startTimestampMs;
+                isFirstChunk = false;
+                doStreamSpan.addEvent("ai.stream.firstChunk", {
+                  "ai.stream.msToFirstChunk": msToFirstChunk
                });
-
-
-                  textDelta: validationResult.value.textDelta
+                doStreamSpan.setAttributes({
+                  "ai.stream.msToFirstChunk": msToFirstChunk
                });
-                textDelta = "";
-                isFirstDelta = false;
-              }
-            }
-            return;
-          }
-          switch (chunk.type) {
-            case "response-metadata": {
-              response = {
-                id: (_a11 = chunk.id) != null ? _a11 : response.id,
-                timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
-                modelId: (_c = chunk.modelId) != null ? _c : response.modelId
-              };
-              break;
-            }
-            case "finish": {
-              if (textDelta !== "") {
-                controller.enqueue({ type: "text-delta", textDelta });
              }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+              if (typeof chunk === "string") {
+                accumulatedText += chunk;
+                textDelta += chunk;
+                const { value: currentObjectJson, state: parseState } = (0, import_ui_utils2.parsePartialJson)(accumulatedText);
+                if (currentObjectJson !== void 0 && !(0, import_ui_utils2.isDeepEqualData)(latestObjectJson, currentObjectJson)) {
+                  const validationResult = outputStrategy.validatePartialResult({
+                    value: currentObjectJson,
+                    textDelta,
+                    latestObject,
+                    isFirstDelta,
+                    isFinalDelta: parseState === "successful-parse"
+                  });
+                  if (validationResult.success && !(0, import_ui_utils2.isDeepEqualData)(
+                    latestObject,
+                    validationResult.value.partial
+                  )) {
+                    latestObjectJson = currentObjectJson;
+                    latestObject = validationResult.value.partial;
+                    controller.enqueue({
+                      type: "object",
+                      object: latestObject
+                    });
+                    controller.enqueue({
+                      type: "text-delta",
+                      textDelta: validationResult.value.textDelta
+                    });
+                    textDelta = "";
+                    isFirstDelta = false;
+                  }
+                }
+                return;
              }
-
-
-
-
-
-
-
-
-          // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
-          async flush(controller) {
-            try {
-              const finalUsage = usage != null ? usage : {
-                promptTokens: NaN,
-                completionTokens: NaN,
-                totalTokens: NaN
-              };
-              doStreamSpan.setAttributes(
-                selectTelemetryAttributes({
-                  telemetry,
-                  attributes: {
-                    "ai.response.finishReason": finishReason,
-                    "ai.response.object": {
-                      output: () => JSON.stringify(object)
-                    },
-                    "ai.response.id": response.id,
-                    "ai.response.model": response.modelId,
-                    "ai.response.timestamp": response.timestamp.toISOString(),
-                    "ai.usage.promptTokens": finalUsage.promptTokens,
-                    "ai.usage.completionTokens": finalUsage.completionTokens,
-                    // deprecated
-                    "ai.finishReason": finishReason,
-                    "ai.result.object": { output: () => JSON.stringify(object) },
-                    // standardized gen-ai llm span attributes:
-                    "gen_ai.response.finish_reasons": [finishReason],
-                    "gen_ai.response.id": response.id,
-                    "gen_ai.response.model": response.modelId,
-                    "gen_ai.usage.input_tokens": finalUsage.promptTokens,
-                    "gen_ai.usage.output_tokens": finalUsage.completionTokens
+              switch (chunk.type) {
+                case "response-metadata": {
+                  response = {
+                    id: (_a11 = chunk.id) != null ? _a11 : response.id,
+                    timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
+                    modelId: (_c = chunk.modelId) != null ? _c : response.modelId
+                  };
+                  break;
                }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                case "finish": {
+                  if (textDelta !== "") {
+                    controller.enqueue({ type: "text-delta", textDelta });
+                  }
+                  finishReason = chunk.finishReason;
+                  usage = calculateLanguageModelUsage(chunk.usage);
+                  providerMetadata = chunk.providerMetadata;
+                  controller.enqueue({ ...chunk, usage, response });
+                  self.usagePromise.resolve(usage);
+                  self.providerMetadataPromise.resolve(providerMetadata);
+                  self.responsePromise.resolve({
+                    ...response,
+                    headers: rawResponse == null ? void 0 : rawResponse.headers
+                  });
+                  const validationResult = outputStrategy.validateFinalResult(latestObjectJson);
+                  if (validationResult.success) {
+                    object = validationResult.value;
+                    self.objectPromise.resolve(object);
+                  } else {
+                    error = validationResult.error;
+                    self.objectPromise.reject(error);
+                  }
+                  break;
                }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                default: {
+                  controller.enqueue(chunk);
+                  break;
+                }
+              }
+            },
+            // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
+            async flush(controller) {
+              try {
+                const finalUsage = usage != null ? usage : {
+                  promptTokens: NaN,
+                  completionTokens: NaN,
+                  totalTokens: NaN
+                };
+                doStreamSpan.setAttributes(
+                  selectTelemetryAttributes({
+                    telemetry,
+                    attributes: {
+                      "ai.response.finishReason": finishReason,
+                      "ai.response.object": {
+                        output: () => JSON.stringify(object)
+                      },
+                      "ai.response.id": response.id,
+                      "ai.response.model": response.modelId,
+                      "ai.response.timestamp": response.timestamp.toISOString(),
+                      "ai.usage.promptTokens": finalUsage.promptTokens,
+                      "ai.usage.completionTokens": finalUsage.completionTokens,
+                      // standardized gen-ai llm span attributes:
+                      "gen_ai.response.finish_reasons": [finishReason],
+                      "gen_ai.response.id": response.id,
+                      "gen_ai.response.model": response.modelId,
+                      "gen_ai.usage.input_tokens": finalUsage.promptTokens,
+                      "gen_ai.usage.output_tokens": finalUsage.completionTokens
+                    }
+                  })
+                );
+                doStreamSpan.end();
+                rootSpan.setAttributes(
+                  selectTelemetryAttributes({
+                    telemetry,
+                    attributes: {
+                      "ai.usage.promptTokens": finalUsage.promptTokens,
+                      "ai.usage.completionTokens": finalUsage.completionTokens,
+                      "ai.response.object": {
+                        output: () => JSON.stringify(object)
+                      }
+                    }
+                  })
+                );
+                await (onFinish == null ? void 0 : onFinish({
+                  usage: finalUsage,
+                  object,
+                  error,
+                  response: {
+                    ...response,
+                    headers: rawResponse == null ? void 0 : rawResponse.headers
+                  },
+                  warnings,
+                  experimental_providerMetadata: providerMetadata
+                }));
+              } catch (error2) {
+                controller.error(error2);
+              } finally {
+                rootSpan.end();
+              }
+            }
+          })
+        );
+        self.stitchableStream.addStream(transformedStream);
+      }
+    }).catch((error) => {
+      self.stitchableStream.addStream(
+        new ReadableStream({
+          start(controller) {
+            controller.error(error);
          }
-        }
-
-      )
+        })
+      );
+    }).finally(() => {
+      self.stitchableStream.close();
+    });
+    this.outputStrategy = outputStrategy;
   }
   get object() {
     return this.objectPromise.value;
   }
+  get usage() {
+    return this.usagePromise.value;
+  }
+  get experimental_providerMetadata() {
+    return this.providerMetadataPromise.value;
+  }
+  get warnings() {
+    return this.warningsPromise.value;
+  }
+  get request() {
+    return this.requestPromise.value;
+  }
+  get response() {
+    return this.responsePromise.value;
+  }
   get partialObjectStream() {
-    return createAsyncIterableStream(this.
+    return createAsyncIterableStream(this.stitchableStream.stream, {
       transform(chunk, controller) {
         switch (chunk.type) {
           case "object":
@@ -3034,10 +2994,12 @@ var DefaultStreamObjectResult = class {
     });
   }
   get elementStream() {
-    return this.outputStrategy.createElementStream(
+    return this.outputStrategy.createElementStream(
+      this.stitchableStream.stream
+    );
   }
   get textStream() {
-    return createAsyncIterableStream(this.
+    return createAsyncIterableStream(this.stitchableStream.stream, {
       transform(chunk, controller) {
         switch (chunk.type) {
           case "text-delta":
@@ -3058,7 +3020,7 @@ var DefaultStreamObjectResult = class {
     });
   }
   get fullStream() {
-    return createAsyncIterableStream(this.
+    return createAsyncIterableStream(this.stitchableStream.stream, {
       transform(chunk, controller) {
         controller.enqueue(chunk);
       }
@@ -3069,7 +3031,7 @@ var DefaultStreamObjectResult = class {
       response,
       status: init == null ? void 0 : init.status,
      statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
        contentType: "text/plain; charset=utf-8"
      }),
      stream: this.textStream.pipeThrough(new TextEncoderStream())
@@ -3079,13 +3041,12 @@ var DefaultStreamObjectResult = class {
     var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
     });
   }
 };
-var experimental_streamObject = streamObject;
 
 // core/generate-text/generate-text.ts
 var import_provider_utils8 = require("@ai-sdk/provider-utils");
@@ -3116,25 +3077,6 @@ var InvalidToolArgumentsError = class extends import_provider11.AISDKError {
   static isInstance(error) {
     return import_provider11.AISDKError.hasMarker(error, marker8);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidToolArgumentsError(error) {
-    return error instanceof Error && error.name === name8 && typeof error.toolName === "string" && typeof error.toolArgs === "string";
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      cause: this.cause,
-      stack: this.stack,
-      toolName: this.toolName,
-      toolArgs: this.toolArgs
-    };
-  }
 };
 _a8 = symbol8;
 
@@ -3158,24 +3100,6 @@ var NoSuchToolError = class extends import_provider12.AISDKError {
   static isInstance(error) {
     return import_provider12.AISDKError.hasMarker(error, marker9);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isNoSuchToolError(error) {
-    return error instanceof Error && error.name === name9 && "toolName" in error && error.toolName != void 0 && typeof error.name === "string";
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      toolName: this.toolName,
-      availableTools: this.availableTools
-    };
-  }
 };
 _a9 = symbol9;
 
@@ -3328,11 +3252,8 @@ async function generateText({
   maxRetries,
   abortSignal,
   headers,
-
-
-  maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
-  experimental_continuationSteps,
-  experimental_continueSteps: continueSteps = experimental_continuationSteps != null ? experimental_continuationSteps : false,
+  maxSteps = 1,
+  experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
   experimental_activeTools: activeTools,
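With the `maxToolRoundtrips` fallback gone (here and in `streamText` below), `maxSteps` is the only knob. A call-site migration sketch (model and tools assumed to be in scope):

  // before (ai@3.x): maxToolRoundtrips: 2   // i.e. up to 3 generations
  // after: state the total step count directly
  const result = await generateText({
    model,
    tools,
    maxSteps: 3,
    prompt: "What is the weather in Berlin?"
  });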
@@ -3484,14 +3405,6 @@ async function generateText({
           "ai.response.timestamp": responseData.timestamp.toISOString(),
           "ai.usage.promptTokens": result.usage.promptTokens,
           "ai.usage.completionTokens": result.usage.completionTokens,
-          // deprecated:
-          "ai.finishReason": result.finishReason,
-          "ai.result.text": {
-            output: () => result.text
-          },
-          "ai.result.toolCalls": {
-            output: () => JSON.stringify(result.toolCalls)
-          },
           // standardized gen-ai llm span attributes:
           "gen_ai.response.finish_reasons": [result.finishReason],
           "gen_ai.response.id": responseData.id,
@@ -3594,15 +3507,7 @@ async function generateText({
             output: () => JSON.stringify(currentModelResponse.toolCalls)
           },
           "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
-          "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
-          // deprecated:
-          "ai.finishReason": currentModelResponse.finishReason,
-          "ai.result.text": {
-            output: () => currentModelResponse.text
-          },
-          "ai.result.toolCalls": {
-            output: () => JSON.stringify(currentModelResponse.toolCalls)
-          }
+          "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
         }
       })
     );
@@ -3620,7 +3525,6 @@ async function generateText({
       messages: responseMessages
     },
     logprobs: currentModelResponse.logprobs,
-    responseMessages,
     steps,
     providerMetadata: currentModelResponse.providerMetadata
   });
@@ -3697,105 +3601,38 @@ var DefaultGenerateTextResult = class {
     this.warnings = options.warnings;
     this.request = options.request;
     this.response = options.response;
-    this.responseMessages = options.responseMessages;
-    this.roundtrips = options.steps;
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
 };
-var experimental_generateText = generateText;
 
 // core/generate-text/stream-text.ts
 var import_provider_utils9 = require("@ai-sdk/provider-utils");
+var import_ui_utils6 = require("@ai-sdk/ui-utils");
 
-// core/util/
-function
-
-
-  let
-
-
-
-
-  }
-  if (innerStreamReaders.length === 0) {
-    return;
-  }
+// core/util/merge-streams.ts
+function mergeStreams(stream1, stream2) {
+  const reader1 = stream1.getReader();
+  const reader2 = stream2.getReader();
+  let lastRead1 = void 0;
+  let lastRead2 = void 0;
+  let stream1Done = false;
+  let stream2Done = false;
+  async function readStream1(controller) {
   try {
-
-
-
-
-
-
-
-    }
+      if (lastRead1 == null) {
+        lastRead1 = reader1.read();
+      }
+      const result = await lastRead1;
+      lastRead1 = void 0;
+      if (!result.done) {
+        controller.enqueue(result.value);
     } else {
-      controller
+        controller.close();
     }
   } catch (error) {
-    controller
-    innerStreamReaders.shift();
-    if (isClosed && innerStreamReaders.length === 0) {
-      controller == null ? void 0 : controller.close();
-    }
-  }
-};
-return {
-  stream: new ReadableStream({
-    start(controllerParam) {
-      controller = controllerParam;
-    },
-    pull: processPull,
-    async cancel() {
-      for (const reader of innerStreamReaders) {
-        await reader.cancel();
-      }
-      innerStreamReaders = [];
-      isClosed = true;
-    }
-  }),
-  addStream: (innerStream) => {
-    if (isClosed) {
-      throw new Error("Cannot add inner stream: outer stream is closed");
-    }
-    innerStreamReaders.push(innerStream.getReader());
-  },
-  close: () => {
-    isClosed = true;
-    if (innerStreamReaders.length === 0) {
-      controller == null ? void 0 : controller.close();
-    }
-  }
-};
-}
-
-// core/util/merge-streams.ts
-function mergeStreams(stream1, stream2) {
-  const reader1 = stream1.getReader();
-  const reader2 = stream2.getReader();
-  let lastRead1 = void 0;
-  let lastRead2 = void 0;
-  let stream1Done = false;
-  let stream2Done = false;
-  async function readStream1(controller) {
-    try {
-      if (lastRead1 == null) {
-        lastRead1 = reader1.read();
-      }
-      const result = await lastRead1;
-      lastRead1 = void 0;
-      if (!result.done) {
-        controller.enqueue(result.value);
-      } else {
-        controller.close();
-      }
-    } catch (error) {
-      controller.error(error);
+      controller.error(error);
     }
   }
   async function readStream2(controller) {
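`mergeStreams` survives (now following the relocated stitchable-stream helper rather than the old inline copy); it interleaves two streams as their reads settle instead of concatenating them, which is presumably how the bundle combines an auxiliary data stream with the model output. Illustrative call, not public API (both arguments are assumed ReadableStreams):

  const merged = mergeStreams(dataStream, llmStream);
  // chunks from either input are forwarded in arrival order;
  // merged closes only after both inputs are done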
@@ -4057,7 +3894,7 @@ function runToolsTransformation({
 
 // core/generate-text/stream-text.ts
 var originalGenerateId4 = (0, import_provider_utils9.createIdGenerator)({ prefix: "aitxt", size: 24 });
-
+function streamText({
   model,
   tools,
   toolChoice,
@@ -4067,8 +3904,7 @@ async function streamText({
   maxRetries,
   abortSignal,
   headers,
-
-  maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+  maxSteps = 1,
   experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
@@ -4091,598 +3927,606 @@ async function streamText({
     message: "maxSteps must be at least 1"
   });
 }
-
+return new DefaultStreamTextResult({
   model,
   telemetry,
   headers,
-  settings
-
-
-
-  prompt
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-  }),
-  tracer,
-  endWhenDone: false,
-  fn: async (rootSpan) => {
-    const retry = retryWithExponentialBackoff({ maxRetries });
-    const startStep = async ({
-      responseMessages
-    }) => {
-      const promptFormat = responseMessages.length === 0 ? initialPrompt.type : "messages";
-      const promptMessages = await convertToLanguageModelPrompt({
-        prompt: {
-          type: promptFormat,
-          system: initialPrompt.system,
-          messages: [...initialPrompt.messages, ...responseMessages]
-        },
-        modelSupportsImageUrls: model.supportsImageUrls,
-        modelSupportsUrl: model.supportsUrl
-      });
-      const mode = {
-        type: "regular",
-        ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
-      };
-      const {
-        result: { stream: stream2, warnings: warnings2, rawResponse: rawResponse2, request: request2 },
-        doStreamSpan: doStreamSpan2,
-        startTimestampMs: startTimestampMs2
-      } = await retry(
-        () => recordSpan({
-          name: "ai.streamText.doStream",
-          attributes: selectTelemetryAttributes({
-            telemetry,
-            attributes: {
-              ...assembleOperationName({
-                operationId: "ai.streamText.doStream",
-                telemetry
-              }),
-              ...baseTelemetryAttributes,
-              "ai.prompt.format": {
-                input: () => promptFormat
-              },
-              "ai.prompt.messages": {
-                input: () => JSON.stringify(promptMessages)
-              },
-              "ai.prompt.tools": {
-                // convert the language model level tools:
-                input: () => {
-                  var _a11;
-                  return (_a11 = mode.tools) == null ? void 0 : _a11.map((tool2) => JSON.stringify(tool2));
-                }
-              },
-              "ai.prompt.toolChoice": {
-                input: () => mode.toolChoice != null ? JSON.stringify(mode.toolChoice) : void 0
-              },
-              // standardized gen-ai llm span attributes:
-              "gen_ai.system": model.provider,
-              "gen_ai.request.model": model.modelId,
-              "gen_ai.request.frequency_penalty": settings.frequencyPenalty,
-              "gen_ai.request.max_tokens": settings.maxTokens,
-              "gen_ai.request.presence_penalty": settings.presencePenalty,
-              "gen_ai.request.stop_sequences": settings.stopSequences,
-              "gen_ai.request.temperature": settings.temperature,
-              "gen_ai.request.top_k": settings.topK,
-              "gen_ai.request.top_p": settings.topP
-            }
-          }),
-          tracer,
-          endWhenDone: false,
-          fn: async (doStreamSpan3) => ({
-            startTimestampMs: now2(),
-            // get before the call
-            doStreamSpan: doStreamSpan3,
-            result: await model.doStream({
-              mode,
-              ...prepareCallSettings(settings),
-              inputFormat: promptFormat,
-              prompt: promptMessages,
-              providerMetadata,
-              abortSignal,
-              headers
-            })
-          })
-        })
-      );
-      return {
-        result: {
-          stream: runToolsTransformation({
-            tools,
-            generatorStream: stream2,
-            toolCallStreaming,
-            tracer,
-            telemetry,
-            abortSignal
-          }),
-          warnings: warnings2,
-          request: request2 != null ? request2 : {},
-          rawResponse: rawResponse2
-        },
-        doStreamSpan: doStreamSpan2,
-        startTimestampMs: startTimestampMs2
-      };
-    };
-    const {
-      result: { stream, warnings, rawResponse, request },
-      doStreamSpan,
|
4222
|
-
startTimestampMs
|
4223
|
-
} = await startStep({ responseMessages: [] });
|
4224
|
-
return new DefaultStreamTextResult({
|
4225
|
-
stream,
|
4226
|
-
warnings,
|
4227
|
-
rawResponse,
|
4228
|
-
request,
|
4229
|
-
onChunk,
|
4230
|
-
onFinish,
|
4231
|
-
onStepFinish,
|
4232
|
-
rootSpan,
|
4233
|
-
doStreamSpan,
|
4234
|
-
telemetry,
|
4235
|
-
startTimestampMs,
|
4236
|
-
maxSteps,
|
4237
|
-
continueSteps,
|
4238
|
-
startStep,
|
4239
|
-
modelId: model.modelId,
|
4240
|
-
now: now2,
|
4241
|
-
currentDate,
|
4242
|
-
generateId: generateId3,
|
4243
|
-
tools
|
4244
|
-
});
|
4245
|
-
}
|
3934
|
+
settings,
|
3935
|
+
maxRetries,
|
3936
|
+
abortSignal,
|
3937
|
+
system,
|
3938
|
+
prompt,
|
3939
|
+
messages,
|
3940
|
+
tools,
|
3941
|
+
toolChoice,
|
3942
|
+
toolCallStreaming,
|
3943
|
+
activeTools,
|
3944
|
+
maxSteps,
|
3945
|
+
continueSteps,
|
3946
|
+
providerMetadata,
|
3947
|
+
onChunk,
|
3948
|
+
onFinish,
|
3949
|
+
onStepFinish,
|
3950
|
+
now: now2,
|
3951
|
+
currentDate,
|
3952
|
+
generateId: generateId3
|
4246
3953
|
});
|
4247
3954
|
}
|
4248
3955
|
var DefaultStreamTextResult = class {
|
4249
3956
|
constructor({
|
4250
|
-
|
4251
|
-
warnings,
|
4252
|
-
rawResponse,
|
4253
|
-
request,
|
4254
|
-
onChunk,
|
4255
|
-
onFinish,
|
4256
|
-
onStepFinish,
|
4257
|
-
rootSpan,
|
4258
|
-
doStreamSpan,
|
3957
|
+
model,
|
4259
3958
|
telemetry,
|
4260
|
-
|
3959
|
+
headers,
|
3960
|
+
settings,
|
3961
|
+
maxRetries,
|
3962
|
+
abortSignal,
|
3963
|
+
system,
|
3964
|
+
prompt,
|
3965
|
+
messages,
|
3966
|
+
tools,
|
3967
|
+
toolChoice,
|
3968
|
+
toolCallStreaming,
|
3969
|
+
activeTools,
|
4261
3970
|
maxSteps,
|
4262
3971
|
continueSteps,
|
4263
|
-
|
4264
|
-
|
3972
|
+
providerMetadata,
|
3973
|
+
onChunk,
|
3974
|
+
onFinish,
|
3975
|
+
onStepFinish,
|
4265
3976
|
now: now2,
|
4266
3977
|
currentDate,
|
4267
|
-
generateId: generateId3
|
4268
|
-
tools
|
3978
|
+
generateId: generateId3
|
4269
3979
|
}) {
|
4270
|
-
this.
|
4271
|
-
this.
|
4272
|
-
|
4273
|
-
this.
|
4274
|
-
|
4275
|
-
this.
|
4276
|
-
|
4277
|
-
this.
|
4278
|
-
|
4279
|
-
this.
|
4280
|
-
|
4281
|
-
|
4282
|
-
const
|
4283
|
-
|
4284
|
-
|
4285
|
-
|
4286
|
-
|
4287
|
-
}
|
4288
|
-
|
4289
|
-
|
4290
|
-
|
4291
|
-
|
4292
|
-
this.response = responsePromise;
|
4293
|
-
const {
|
4294
|
-
resolve: resolveResponseMessages,
|
4295
|
-
promise: responseMessagesPromise
|
4296
|
-
} = createResolvablePromise();
|
4297
|
-
this.responseMessages = responseMessagesPromise;
|
4298
|
-
const {
|
4299
|
-
stream: stitchableStream,
|
4300
|
-
addStream,
|
4301
|
-
close: closeStitchableStream
|
4302
|
-
} = createStitchableStream();
|
4303
|
-
this.originalStream = stitchableStream;
|
4304
|
-
const stepResults = [];
|
3980
|
+
this.warningsPromise = new DelayedPromise();
|
3981
|
+
this.usagePromise = new DelayedPromise();
|
3982
|
+
this.finishReasonPromise = new DelayedPromise();
|
3983
|
+
this.providerMetadataPromise = new DelayedPromise();
|
3984
|
+
this.textPromise = new DelayedPromise();
|
3985
|
+
this.toolCallsPromise = new DelayedPromise();
|
3986
|
+
this.toolResultsPromise = new DelayedPromise();
|
3987
|
+
this.requestPromise = new DelayedPromise();
|
3988
|
+
this.responsePromise = new DelayedPromise();
|
3989
|
+
this.stepsPromise = new DelayedPromise();
|
3990
|
+
this.stitchableStream = createStitchableStream();
|
3991
|
+
const tracer = getTracer(telemetry);
|
3992
|
+
const baseTelemetryAttributes = getBaseTelemetryAttributes({
|
3993
|
+
model,
|
3994
|
+
telemetry,
|
3995
|
+
headers,
|
3996
|
+
settings: { ...settings, maxRetries }
|
3997
|
+
});
|
3998
|
+
const initialPrompt = standardizePrompt({
|
3999
|
+
prompt: { system, prompt, messages },
|
4000
|
+
tools
|
4001
|
+
});
|
4305
4002
|
const self = this;
|
4306
|
-
|
4307
|
-
|
4308
|
-
|
4309
|
-
|
4310
|
-
|
4311
|
-
|
4312
|
-
|
4313
|
-
|
4314
|
-
|
4315
|
-
|
4316
|
-
|
4317
|
-
|
4318
|
-
|
4319
|
-
|
4320
|
-
|
4321
|
-
|
4322
|
-
|
4323
|
-
|
4324
|
-
|
4325
|
-
|
4326
|
-
|
4327
|
-
|
4328
|
-
|
4329
|
-
|
4330
|
-
|
4331
|
-
|
4332
|
-
|
4333
|
-
|
4334
|
-
|
4335
|
-
|
4336
|
-
|
4337
|
-
|
4338
|
-
|
4339
|
-
|
4340
|
-
|
4341
|
-
|
4342
|
-
|
4343
|
-
|
4344
|
-
|
4345
|
-
|
4346
|
-
|
4347
|
-
|
4348
|
-
|
4349
|
-
|
4350
|
-
|
4351
|
-
|
4352
|
-
|
4353
|
-
|
4354
|
-
|
4355
|
-
|
4356
|
-
|
4357
|
-
|
4358
|
-
|
4359
|
-
|
4360
|
-
|
4361
|
-
|
4362
|
-
|
4363
|
-
|
4364
|
-
|
4365
|
-
|
4366
|
-
|
4367
|
-
|
4368
|
-
doStreamSpan2.setAttributes({
|
4369
|
-
"ai.response.msToFirstChunk": msToFirstChunk,
|
4370
|
-
// deprecated:
|
4371
|
-
"ai.stream.msToFirstChunk": msToFirstChunk
|
4372
|
-
});
|
4373
|
-
}
|
4374
|
-
if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
|
4375
|
-
return;
|
4376
|
-
}
|
4377
|
-
const chunkType = chunk.type;
|
4378
|
-
switch (chunkType) {
|
4379
|
-
case "text-delta": {
|
4380
|
-
if (continueSteps) {
|
4381
|
-
const trimmedChunkText = inWhitespacePrefix && hasLeadingWhitespace ? chunk.textDelta.trimStart() : chunk.textDelta;
|
4382
|
-
if (trimmedChunkText.length === 0) {
|
4383
|
-
break;
|
4384
|
-
}
|
4385
|
-
inWhitespacePrefix = false;
|
4386
|
-
chunkBuffer += trimmedChunkText;
|
4387
|
-
const split = splitOnLastWhitespace(chunkBuffer);
|
4388
|
-
if (split != null) {
|
4389
|
-
chunkBuffer = split.suffix;
|
4390
|
-
await publishTextChunk({
|
4391
|
-
controller,
|
4392
|
-
chunk: {
|
4393
|
-
type: "text-delta",
|
4394
|
-
textDelta: split.prefix + split.whitespace
|
4395
|
-
}
|
4396
|
-
});
|
4003
|
+
const stepResults = [];
|
4004
|
+
recordSpan({
|
4005
|
+
name: "ai.streamText",
|
4006
|
+
attributes: selectTelemetryAttributes({
|
4007
|
+
telemetry,
|
4008
|
+
attributes: {
|
4009
|
+
...assembleOperationName({ operationId: "ai.streamText", telemetry }),
|
4010
|
+
...baseTelemetryAttributes,
|
4011
|
+
// specific settings that only make sense on the outer level:
|
4012
|
+
"ai.prompt": {
|
4013
|
+
input: () => JSON.stringify({ system, prompt, messages })
|
4014
|
+
},
|
4015
|
+
"ai.settings.maxSteps": maxSteps
|
4016
|
+
}
|
4017
|
+
}),
|
4018
|
+
tracer,
|
4019
|
+
endWhenDone: false,
|
4020
|
+
fn: async (rootSpan) => {
|
4021
|
+
const retry = retryWithExponentialBackoff({ maxRetries });
|
4022
|
+
const startStep = async ({
|
4023
|
+
responseMessages
|
4024
|
+
}) => {
|
4025
|
+
const promptFormat = responseMessages.length === 0 ? initialPrompt.type : "messages";
|
4026
|
+
const promptMessages = await convertToLanguageModelPrompt({
|
4027
|
+
prompt: {
|
4028
|
+
type: promptFormat,
|
4029
|
+
system: initialPrompt.system,
|
4030
|
+
messages: [...initialPrompt.messages, ...responseMessages]
|
4031
|
+
},
|
4032
|
+
modelSupportsImageUrls: model.supportsImageUrls,
|
4033
|
+
modelSupportsUrl: model.supportsUrl
|
4034
|
+
});
|
4035
|
+
const mode = {
|
4036
|
+
type: "regular",
|
4037
|
+
...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
|
4038
|
+
};
|
4039
|
+
const {
|
4040
|
+
result: { stream: stream2, warnings: warnings2, rawResponse: rawResponse2, request: request2 },
|
4041
|
+
doStreamSpan: doStreamSpan2,
|
4042
|
+
startTimestampMs: startTimestampMs2
|
4043
|
+
} = await retry(
|
4044
|
+
() => recordSpan({
|
4045
|
+
name: "ai.streamText.doStream",
|
4046
|
+
attributes: selectTelemetryAttributes({
|
4047
|
+
telemetry,
|
4048
|
+
attributes: {
|
4049
|
+
...assembleOperationName({
|
4050
|
+
operationId: "ai.streamText.doStream",
|
4051
|
+
telemetry
|
4052
|
+
}),
|
4053
|
+
...baseTelemetryAttributes,
|
4054
|
+
"ai.prompt.format": {
|
4055
|
+
input: () => promptFormat
|
4056
|
+
},
|
4057
|
+
"ai.prompt.messages": {
|
4058
|
+
input: () => JSON.stringify(promptMessages)
|
4059
|
+
},
|
4060
|
+
"ai.prompt.tools": {
|
4061
|
+
// convert the language model level tools:
|
4062
|
+
input: () => {
|
4063
|
+
var _a11;
|
4064
|
+
return (_a11 = mode.tools) == null ? void 0 : _a11.map((tool2) => JSON.stringify(tool2));
|
4397
4065
|
}
|
4398
|
-
}
|
4399
|
-
|
4400
|
-
|
4401
|
-
|
4402
|
-
|
4403
|
-
|
4404
|
-
|
4405
|
-
|
4406
|
-
|
4407
|
-
|
4408
|
-
|
4409
|
-
|
4410
|
-
|
4411
|
-
|
4412
|
-
await (onChunk == null ? void 0 : onChunk({ chunk }));
|
4413
|
-
break;
|
4414
|
-
}
|
4415
|
-
case "response-metadata": {
|
4416
|
-
stepResponse = {
|
4417
|
-
id: (_a11 = chunk.id) != null ? _a11 : stepResponse.id,
|
4418
|
-
timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
|
4419
|
-
modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
|
4420
|
-
};
|
4421
|
-
break;
|
4422
|
-
}
|
4423
|
-
case "finish": {
|
4424
|
-
stepUsage = chunk.usage;
|
4425
|
-
stepFinishReason = chunk.finishReason;
|
4426
|
-
stepProviderMetadata = chunk.experimental_providerMetadata;
|
4427
|
-
stepLogProbs = chunk.logprobs;
|
4428
|
-
const msToFinish = now2() - startTimestamp;
|
4429
|
-
doStreamSpan2.addEvent("ai.stream.finish");
|
4430
|
-
doStreamSpan2.setAttributes({
|
4431
|
-
"ai.response.msToFinish": msToFinish,
|
4432
|
-
"ai.response.avgCompletionTokensPerSecond": 1e3 * stepUsage.completionTokens / msToFinish
|
4433
|
-
});
|
4434
|
-
break;
|
4435
|
-
}
|
4436
|
-
case "tool-call-streaming-start":
|
4437
|
-
case "tool-call-delta": {
|
4438
|
-
controller.enqueue(chunk);
|
4439
|
-
await (onChunk == null ? void 0 : onChunk({ chunk }));
|
4440
|
-
break;
|
4441
|
-
}
|
4442
|
-
case "error": {
|
4443
|
-
controller.enqueue(chunk);
|
4444
|
-
stepFinishReason = "error";
|
4445
|
-
break;
|
4446
|
-
}
|
4447
|
-
default: {
|
4448
|
-
const exhaustiveCheck = chunkType;
|
4449
|
-
throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
|
4066
|
+
},
|
4067
|
+
"ai.prompt.toolChoice": {
|
4068
|
+
input: () => mode.toolChoice != null ? JSON.stringify(mode.toolChoice) : void 0
|
4069
|
+
},
|
4070
|
+
// standardized gen-ai llm span attributes:
|
4071
|
+
"gen_ai.system": model.provider,
|
4072
|
+
"gen_ai.request.model": model.modelId,
|
4073
|
+
"gen_ai.request.frequency_penalty": settings.frequencyPenalty,
|
4074
|
+
"gen_ai.request.max_tokens": settings.maxTokens,
|
4075
|
+
"gen_ai.request.presence_penalty": settings.presencePenalty,
|
4076
|
+
"gen_ai.request.stop_sequences": settings.stopSequences,
|
4077
|
+
"gen_ai.request.temperature": settings.temperature,
|
4078
|
+
"gen_ai.request.top_k": settings.topK,
|
4079
|
+
"gen_ai.request.top_p": settings.topP
|
4450
4080
|
}
|
4451
|
-
}
|
4081
|
+
}),
|
4082
|
+
tracer,
|
4083
|
+
endWhenDone: false,
|
4084
|
+
fn: async (doStreamSpan3) => ({
|
4085
|
+
startTimestampMs: now2(),
|
4086
|
+
// get before the call
|
4087
|
+
doStreamSpan: doStreamSpan3,
|
4088
|
+
result: await model.doStream({
|
4089
|
+
mode,
|
4090
|
+
...prepareCallSettings(settings),
|
4091
|
+
inputFormat: promptFormat,
|
4092
|
+
prompt: promptMessages,
|
4093
|
+
providerMetadata,
|
4094
|
+
abortSignal,
|
4095
|
+
headers
|
4096
|
+
})
|
4097
|
+
})
|
4098
|
+
})
|
4099
|
+
);
|
4100
|
+
return {
|
4101
|
+
result: {
|
4102
|
+
stream: runToolsTransformation({
|
4103
|
+
tools,
|
4104
|
+
generatorStream: stream2,
|
4105
|
+
toolCallStreaming,
|
4106
|
+
tracer,
|
4107
|
+
telemetry,
|
4108
|
+
abortSignal
|
4109
|
+
}),
|
4110
|
+
warnings: warnings2,
|
4111
|
+
request: request2 != null ? request2 : {},
|
4112
|
+
rawResponse: rawResponse2
|
4452
4113
|
},
|
4453
|
-
|
4454
|
-
|
4455
|
-
|
4456
|
-
|
4457
|
-
|
4458
|
-
|
4459
|
-
|
4460
|
-
|
4461
|
-
|
4462
|
-
|
4463
|
-
|
4464
|
-
|
4465
|
-
|
4466
|
-
|
4467
|
-
|
4468
|
-
|
4469
|
-
|
4470
|
-
|
4471
|
-
|
4472
|
-
|
4473
|
-
|
4474
|
-
|
4475
|
-
|
4476
|
-
|
4114
|
+
doStreamSpan: doStreamSpan2,
|
4115
|
+
startTimestampMs: startTimestampMs2
|
4116
|
+
};
|
4117
|
+
};
|
4118
|
+
const {
|
4119
|
+
result: { stream, warnings, rawResponse, request },
|
4120
|
+
doStreamSpan,
|
4121
|
+
startTimestampMs
|
4122
|
+
} = await startStep({ responseMessages: [] });
|
4123
|
+
function addStepStream({
|
4124
|
+
stream: stream2,
|
4125
|
+
startTimestamp,
|
4126
|
+
doStreamSpan: doStreamSpan2,
|
4127
|
+
currentStep,
|
4128
|
+
responseMessages,
|
4129
|
+
usage = {
|
4130
|
+
promptTokens: 0,
|
4131
|
+
completionTokens: 0,
|
4132
|
+
totalTokens: 0
|
4133
|
+
},
|
4134
|
+
stepType,
|
4135
|
+
previousStepText = "",
|
4136
|
+
stepRequest,
|
4137
|
+
hasLeadingWhitespace,
|
4138
|
+
warnings: warnings2,
|
4139
|
+
response
|
4140
|
+
}) {
|
4141
|
+
const stepToolCalls = [];
|
4142
|
+
const stepToolResults = [];
|
4143
|
+
let stepFinishReason = "unknown";
|
4144
|
+
let stepUsage = {
|
4145
|
+
promptTokens: 0,
|
4146
|
+
completionTokens: 0,
|
4147
|
+
totalTokens: 0
|
4148
|
+
};
|
4149
|
+
let stepProviderMetadata;
|
4150
|
+
let stepFirstChunk = true;
|
4151
|
+
let stepText = "";
|
4152
|
+
let fullStepText = stepType === "continue" ? previousStepText : "";
|
4153
|
+
let stepLogProbs;
|
4154
|
+
let stepResponse = {
|
4155
|
+
id: generateId3(),
|
4156
|
+
timestamp: currentDate(),
|
4157
|
+
modelId: model.modelId
|
4158
|
+
};
|
4159
|
+
let chunkBuffer = "";
|
4160
|
+
let chunkTextPublished = false;
|
4161
|
+
let inWhitespacePrefix = true;
|
4162
|
+
let hasWhitespaceSuffix = false;
|
4163
|
+
async function publishTextChunk({
|
4164
|
+
controller,
|
4165
|
+
chunk
|
4166
|
+
}) {
|
4167
|
+
controller.enqueue(chunk);
|
4168
|
+
stepText += chunk.textDelta;
|
4169
|
+
fullStepText += chunk.textDelta;
|
4170
|
+
chunkTextPublished = true;
|
4171
|
+
hasWhitespaceSuffix = chunk.textDelta.trimEnd() !== chunk.textDelta;
|
4172
|
+
await (onChunk == null ? void 0 : onChunk({ chunk }));
|
4173
|
+
}
|
4174
|
+
self.stitchableStream.addStream(
|
4175
|
+
stream2.pipeThrough(
|
4176
|
+
new TransformStream({
|
4177
|
+
async transform(chunk, controller) {
|
4178
|
+
var _a11, _b, _c;
|
4179
|
+
if (stepFirstChunk) {
|
4180
|
+
const msToFirstChunk = now2() - startTimestamp;
|
4181
|
+
stepFirstChunk = false;
|
4182
|
+
doStreamSpan2.addEvent("ai.stream.firstChunk", {
|
4183
|
+
"ai.response.msToFirstChunk": msToFirstChunk
|
4184
|
+
});
|
4185
|
+
doStreamSpan2.setAttributes({
|
4186
|
+
"ai.response.msToFirstChunk": msToFirstChunk
|
4187
|
+
});
|
4477
4188
|
}
|
4478
|
-
|
4479
|
-
|
4480
|
-
|
4481
|
-
|
4482
|
-
|
4483
|
-
|
4484
|
-
|
4485
|
-
|
4486
|
-
|
4487
|
-
|
4488
|
-
|
4489
|
-
|
4490
|
-
|
4491
|
-
|
4492
|
-
|
4493
|
-
|
4494
|
-
|
4495
|
-
|
4496
|
-
|
4497
|
-
|
4498
|
-
|
4499
|
-
|
4500
|
-
|
4501
|
-
|
4502
|
-
|
4503
|
-
|
4504
|
-
|
4505
|
-
|
4506
|
-
"gen_ai.usage.input_tokens": stepUsage.promptTokens,
|
4507
|
-
"gen_ai.usage.output_tokens": stepUsage.completionTokens
|
4189
|
+
if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
|
4190
|
+
return;
|
4191
|
+
}
|
4192
|
+
const chunkType = chunk.type;
|
4193
|
+
switch (chunkType) {
|
4194
|
+
case "text-delta": {
|
4195
|
+
if (continueSteps) {
|
4196
|
+
const trimmedChunkText = inWhitespacePrefix && hasLeadingWhitespace ? chunk.textDelta.trimStart() : chunk.textDelta;
|
4197
|
+
if (trimmedChunkText.length === 0) {
|
4198
|
+
break;
|
4199
|
+
}
|
4200
|
+
inWhitespacePrefix = false;
|
4201
|
+
chunkBuffer += trimmedChunkText;
|
4202
|
+
const split = splitOnLastWhitespace(chunkBuffer);
|
4203
|
+
if (split != null) {
|
4204
|
+
chunkBuffer = split.suffix;
|
4205
|
+
await publishTextChunk({
|
4206
|
+
controller,
|
4207
|
+
chunk: {
|
4208
|
+
type: "text-delta",
|
4209
|
+
textDelta: split.prefix + split.whitespace
|
4210
|
+
}
|
4211
|
+
});
|
4212
|
+
}
|
4213
|
+
} else {
|
4214
|
+
await publishTextChunk({ controller, chunk });
|
4215
|
+
}
|
4216
|
+
break;
|
4508
4217
|
}
|
4509
|
-
|
4510
|
-
|
4511
|
-
|
4512
|
-
|
4513
|
-
|
4514
|
-
|
4515
|
-
|
4516
|
-
|
4517
|
-
|
4518
|
-
|
4519
|
-
|
4520
|
-
|
4521
|
-
|
4522
|
-
|
4218
|
+
case "tool-call": {
|
4219
|
+
controller.enqueue(chunk);
|
4220
|
+
stepToolCalls.push(chunk);
|
4221
|
+
await (onChunk == null ? void 0 : onChunk({ chunk }));
|
4222
|
+
break;
|
4223
|
+
}
|
4224
|
+
case "tool-result": {
|
4225
|
+
controller.enqueue(chunk);
|
4226
|
+
stepToolResults.push(chunk);
|
4227
|
+
await (onChunk == null ? void 0 : onChunk({ chunk }));
|
4228
|
+
break;
|
4229
|
+
}
|
4230
|
+
case "response-metadata": {
|
4231
|
+
stepResponse = {
|
4232
|
+
id: (_a11 = chunk.id) != null ? _a11 : stepResponse.id,
|
4233
|
+
timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
|
4234
|
+
modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
|
4235
|
+
};
|
4236
|
+
break;
|
4237
|
+
}
|
4238
|
+
case "finish": {
|
4239
|
+
stepUsage = chunk.usage;
|
4240
|
+
stepFinishReason = chunk.finishReason;
|
4241
|
+
stepProviderMetadata = chunk.experimental_providerMetadata;
|
4242
|
+
stepLogProbs = chunk.logprobs;
|
4243
|
+
const msToFinish = now2() - startTimestamp;
|
4244
|
+
doStreamSpan2.addEvent("ai.stream.finish");
|
4245
|
+
doStreamSpan2.setAttributes({
|
4246
|
+
"ai.response.msToFinish": msToFinish,
|
4247
|
+
"ai.response.avgCompletionTokensPerSecond": 1e3 * stepUsage.completionTokens / msToFinish
|
4248
|
+
});
|
4249
|
+
break;
|
4250
|
+
}
|
4251
|
+
case "tool-call-streaming-start":
|
4252
|
+
case "tool-call-delta": {
|
4253
|
+
controller.enqueue(chunk);
|
4254
|
+
await (onChunk == null ? void 0 : onChunk({ chunk }));
|
4255
|
+
break;
|
4256
|
+
}
|
4257
|
+
case "error": {
|
4258
|
+
controller.enqueue(chunk);
|
4259
|
+
stepFinishReason = "error";
|
4260
|
+
break;
|
4261
|
+
}
|
4262
|
+
default: {
|
4263
|
+
const exhaustiveCheck = chunkType;
|
4264
|
+
throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
|
4265
|
+
}
|
4266
|
+
}
|
4523
4267
|
},
|
4524
|
-
|
4525
|
-
|
4526
|
-
|
4527
|
-
|
4528
|
-
|
4529
|
-
|
4530
|
-
|
4531
|
-
|
4532
|
-
|
4533
|
-
|
4268
|
+
// invoke onFinish callback and resolve toolResults promise when the stream is about to close:
|
4269
|
+
async flush(controller) {
|
4270
|
+
const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
|
4271
|
+
let nextStepType = "done";
|
4272
|
+
if (currentStep + 1 < maxSteps) {
|
4273
|
+
if (continueSteps && stepFinishReason === "length" && // only use continue when there are no tool calls:
|
4274
|
+
stepToolCalls.length === 0) {
|
4275
|
+
nextStepType = "continue";
|
4276
|
+
} else if (
|
4277
|
+
// there are tool calls:
|
4278
|
+
stepToolCalls.length > 0 && // all current tool calls have results:
|
4279
|
+
stepToolResults.length === stepToolCalls.length
|
4280
|
+
) {
|
4281
|
+
nextStepType = "tool-result";
|
4282
|
+
}
|
4283
|
+
}
|
4284
|
+
if (continueSteps && chunkBuffer.length > 0 && (nextStepType !== "continue" || // when the next step is a regular step, publish the buffer
|
4285
|
+
stepType === "continue" && !chunkTextPublished)) {
|
4286
|
+
await publishTextChunk({
|
4287
|
+
controller,
|
4288
|
+
chunk: {
|
4289
|
+
type: "text-delta",
|
4290
|
+
textDelta: chunkBuffer
|
4291
|
+
}
|
4292
|
+
});
|
4293
|
+
chunkBuffer = "";
|
4294
|
+
}
|
4295
|
+
try {
|
4296
|
+
doStreamSpan2.setAttributes(
|
4297
|
+
selectTelemetryAttributes({
|
4298
|
+
telemetry,
|
4299
|
+
attributes: {
|
4300
|
+
"ai.response.finishReason": stepFinishReason,
|
4301
|
+
"ai.response.text": { output: () => stepText },
|
4302
|
+
"ai.response.toolCalls": {
|
4303
|
+
output: () => stepToolCallsJson
|
4304
|
+
},
|
4305
|
+
"ai.response.id": stepResponse.id,
|
4306
|
+
"ai.response.model": stepResponse.modelId,
|
4307
|
+
"ai.response.timestamp": stepResponse.timestamp.toISOString(),
|
4308
|
+
"ai.usage.promptTokens": stepUsage.promptTokens,
|
4309
|
+
"ai.usage.completionTokens": stepUsage.completionTokens,
|
4310
|
+
// standardized gen-ai llm span attributes:
|
4311
|
+
"gen_ai.response.finish_reasons": [stepFinishReason],
|
4312
|
+
"gen_ai.response.id": stepResponse.id,
|
4313
|
+
"gen_ai.response.model": stepResponse.modelId,
|
4314
|
+
"gen_ai.usage.input_tokens": stepUsage.promptTokens,
|
4315
|
+
"gen_ai.usage.output_tokens": stepUsage.completionTokens
|
4316
|
+
}
|
4317
|
+
})
|
4318
|
+
);
|
4319
|
+
} catch (error) {
|
4320
|
+
} finally {
|
4321
|
+
doStreamSpan2.end();
|
4322
|
+
}
|
4323
|
+
controller.enqueue({
|
4324
|
+
type: "step-finish",
|
4325
|
+
finishReason: stepFinishReason,
|
4326
|
+
usage: stepUsage,
|
4327
|
+
experimental_providerMetadata: stepProviderMetadata,
|
4328
|
+
logprobs: stepLogProbs,
|
4329
|
+
response: {
|
4330
|
+
...stepResponse
|
4331
|
+
},
|
4332
|
+
isContinued: nextStepType === "continue"
|
4534
4333
|
});
|
4535
|
-
|
4536
|
-
|
4537
|
-
|
4538
|
-
|
4334
|
+
if (stepType === "continue") {
|
4335
|
+
const lastMessage = responseMessages[responseMessages.length - 1];
|
4336
|
+
if (typeof lastMessage.content === "string") {
|
4337
|
+
lastMessage.content += stepText;
|
4338
|
+
} else {
|
4339
|
+
lastMessage.content.push({
|
4340
|
+
text: stepText,
|
4341
|
+
type: "text"
|
4342
|
+
});
|
4343
|
+
}
|
4344
|
+
} else {
|
4345
|
+
responseMessages.push(
|
4346
|
+
...toResponseMessages({
|
4347
|
+
text: stepText,
|
4348
|
+
tools: tools != null ? tools : {},
|
4349
|
+
toolCalls: stepToolCalls,
|
4350
|
+
toolResults: stepToolResults
|
4351
|
+
})
|
4352
|
+
);
|
4353
|
+
}
|
4354
|
+
const currentStepResult = {
|
4355
|
+
stepType,
|
4539
4356
|
text: stepText,
|
4540
|
-
tools: tools != null ? tools : {},
|
4541
4357
|
toolCalls: stepToolCalls,
|
4542
|
-
toolResults: stepToolResults
|
4543
|
-
|
4544
|
-
|
4545
|
-
|
4546
|
-
|
4547
|
-
|
4548
|
-
|
4549
|
-
|
4550
|
-
|
4551
|
-
|
4552
|
-
|
4553
|
-
|
4554
|
-
|
4555
|
-
|
4556
|
-
|
4557
|
-
|
4558
|
-
|
4559
|
-
|
4560
|
-
|
4561
|
-
|
4562
|
-
|
4563
|
-
|
4564
|
-
|
4565
|
-
|
4566
|
-
|
4567
|
-
|
4568
|
-
|
4569
|
-
|
4570
|
-
|
4571
|
-
|
4572
|
-
|
4573
|
-
|
4574
|
-
|
4575
|
-
|
4576
|
-
|
4577
|
-
|
4578
|
-
|
4579
|
-
|
4580
|
-
|
4581
|
-
|
4582
|
-
|
4583
|
-
|
4584
|
-
|
4585
|
-
|
4586
|
-
|
4587
|
-
usage: combinedUsage,
|
4588
|
-
stepType: nextStepType,
|
4589
|
-
previousStepText: fullStepText,
|
4590
|
-
stepRequest: result.request,
|
4591
|
-
hasLeadingWhitespace: hasWhitespaceSuffix
|
4592
|
-
});
|
4593
|
-
return;
|
4594
|
-
}
|
4595
|
-
try {
|
4596
|
-
controller.enqueue({
|
4597
|
-
type: "finish",
|
4598
|
-
finishReason: stepFinishReason,
|
4599
|
-
usage: combinedUsage,
|
4600
|
-
experimental_providerMetadata: stepProviderMetadata,
|
4601
|
-
logprobs: stepLogProbs,
|
4602
|
-
response: {
|
4603
|
-
...stepResponse
|
4358
|
+
toolResults: stepToolResults,
|
4359
|
+
finishReason: stepFinishReason,
|
4360
|
+
usage: stepUsage,
|
4361
|
+
warnings: warnings2,
|
4362
|
+
logprobs: stepLogProbs,
|
4363
|
+
request: stepRequest,
|
4364
|
+
response: {
|
4365
|
+
...stepResponse,
|
4366
|
+
headers: response == null ? void 0 : response.headers,
|
4367
|
+
// deep clone msgs to avoid mutating past messages in multi-step:
|
4368
|
+
messages: JSON.parse(JSON.stringify(responseMessages))
|
4369
|
+
},
|
4370
|
+
experimental_providerMetadata: stepProviderMetadata,
|
4371
|
+
isContinued: nextStepType === "continue"
|
4372
|
+
};
|
4373
|
+
stepResults.push(currentStepResult);
|
4374
|
+
await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
|
4375
|
+
const combinedUsage = {
|
4376
|
+
promptTokens: usage.promptTokens + stepUsage.promptTokens,
|
4377
|
+
completionTokens: usage.completionTokens + stepUsage.completionTokens,
|
4378
|
+
totalTokens: usage.totalTokens + stepUsage.totalTokens
|
4379
|
+
};
|
4380
|
+
if (nextStepType !== "done") {
|
4381
|
+
const {
|
4382
|
+
result,
|
4383
|
+
doStreamSpan: doStreamSpan3,
|
4384
|
+
startTimestampMs: startTimestamp2
|
4385
|
+
} = await startStep({ responseMessages });
|
4386
|
+
warnings2 = result.warnings;
|
4387
|
+
response = result.rawResponse;
|
4388
|
+
addStepStream({
|
4389
|
+
stream: result.stream,
|
4390
|
+
startTimestamp: startTimestamp2,
|
4391
|
+
doStreamSpan: doStreamSpan3,
|
4392
|
+
currentStep: currentStep + 1,
|
4393
|
+
responseMessages,
|
4394
|
+
usage: combinedUsage,
|
4395
|
+
stepType: nextStepType,
|
4396
|
+
previousStepText: fullStepText,
|
4397
|
+
stepRequest: result.request,
|
4398
|
+
hasLeadingWhitespace: hasWhitespaceSuffix,
|
4399
|
+
warnings: warnings2,
|
4400
|
+
response
|
4401
|
+
});
|
4402
|
+
return;
|
4604
4403
|
}
|
4605
|
-
|
4606
|
-
|
4607
|
-
|
4608
|
-
|
4609
|
-
|
4610
|
-
|
4611
|
-
|
4612
|
-
|
4613
|
-
|
4614
|
-
output: () => stepToolCallsJson
|
4615
|
-
},
|
4616
|
-
"ai.usage.promptTokens": combinedUsage.promptTokens,
|
4617
|
-
"ai.usage.completionTokens": combinedUsage.completionTokens,
|
4618
|
-
// deprecated
|
4619
|
-
"ai.finishReason": stepFinishReason,
|
4620
|
-
"ai.result.text": { output: () => fullStepText },
|
4621
|
-
"ai.result.toolCalls": {
|
4622
|
-
output: () => stepToolCallsJson
|
4404
|
+
try {
|
4405
|
+
controller.enqueue({
|
4406
|
+
type: "finish",
|
4407
|
+
finishReason: stepFinishReason,
|
4408
|
+
usage: combinedUsage,
|
4409
|
+
experimental_providerMetadata: stepProviderMetadata,
|
4410
|
+
logprobs: stepLogProbs,
|
4411
|
+
response: {
|
4412
|
+
...stepResponse
|
4623
4413
|
}
|
4624
|
-
}
|
4625
|
-
|
4626
|
-
|
4627
|
-
|
4628
|
-
|
4629
|
-
|
4630
|
-
|
4631
|
-
|
4632
|
-
|
4633
|
-
|
4634
|
-
|
4635
|
-
|
4636
|
-
|
4637
|
-
|
4638
|
-
|
4639
|
-
|
4640
|
-
|
4641
|
-
|
4642
|
-
|
4643
|
-
|
4644
|
-
|
4645
|
-
|
4646
|
-
|
4647
|
-
|
4648
|
-
|
4649
|
-
|
4650
|
-
|
4651
|
-
|
4652
|
-
|
4653
|
-
|
4654
|
-
|
4655
|
-
|
4656
|
-
|
4657
|
-
|
4658
|
-
|
4659
|
-
|
4660
|
-
|
4661
|
-
|
4662
|
-
|
4663
|
-
|
4664
|
-
|
4665
|
-
|
4666
|
-
|
4667
|
-
|
4668
|
-
|
4669
|
-
|
4670
|
-
|
4671
|
-
|
4414
|
+
});
|
4415
|
+
self.stitchableStream.close();
|
4416
|
+
rootSpan.setAttributes(
|
4417
|
+
selectTelemetryAttributes({
|
4418
|
+
telemetry,
|
4419
|
+
attributes: {
|
4420
|
+
"ai.response.finishReason": stepFinishReason,
|
4421
|
+
"ai.response.text": { output: () => fullStepText },
|
4422
|
+
"ai.response.toolCalls": {
|
4423
|
+
output: () => stepToolCallsJson
|
4424
|
+
},
|
4425
|
+
"ai.usage.promptTokens": combinedUsage.promptTokens,
|
4426
|
+
"ai.usage.completionTokens": combinedUsage.completionTokens
|
4427
|
+
}
|
4428
|
+
})
|
4429
|
+
);
|
4430
|
+
self.usagePromise.resolve(combinedUsage);
|
4431
|
+
self.finishReasonPromise.resolve(stepFinishReason);
|
4432
|
+
self.textPromise.resolve(fullStepText);
|
4433
|
+
self.toolCallsPromise.resolve(stepToolCalls);
|
4434
|
+
self.providerMetadataPromise.resolve(stepProviderMetadata);
|
4435
|
+
self.toolResultsPromise.resolve(stepToolResults);
|
4436
|
+
self.requestPromise.resolve(stepRequest);
|
4437
|
+
self.responsePromise.resolve({
|
4438
|
+
...stepResponse,
|
4439
|
+
headers: rawResponse == null ? void 0 : rawResponse.headers,
|
4440
|
+
messages: responseMessages
|
4441
|
+
});
|
4442
|
+
self.stepsPromise.resolve(stepResults);
|
4443
|
+
self.warningsPromise.resolve(warnings2 != null ? warnings2 : []);
|
4444
|
+
await (onFinish == null ? void 0 : onFinish({
|
4445
|
+
finishReason: stepFinishReason,
|
4446
|
+
logprobs: stepLogProbs,
|
4447
|
+
usage: combinedUsage,
|
4448
|
+
text: fullStepText,
|
4449
|
+
toolCalls: stepToolCalls,
|
4450
|
+
// The tool results are inferred as a never[] type, because they are
|
4451
|
+
// optional and the execute method with an inferred result type is
|
4452
|
+
// optional as well. Therefore we need to cast the toolResults to any.
|
4453
|
+
// The type exposed to the users will be correctly inferred.
|
4454
|
+
toolResults: stepToolResults,
|
4455
|
+
request: stepRequest,
|
4456
|
+
response: {
|
4457
|
+
...stepResponse,
|
4458
|
+
headers: rawResponse == null ? void 0 : rawResponse.headers,
|
4459
|
+
messages: responseMessages
|
4460
|
+
},
|
4461
|
+
warnings: warnings2,
|
4462
|
+
experimental_providerMetadata: stepProviderMetadata,
|
4463
|
+
steps: stepResults
|
4464
|
+
}));
|
4465
|
+
} catch (error) {
|
4466
|
+
controller.error(error);
|
4467
|
+
} finally {
|
4468
|
+
rootSpan.end();
|
4469
|
+
}
|
4470
|
+
}
|
4471
|
+
})
|
4472
|
+
)
|
4473
|
+
);
|
4474
|
+
}
|
4475
|
+
addStepStream({
|
4476
|
+
stream,
|
4477
|
+
startTimestamp: startTimestampMs,
|
4478
|
+
doStreamSpan,
|
4479
|
+
currentStep: 0,
|
4480
|
+
responseMessages: [],
|
4481
|
+
usage: void 0,
|
4482
|
+
stepType: "initial",
|
4483
|
+
stepRequest: request,
|
4484
|
+
hasLeadingWhitespace: false,
|
4485
|
+
warnings,
|
4486
|
+
response: rawResponse
|
4487
|
+
});
|
4488
|
+
}
|
4489
|
+
}).catch((error) => {
|
4490
|
+
self.stitchableStream.addStream(
|
4491
|
+
new ReadableStream({
|
4492
|
+
start(controller) {
|
4493
|
+
controller.error(error);
|
4494
|
+
}
|
4495
|
+
})
|
4672
4496
|
);
|
4673
|
-
|
4674
|
-
addStepStream({
|
4675
|
-
stream,
|
4676
|
-
startTimestamp: startTimestampMs,
|
4677
|
-
doStreamSpan,
|
4678
|
-
currentStep: 0,
|
4679
|
-
responseMessages: [],
|
4680
|
-
usage: void 0,
|
4681
|
-
stepType: "initial",
|
4682
|
-
stepRequest: request,
|
4683
|
-
hasLeadingWhitespace: false
|
4497
|
+
self.stitchableStream.close();
|
4684
4498
|
});
|
4685
4499
|
}
|
4500
|
+
get warnings() {
|
4501
|
+
return this.warningsPromise.value;
|
4502
|
+
}
|
4503
|
+
get usage() {
|
4504
|
+
return this.usagePromise.value;
|
4505
|
+
}
|
4506
|
+
get finishReason() {
|
4507
|
+
return this.finishReasonPromise.value;
|
4508
|
+
}
|
4509
|
+
get experimental_providerMetadata() {
|
4510
|
+
return this.providerMetadataPromise.value;
|
4511
|
+
}
|
4512
|
+
get text() {
|
4513
|
+
return this.textPromise.value;
|
4514
|
+
}
|
4515
|
+
get toolCalls() {
|
4516
|
+
return this.toolCallsPromise.value;
|
4517
|
+
}
|
4518
|
+
get toolResults() {
|
4519
|
+
return this.toolResultsPromise.value;
|
4520
|
+
}
|
4521
|
+
get request() {
|
4522
|
+
return this.requestPromise.value;
|
4523
|
+
}
|
4524
|
+
get response() {
|
4525
|
+
return this.responsePromise.value;
|
4526
|
+
}
|
4527
|
+
get steps() {
|
4528
|
+
return this.stepsPromise.value;
|
4529
|
+
}
|
4686
4530
|
/**
|
4687
4531
|
Split out a new stream from the original stream.
|
4688
4532
|
The original stream is replaced to allow for further splitting,
|
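As the hunk above shows, `streamText` now constructs and returns a `DefaultStreamTextResult` directly, with each result field backed by a `DelayedPromise` that resolves when the stream finishes. A minimal usage sketch under that reading — the `openai` provider helper and the model id are illustrative assumptions, not part of this package:

```js
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

// streamText returns immediately; no await needed to get the result object.
const result = streamText({
  model: openai("gpt-4o-mini"), // illustrative model id
  prompt: "Write a haiku about streams.",
});

// The stream can be consumed right away...
for await (const textDelta of result.textStream) {
  process.stdout.write(textDelta);
}

// ...while the promise-backed getters resolve once streaming has finished.
console.log(await result.text);
console.log(await result.usage);
```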
@@ -4692,8 +4536,8 @@ var DefaultStreamTextResult = class {
   However, the LLM results are expected to be small enough to not cause issues.
    */
   teeStream() {
-    const [stream1, stream2] = this.
-    this.
+    const [stream1, stream2] = this.stitchableStream.stream.tee();
+    this.stitchableStream.stream = stream2;
     return stream1;
   }
   get textStream() {
@@ -4714,37 +4558,18 @@ var DefaultStreamTextResult = class {
       }
     });
   }
-  toAIStream(callbacks = {}) {
-    return this.toDataStreamInternal({ callbacks });
-  }
   toDataStreamInternal({
-    callbacks = {},
     getErrorMessage: getErrorMessage3 = () => "",
     // mask error messages for safety by default
     sendUsage = true
   } = {}) {
     let aggregatedResponse = "";
     const callbackTransformer = new TransformStream({
-      async start() {
-        if (callbacks.onStart)
-          await callbacks.onStart();
-      },
       async transform(chunk, controller) {
         controller.enqueue(chunk);
         if (chunk.type === "text-delta") {
-
-          aggregatedResponse += textDelta;
-          if (callbacks.onToken)
-            await callbacks.onToken(textDelta);
-          if (callbacks.onText)
-            await callbacks.onText(textDelta);
+          aggregatedResponse += chunk.textDelta;
         }
-      },
-      async flush() {
-        if (callbacks.onCompletion)
-          await callbacks.onCompletion(aggregatedResponse);
-        if (callbacks.onFinal)
-          await callbacks.onFinal(aggregatedResponse);
       }
     });
     const streamPartsTransformer = new TransformStream({
@@ -4752,12 +4577,12 @@ var DefaultStreamTextResult = class {
         const chunkType = chunk.type;
         switch (chunkType) {
           case "text-delta": {
-            controller.enqueue((0,
+            controller.enqueue((0, import_ui_utils6.formatStreamPart)("text", chunk.textDelta));
             break;
           }
           case "tool-call-streaming-start": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("tool_call_streaming_start", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName
               })
@@ -4766,7 +4591,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call-delta": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("tool_call_delta", {
                 toolCallId: chunk.toolCallId,
                 argsTextDelta: chunk.argsTextDelta
               })
@@ -4775,7 +4600,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("tool_call", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName,
                 args: chunk.args
@@ -4785,7 +4610,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-result": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("tool_result", {
                 toolCallId: chunk.toolCallId,
                 result: chunk.result
               })
@@ -4794,13 +4619,13 @@ var DefaultStreamTextResult = class {
           }
           case "error": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("error", getErrorMessage3(chunk.error))
             );
             break;
           }
           case "step-finish": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("finish_step", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4813,7 +4638,7 @@ var DefaultStreamTextResult = class {
           }
           case "finish": {
             controller.enqueue(
-              (0,
+              (0, import_ui_utils6.formatStreamPart)("finish_message", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4832,23 +4657,19 @@ var DefaultStreamTextResult = class {
     });
     return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
   }
-  […8 lines truncated in source…]
-    };
-    const data = options == null ? void 0 : "data" in options ? options.data : void 0;
-    const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-    const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+  pipeDataStreamToResponse(response, {
+    status,
+    statusText,
+    headers,
+    data,
+    getErrorMessage: getErrorMessage3,
+    sendUsage
+  } = {}) {
     writeToServerResponse({
       response,
-      status
-      statusText
-      headers: prepareOutgoingHttpHeaders(
+      status,
+      statusText,
+      headers: prepareOutgoingHttpHeaders(headers, {
         contentType: "text/plain; charset=utf-8",
         dataStreamVersion: "v1"
       }),
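`pipeDataStreamToResponse` now takes the response object plus a single flat options object, as the destructured parameters above show. A sketch against a plain Node.js server — the server scaffolding and the `model` binding are illustrative assumptions:

```js
import { createServer } from "node:http";
import { streamText } from "ai";

createServer((req, res) => {
  const result = streamText({
    model, // assumed to be a configured language model in scope
    prompt: "Hello!",
  });
  // Options are passed as one flat object rather than positional arguments.
  result.pipeDataStreamToResponse(res, {
    status: 200,
    headers: { "cache-control": "no-cache" },
  });
}).listen(3000);
```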
@@ -4860,15 +4681,12 @@ var DefaultStreamTextResult = class {
       response,
       status: init == null ? void 0 : init.status,
       statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       }),
       stream: this.textStream.pipeThrough(new TextEncoderStream())
     });
   }
-  toAIStreamResponse(options) {
-    return this.toDataStreamResponse(options);
-  }
   toDataStream(options) {
     const stream = this.toDataStreamInternal({
       getErrorMessage: options == null ? void 0 : options.getErrorMessage,
@@ -4876,22 +4694,20 @@ var DefaultStreamTextResult = class {
     });
     return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
   }
-  toDataStreamResponse(
-  […7 lines truncated in source…]
-    const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-    const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+  toDataStreamResponse({
+    headers,
+    status,
+    statusText,
+    data,
+    getErrorMessage: getErrorMessage3,
+    sendUsage
+  } = {}) {
     return new Response(
       this.toDataStream({ data, getErrorMessage: getErrorMessage3, sendUsage }),
       {
-        status
-        statusText
-        headers: prepareResponseHeaders(
+        status,
+        statusText,
+        headers: prepareResponseHeaders(headers, {
           contentType: "text/plain; charset=utf-8",
           dataStreamVersion: "v1"
         })
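`toDataStreamResponse` likewise moves from overloaded positional arguments to one destructured options object. A hedged sketch of a fetch-style route handler — the handler shape and `model` binding are illustrative:

```js
export async function POST(req) {
  const { messages } = await req.json();
  const result = streamText({ model, messages }); // `model` assumed in scope
  return result.toDataStreamResponse({
    status: 200,
    headers: { "x-example": "1" },
    sendUsage: false, // omit token usage from the finish parts
  });
}
```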
@@ -4902,13 +4718,12 @@ var DefaultStreamTextResult = class {
     var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
     });
   }
 };
-var experimental_streamText = streamText;
 
 // core/middleware/wrap-language-model.ts
 var experimental_wrapLanguageModel = ({
@@ -4995,26 +4810,6 @@ var NoSuchProviderError = class extends import_provider15.NoSuchModelError {
   static isInstance(error) {
     return import_provider15.AISDKError.hasMarker(error, marker10);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isNoSuchProviderError(error) {
-    return error instanceof Error && error.name === name10 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      modelId: this.modelId,
-      modelType: this.modelType,
-      providerId: this.providerId,
-      availableProviders: this.availableProviders
-    };
-  }
 };
 _a10 = symbol10;
 
@@ -5027,15 +4822,11 @@ function experimental_createProviderRegistry(providers) {
   }
   return registry;
 }
-var experimental_createModelRegistry = experimental_createProviderRegistry;
 var DefaultProviderRegistry = class {
   constructor() {
     this.providers = {};
   }
-  registerProvider({
-    id,
-    provider
-  }) {
+  registerProvider({ id, provider }) {
     this.providers[id] = provider;
   }
   getProvider(id) {
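For context, a sketch of how the provider registry is typically consumed; the `openai` provider object is an assumption, not part of this diff, and `languageModel` is assumed to mirror the `textEmbeddingModel` lookup shown in the next hunk:

```js
import { experimental_createProviderRegistry } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider package

const registry = experimental_createProviderRegistry({ openai });

// ids use the "providerId:modelId" form handled by splitId below:
const chat = registry.languageModel("openai:gpt-4o-mini");
const embedder = registry.textEmbeddingModel("openai:text-embedding-3-small");
```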
@@ -5071,10 +4862,10 @@ var DefaultProviderRegistry = class {
     return model;
   }
   textEmbeddingModel(id) {
-    var _a11
+    var _a11;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (
+    const model = (_a11 = provider.textEmbeddingModel) == null ? void 0 : _a11.call(provider, modelId);
     if (model == null) {
       throw new import_provider16.NoSuchModelError({
         modelId: id,
@@ -5115,123 +4906,8 @@ function magnitude(vector) {
   return Math.sqrt(dotProduct(vector, vector));
 }
 
-// streams/ai-stream.ts
-var import_eventsource_parser = require("eventsource-parser");
-function createEventStreamTransformer(customParser) {
-  const textDecoder = new TextDecoder();
-  let eventSourceParser;
-  return new TransformStream({
-    async start(controller) {
-      eventSourceParser = (0, import_eventsource_parser.createParser)(
-        (event) => {
-          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
-          // @see https://replicate.com/docs/streaming
-          event.event === "done") {
-            controller.terminate();
-            return;
-          }
-          if ("data" in event) {
-            const parsedMessage = customParser ? customParser(event.data, {
-              event: event.event
-            }) : event.data;
-            if (parsedMessage)
-              controller.enqueue(parsedMessage);
-          }
-        }
-      );
-    },
-    transform(chunk) {
-      eventSourceParser.feed(textDecoder.decode(chunk));
-    }
-  });
-}
-function createCallbacksTransformer(cb) {
-  const textEncoder = new TextEncoder();
-  let aggregatedResponse = "";
-  const callbacks = cb || {};
-  return new TransformStream({
-    async start() {
-      if (callbacks.onStart)
-        await callbacks.onStart();
-    },
-    async transform(message, controller) {
-      const content = typeof message === "string" ? message : message.content;
-      controller.enqueue(textEncoder.encode(content));
-      aggregatedResponse += content;
-      if (callbacks.onToken)
-        await callbacks.onToken(content);
-      if (callbacks.onText && typeof message === "string") {
-        await callbacks.onText(message);
-      }
-    },
-    async flush() {
-      if (callbacks.onCompletion) {
-        await callbacks.onCompletion(aggregatedResponse);
-      }
-    }
-  });
-}
-function trimStartOfStreamHelper() {
-  let isStreamStart = true;
-  return (text) => {
-    if (isStreamStart) {
-      text = text.trimStart();
-      if (text)
-        isStreamStart = false;
-    }
-    return text;
-  };
-}
-function AIStream(response, customParser, callbacks) {
-  if (!response.ok) {
-    if (response.body) {
-      const reader = response.body.getReader();
-      return new ReadableStream({
-        async start(controller) {
-          const { done, value } = await reader.read();
-          if (!done) {
-            const errorText = new TextDecoder().decode(value);
-            controller.error(new Error(`Response error: ${errorText}`));
-          }
-        }
-      });
-    } else {
-      return new ReadableStream({
-        start(controller) {
-          controller.error(new Error("Response error: No response body"));
-        }
-      });
-    }
-  }
-  const responseBodyStream = response.body || createEmptyReadableStream();
-  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
-}
-function createEmptyReadableStream() {
-  return new ReadableStream({
-    start(controller) {
-      controller.close();
-    }
-  });
-}
-function readableFromAsyncIterable(iterable) {
-  let it = iterable[Symbol.asyncIterator]();
-  return new ReadableStream({
-    async pull(controller) {
-      const { done, value } = await it.next();
-      if (done)
-        controller.close();
-      else
-        controller.enqueue(value);
-    },
-    async cancel(reason) {
-      var _a11;
-      await ((_a11 = it.return) == null ? void 0 : _a11.call(it, reason));
-    }
-  });
-}
-
 // streams/assistant-response.ts
-var
+var import_ui_utils8 = require("@ai-sdk/ui-utils");
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
@@ -5239,17 +4915,17 @@ function AssistantResponse({ threadId, messageId }, process2) {
       const textEncoder = new TextEncoder();
       const sendMessage = (message) => {
         controller.enqueue(
-          textEncoder.encode((0,
+          textEncoder.encode((0, import_ui_utils8.formatStreamPart)("assistant_message", message))
         );
       };
       const sendDataMessage = (message) => {
         controller.enqueue(
-          textEncoder.encode((0,
+          textEncoder.encode((0, import_ui_utils8.formatStreamPart)("data_message", message))
         );
       };
       const sendError = (errorMessage) => {
         controller.enqueue(
-          textEncoder.encode((0,
+          textEncoder.encode((0, import_ui_utils8.formatStreamPart)("error", errorMessage))
         );
       };
       const forwardStream = async (stream2) => {
@@ -5260,7 +4936,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
             case "thread.message.created": {
               controller.enqueue(
                 textEncoder.encode(
-                  (0,
+                  (0, import_ui_utils8.formatStreamPart)("assistant_message", {
                     id: value.data.id,
                     role: "assistant",
                     content: [{ type: "text", text: { value: "" } }]
@@ -5274,7 +4950,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
               if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                 controller.enqueue(
                   textEncoder.encode(
-                    (0,
+                    (0, import_ui_utils8.formatStreamPart)("text", content.text.value)
                   )
                 );
               }
@@ -5291,7 +4967,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
       };
       controller.enqueue(
         textEncoder.encode(
-          (0,
+          (0, import_ui_utils8.formatStreamPart)("assistant_control_data", {
             threadId,
             messageId
           })
@@ -5299,8 +4975,6 @@ function AssistantResponse({ threadId, messageId }, process2) {
       );
       try {
         await process2({
-          threadId,
-          messageId,
           sendMessage,
           sendDataMessage,
           forwardStream
@@ -5323,24 +4997,48 @@ function AssistantResponse({ threadId, messageId }, process2) {
     }
   });
 }
-var experimental_AssistantResponse = AssistantResponse;
 
 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
-  toAIStream: () => toAIStream,
   toDataStream: () => toDataStream,
   toDataStreamResponse: () => toDataStreamResponse
 });
 
+// streams/stream-callbacks.ts
+function createCallbacksTransformer(callbacks = {}) {
+  const textEncoder = new TextEncoder();
+  let aggregatedResponse = "";
+  return new TransformStream({
+    async start() {
+      if (callbacks.onStart)
+        await callbacks.onStart();
+    },
+    async transform(message, controller) {
+      controller.enqueue(textEncoder.encode(message));
+      aggregatedResponse += message;
+      if (callbacks.onToken)
+        await callbacks.onToken(message);
+      if (callbacks.onText && typeof message === "string") {
+        await callbacks.onText(message);
+      }
+    },
+    async flush() {
+      if (callbacks.onCompletion) {
+        await callbacks.onCompletion(aggregatedResponse);
+      }
+    }
+  });
+}
+
 // streams/stream-data.ts
-var
+var import_ui_utils9 = require("@ai-sdk/ui-utils");
 
 // util/constants.ts
 var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
 
 // streams/stream-data.ts
-var
+var StreamData = class {
   constructor() {
     this.encoder = new TextEncoder();
     this.controller = null;
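The new `streams/stream-callbacks.ts` helper operates on plain strings, where the removed `ai-stream.ts` variant also accepted `{ content }` message objects. A minimal sketch of piping through it; note that it is consumed internally by the adapters rather than exported, so the direct call here is illustrative only:

```js
// A simple string source stream for demonstration.
const source = new ReadableStream({
  start(controller) {
    controller.enqueue("Hello, ");
    controller.enqueue("world!");
    controller.close();
  },
});

// Produces an encoded (Uint8Array) stream while firing the callbacks.
const encoded = source.pipeThrough(
  createCallbacksTransformer({
    onStart: async () => console.log("started"),
    onText: async (text) => console.log("text:", text),
    onCompletion: async (full) => console.log("done:", full),
  })
);
```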
@@ -5386,7 +5084,7 @@ var StreamData2 = class {
       throw new Error("Stream controller is not initialized.");
     }
     this.controller.enqueue(
-      this.encoder.encode((0,
+      this.encoder.encode((0, import_ui_utils9.formatStreamPart)("data", [value]))
     );
   }
   appendMessageAnnotation(value) {
@@ -5397,7 +5095,7 @@ var StreamData2 = class {
       throw new Error("Stream controller is not initialized.");
     }
     this.controller.enqueue(
-      this.encoder.encode((0,
+      this.encoder.encode((0, import_ui_utils9.formatStreamPart)("message_annotations", [value]))
     );
   }
 };
@@ -5407,17 +5105,12 @@ function createStreamDataTransformer() {
   return new TransformStream({
     transform: async (chunk, controller) => {
       const message = decoder.decode(chunk);
-      controller.enqueue(encoder.encode((0,
+      controller.enqueue(encoder.encode((0, import_ui_utils9.formatStreamPart)("text", message)));
     }
   });
 }
-var experimental_StreamData = class extends StreamData2 {
-};
 
 // streams/langchain-adapter.ts
-function toAIStream(stream, callbacks) {
-  return toDataStream(stream, callbacks);
-}
 function toDataStream(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
@@ -5450,7 +5143,7 @@ function toDataStreamResponse(stream, options) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
       dataStreamVersion: "v1"
     })
@@ -5475,8 +5168,16 @@ __export(llamaindex_adapter_exports, {
   toDataStream: () => toDataStream2,
   toDataStreamResponse: () => toDataStreamResponse2
 });
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
 function toDataStream2(stream, callbacks) {
-
+  const trimStart = trimStartOfStream();
+  return (0, import_provider_utils10.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
+    new TransformStream({
+      async transform(message, controller) {
+        controller.enqueue(trimStart(message.delta));
+      }
+    })
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 function toDataStreamResponse2(stream, options = {}) {
   var _a11;
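`toDataStream2` now converts the LlamaIndex async iterator with `convertAsyncIteratorToReadableStream` before trimming and encoding its deltas. A hedged usage sketch, assuming the adapter is exposed as `LlamaIndexAdapter` and that `chatEngine` is a configured LlamaIndex chat engine (both assumptions, not confirmed by this diff):

```js
import { LlamaIndexAdapter } from "ai"; // assumed export name

export async function POST(req) {
  const { message } = await req.json();
  const stream = await chatEngine.chat({ message, stream: true }); // illustrative
  return LlamaIndexAdapter.toDataStreamResponse(stream);
}
```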
@@ -5486,79 +5187,26 @@ function toDataStreamResponse2(stream, options = {}) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
       dataStreamVersion: "v1"
     })
   });
 }
-function
-
-
-
-
-
-
-      if (done) {
-        controller.close();
-        return;
-      }
-      const text = trimStartOfStream((_a11 = value.delta) != null ? _a11 : "");
-      if (text) {
-        controller.enqueue(text);
-      }
+function trimStartOfStream() {
+  let isStreamStart = true;
+  return (text) => {
+    if (isStreamStart) {
+      text = text.trimStart();
+      if (text)
+        isStreamStart = false;
     }
-
-  }
-
-// streams/stream-to-response.ts
-function streamToResponse(res, response, init, data) {
-  var _a11;
-  response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
-    "Content-Type": "text/plain; charset=utf-8",
-    ...init == null ? void 0 : init.headers
-  });
-  let processedStream = res;
-  if (data) {
-    processedStream = mergeStreams(data.stream, res);
-  }
-  const reader = processedStream.getReader();
-  function read() {
-    reader.read().then(({ done, value }) => {
-      if (done) {
-        response.end();
-        return;
-      }
-      response.write(value);
-      read();
-    });
-  }
-  read();
+    return text;
+  };
 }
-
-// streams/streaming-text-response.ts
-var StreamingTextResponse = class extends Response {
-  constructor(res, init, data) {
-    let processedStream = res;
-    if (data) {
-      processedStream = mergeStreams(data.stream, res);
-    }
-    super(processedStream, {
-      ...init,
-      status: 200,
-      headers: prepareResponseHeaders(init, {
-        contentType: "text/plain; charset=utf-8"
-      })
-    });
-  }
-};
-
-// streams/index.ts
-var generateId2 = import_provider_utils10.generateId;
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AISDKError,
-  AIStream,
   APICallError,
   AssistantResponse,
   DownloadError,
@@ -5581,25 +5229,15 @@ var generateId2 = import_provider_utils10.generateId;
   NoSuchToolError,
   RetryError,
   StreamData,
-  StreamingTextResponse,
   TypeValidationError,
   UnsupportedFunctionalityError,
   convertToCoreMessages,
   cosineSimilarity,
-  createCallbacksTransformer,
-  createEventStreamTransformer,
   createStreamDataTransformer,
   embed,
   embedMany,
-  experimental_AssistantResponse,
-  experimental_StreamData,
-  experimental_createModelRegistry,
   experimental_createProviderRegistry,
   experimental_customProvider,
-  experimental_generateObject,
-  experimental_generateText,
-  experimental_streamObject,
-  experimental_streamText,
   experimental_wrapLanguageModel,
   formatStreamPart,
   generateId,
@@ -5609,11 +5247,8 @@ var generateId2 = import_provider_utils10.generateId;
   parseStreamPart,
   processDataProtocolResponse,
   readDataStream,
-  readableFromAsyncIterable,
   streamObject,
   streamText,
-
-  tool,
-  trimStartOfStreamHelper
+  tool
 });
 //# sourceMappingURL=index.js.map