ai 4.0.0-canary.1 → 4.0.0-canary.10
This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +122 -0
- package/dist/index.d.mts +91 -670
- package/dist/index.d.ts +91 -670
- package/dist/index.js +151 -586
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +125 -548
- package/dist/index.mjs.map +1 -1
- package/package.json +10 -24
- package/react/dist/index.d.ts +0 -17
- package/rsc/dist/index.d.ts +19 -19
- package/rsc/dist/rsc-server.d.mts +19 -19
- package/rsc/dist/rsc-server.mjs +9 -132
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/react/dist/index.server.d.mts +0 -17
- package/react/dist/index.server.d.ts +0 -17
- package/react/dist/index.server.js +0 -50
- package/react/dist/index.server.js.map +0 -1
- package/react/dist/index.server.mjs +0 -23
- package/react/dist/index.server.mjs.map +0 -1
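Most of the hunks below remove APIs that were already deprecated in ai 3.x (the `experimental_*` aliases, `AIStream`, `StreamingTextResponse`, `streamToResponse`, `toAIStream`/`toAIStreamResponse`, `maxToolRoundtrips`, and the `isXxxError`/`toJSON` helpers on the error classes) and rename a few options. A minimal migration sketch, assuming a fetch-style route handler and the `@ai-sdk/openai` provider (neither appears in this diff; the model id and handler shape are illustrative only):

    // migrate.ts - hypothetical usage sketch, not taken from the package
    import { generateText, streamText } from "ai";
    import { openai } from "@ai-sdk/openai"; // assumed provider package, not part of this diff

    async function summarize(prompt: string) {
      // maxToolRoundtrips was removed; maxSteps replaces it
      // (the removed default mapped maxSteps = maxToolRoundtrips + 1).
      const { text } = await generateText({
        model: openai("gpt-4o-mini"), // illustrative model id
        prompt,
        maxSteps: 3,
      });
      return text;
    }

    export async function POST(req: Request) {
      const { messages } = await req.json();
      const result = await streamText({ model: openai("gpt-4o-mini"), messages });
      // StreamingTextResponse, streamToResponse and toAIStreamResponse() are gone;
      // the data stream response is the remaining path.
      return result.toDataStreamResponse();
    }

The removed `experimental_generateText`, `experimental_streamText`, `experimental_generateObject`, and `experimental_streamObject` exports were plain aliases of `generateText`, `streamText`, `generateObject`, and `streamObject`, as the hunks at the end of this file show.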
package/dist/index.mjs
CHANGED
@@ -6,12 +6,12 @@ var __export = (target, all) => {
 
 // streams/index.ts
 import {
-  formatStreamPart,
+  formatStreamPart as formatStreamPart4,
   parseStreamPart,
   readDataStream,
   processDataProtocolResponse
 } from "@ai-sdk/ui-utils";
-import { generateId as
+import { generateId as generateId2 } from "@ai-sdk/provider-utils";
 
 // core/index.ts
 import { jsonSchema } from "@ai-sdk/ui-utils";
@@ -46,24 +46,6 @@ var RetryError = class extends AISDKError {
   static isInstance(error) {
     return AISDKError.hasMarker(error, marker);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isRetryError(error) {
-    return error instanceof Error && error.name === name && typeof error.reason === "string" && Array.isArray(error.errors);
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      reason: this.reason,
-      lastError: this.lastError,
-      errors: this.errors
-    };
-  }
 };
 _a = symbol;
 
@@ -101,7 +83,7 @@ async function _retryWithExponentialBackoff(f, {
       errors: newErrors
     });
   }
-  if (error instanceof Error && APICallError.
+  if (error instanceof Error && APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
     await delay(delayInMs);
     return _retryWithExponentialBackoff(
       f,
@@ -616,25 +598,6 @@ var DownloadError = class extends AISDKError2 {
   static isInstance(error) {
     return AISDKError2.hasMarker(error, marker2);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isDownloadError(error) {
-    return error instanceof Error && error.name === name2 && typeof error.url === "string" && (error.statusCode == null || typeof error.statusCode === "number") && (error.statusText == null || typeof error.statusText === "string");
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      url: this.url,
-      statusCode: this.statusCode,
-      statusText: this.statusText,
-      cause: this.cause
-    };
-  }
 };
 _a2 = symbol2;
 
@@ -707,24 +670,6 @@ var InvalidDataContentError = class extends AISDKError3 {
   static isInstance(error) {
     return AISDKError3.hasMarker(error, marker3);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidDataContentError(error) {
-    return error instanceof Error && error.name === name3 && error.content != null;
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      cause: this.cause,
-      content: this.content
-    };
-  }
 };
 _a3 = symbol3;
 
@@ -798,23 +743,6 @@ var InvalidMessageRoleError = class extends AISDKError4 {
   static isInstance(error) {
     return AISDKError4.hasMarker(error, marker4);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidMessageRoleError(error) {
-    return error instanceof Error && error.name === name4 && typeof error.role === "string";
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      role: this.role
-    };
-  }
 };
 _a4 = symbol4;
 
@@ -1042,21 +970,6 @@ var InvalidArgumentError = class extends AISDKError5 {
   static isInstance(error) {
     return AISDKError5.hasMarker(error, marker5);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidArgumentError(error) {
-    return error instanceof Error && error.name === name5 && typeof error.parameter === "string" && typeof error.value === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      parameter: this.parameter,
-      value: this.value
-    };
-  }
 };
 _a5 = symbol5;
 
@@ -1489,9 +1402,7 @@ function convertToCoreMessages(messages, options) {
       });
       break;
     }
-    case "
-    case "data":
-    case "tool": {
+    case "data": {
      break;
    }
    default: {
@@ -1579,28 +1490,30 @@ function standardizePrompt({
 }
 
 // core/types/usage.ts
-function calculateLanguageModelUsage(
+function calculateLanguageModelUsage({
+  promptTokens,
+  completionTokens
+}) {
   return {
-    promptTokens
-    completionTokens
-    totalTokens:
+    promptTokens,
+    completionTokens,
+    totalTokens: promptTokens + completionTokens
   };
 }
 
 // core/util/prepare-response-headers.ts
-function prepareResponseHeaders(
+function prepareResponseHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-
-
-
-    headers.set("Content-Type", contentType);
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  if (!responseHeaders.has("Content-Type")) {
+    responseHeaders.set("Content-Type", contentType);
   }
   if (dataStreamVersion !== void 0) {
-
+    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
   }
-  return
+  return responseHeaders;
 }
 
 // core/generate-object/inject-json-instruction.ts
@@ -1638,23 +1551,6 @@ var NoObjectGeneratedError = class extends AISDKError7 {
   static isInstance(error) {
     return AISDKError7.hasMarker(error, marker7);
   }
-  /**
-   * @deprecated Use isInstance instead.
-   */
-  static isNoObjectGeneratedError(error) {
-    return error instanceof Error && error.name === name7;
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      cause: this.cause,
-      message: this.message,
-      stack: this.stack
-    };
-  }
 };
 _a7 = symbol7;
 
@@ -2191,9 +2087,6 @@ async function generateObject({
             "ai.response.timestamp": responseData.timestamp.toISOString(),
             "ai.usage.promptTokens": result2.usage.promptTokens,
             "ai.usage.completionTokens": result2.usage.completionTokens,
-            // deprecated:
-            "ai.finishReason": result2.finishReason,
-            "ai.result.object": { output: () => result2.text },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [result2.finishReason],
             "gen_ai.response.id": responseData.id,
@@ -2298,9 +2191,6 @@ async function generateObject({
             "ai.response.timestamp": responseData.timestamp.toISOString(),
             "ai.usage.promptTokens": result2.usage.promptTokens,
             "ai.usage.completionTokens": result2.usage.completionTokens,
-            // deprecated:
-            "ai.finishReason": result2.finishReason,
-            "ai.result.object": { output: () => objectText },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [result2.finishReason],
             "gen_ai.response.id": responseData.id,
@@ -2354,12 +2244,7 @@ async function generateObject({
               output: () => JSON.stringify(validationResult.value)
             },
             "ai.usage.promptTokens": usage.promptTokens,
-            "ai.usage.completionTokens": usage.completionTokens
-            // deprecated:
-            "ai.finishReason": finishReason,
-            "ai.result.object": {
-              output: () => JSON.stringify(validationResult.value)
-            }
+            "ai.usage.completionTokens": usage.completionTokens
          }
        })
      );
@@ -2388,22 +2273,18 @@ var DefaultGenerateObjectResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
     this.response = options.response;
     this.request = options.request;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
   toJsonResponse(init) {
     var _a11;
     return new Response(JSON.stringify(this.object), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "application/json; charset=utf-8"
       })
     });
   }
 };
-var experimental_generateObject = generateObject;
 
 // core/generate-object/stream-object.ts
 import { createIdGenerator as createIdGenerator2 } from "@ai-sdk/provider-utils";
@@ -2472,23 +2353,23 @@ function now() {
 }
 
 // core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(
+function prepareOutgoingHttpHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-  const
-  if (
-  for (const [key, value] of Object.entries(
-
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
     }
   }
-  if (
-
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
   }
   if (dataStreamVersion !== void 0) {
-
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
   }
-  return
+  return outgoingHeaders;
 }
 
 // core/util/write-to-server-response.ts
@@ -2769,7 +2650,6 @@ var DefaultStreamObjectResult = class {
     generateId: generateId3
   }) {
     this.warnings = warnings;
-    this.rawResponse = rawResponse;
     this.outputStrategy = outputStrategy;
     this.request = Promise.resolve(request);
     this.objectPromise = new DelayedPromise();
@@ -2902,9 +2782,6 @@ var DefaultStreamObjectResult = class {
             "ai.response.timestamp": response.timestamp.toISOString(),
             "ai.usage.promptTokens": finalUsage.promptTokens,
             "ai.usage.completionTokens": finalUsage.completionTokens,
-            // deprecated
-            "ai.finishReason": finishReason,
-            "ai.result.object": { output: () => JSON.stringify(object) },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [finishReason],
             "gen_ai.response.id": response.id,
@@ -2923,9 +2800,7 @@ var DefaultStreamObjectResult = class {
             "ai.usage.completionTokens": finalUsage.completionTokens,
             "ai.response.object": {
               output: () => JSON.stringify(object)
-            }
-            // deprecated
-            "ai.result.object": { output: () => JSON.stringify(object) }
+            }
          }
        })
      );
@@ -2933,7 +2808,6 @@ var DefaultStreamObjectResult = class {
           usage: finalUsage,
           object,
           error,
-          rawResponse,
           response: {
             ...response,
             headers: rawResponse == null ? void 0 : rawResponse.headers
@@ -3010,7 +2884,7 @@ var DefaultStreamObjectResult = class {
       response,
       status: init == null ? void 0 : init.status,
       statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
        contentType: "text/plain; charset=utf-8"
      }),
      stream: this.textStream.pipeThrough(new TextEncoderStream())
@@ -3020,13 +2894,12 @@ var DefaultStreamObjectResult = class {
     var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
     });
   }
 };
-var experimental_streamObject = streamObject;
 
 // core/generate-text/generate-text.ts
 import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils";
@@ -3069,25 +2942,6 @@ var InvalidToolArgumentsError = class extends AISDKError8 {
   static isInstance(error) {
     return AISDKError8.hasMarker(error, marker8);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isInvalidToolArgumentsError(error) {
-    return error instanceof Error && error.name === name8 && typeof error.toolName === "string" && typeof error.toolArgs === "string";
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      cause: this.cause,
-      stack: this.stack,
-      toolName: this.toolName,
-      toolArgs: this.toolArgs
-    };
-  }
 };
 _a8 = symbol8;
 
@@ -3111,24 +2965,6 @@ var NoSuchToolError = class extends AISDKError9 {
   static isInstance(error) {
     return AISDKError9.hasMarker(error, marker9);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isNoSuchToolError(error) {
-    return error instanceof Error && error.name === name9 && "toolName" in error && error.toolName != void 0 && typeof error.name === "string";
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      toolName: this.toolName,
-      availableTools: this.availableTools
-    };
-  }
 };
 _a9 = symbol9;
 
@@ -3281,11 +3117,8 @@ async function generateText({
   maxRetries,
   abortSignal,
   headers,
-
-
-  maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
-  experimental_continuationSteps,
-  experimental_continueSteps: continueSteps = experimental_continuationSteps != null ? experimental_continuationSteps : false,
+  maxSteps = 1,
+  experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
   experimental_activeTools: activeTools,
@@ -3437,14 +3270,6 @@ async function generateText({
             "ai.response.timestamp": responseData.timestamp.toISOString(),
             "ai.usage.promptTokens": result.usage.promptTokens,
             "ai.usage.completionTokens": result.usage.completionTokens,
-            // deprecated:
-            "ai.finishReason": result.finishReason,
-            "ai.result.text": {
-              output: () => result.text
-            },
-            "ai.result.toolCalls": {
-              output: () => JSON.stringify(result.toolCalls)
-            },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [result.finishReason],
             "gen_ai.response.id": responseData.id,
@@ -3547,15 +3372,7 @@ async function generateText({
               output: () => JSON.stringify(currentModelResponse.toolCalls)
             },
             "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
-            "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
-            // deprecated:
-            "ai.finishReason": currentModelResponse.finishReason,
-            "ai.result.text": {
-              output: () => currentModelResponse.text
-            },
-            "ai.result.toolCalls": {
-              output: () => JSON.stringify(currentModelResponse.toolCalls)
-            }
+            "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
          }
        })
      );
@@ -3573,7 +3390,6 @@ async function generateText({
       messages: responseMessages
     },
     logprobs: currentModelResponse.logprobs,
-    responseMessages,
     steps,
     providerMetadata: currentModelResponse.providerMetadata
   });
@@ -3650,20 +3466,15 @@ var DefaultGenerateTextResult = class {
     this.warnings = options.warnings;
     this.request = options.request;
     this.response = options.response;
-    this.responseMessages = options.responseMessages;
-    this.roundtrips = options.steps;
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
 };
-var experimental_generateText = generateText;
 
 // core/generate-text/stream-text.ts
 import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
+import { formatStreamPart } from "@ai-sdk/ui-utils";
 
 // core/util/create-stitchable-stream.ts
 function createStitchableStream() {
@@ -4020,8 +3831,7 @@ async function streamText({
   maxRetries,
   abortSignal,
   headers,
-
-  maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+  maxSteps = 1,
   experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
@@ -4220,7 +4030,7 @@ var DefaultStreamTextResult = class {
     generateId: generateId3,
     tools
   }) {
-    this.
+    this.rawWarnings = warnings;
     this.rawResponse = rawResponse;
     const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
     this.usage = usagePromise;
@@ -4243,11 +4053,8 @@ var DefaultStreamTextResult = class {
     this.request = requestPromise;
     const { resolve: resolveResponse, promise: responsePromise } = createResolvablePromise();
     this.response = responsePromise;
-    const {
-
-      promise: responseMessagesPromise
-    } = createResolvablePromise();
-    this.responseMessages = responseMessagesPromise;
+    const { resolve: resolveWarnings, promise: warningsPromise } = createResolvablePromise();
+    this.warnings = warningsPromise;
     const {
       stream: stitchableStream,
       addStream,
@@ -4314,14 +4121,10 @@ var DefaultStreamTextResult = class {
            const msToFirstChunk = now2() - startTimestamp;
            stepFirstChunk = false;
            doStreamSpan2.addEvent("ai.stream.firstChunk", {
-              "ai.response.msToFirstChunk": msToFirstChunk
-              // deprecated:
-              "ai.stream.msToFirstChunk": msToFirstChunk
+              "ai.response.msToFirstChunk": msToFirstChunk
            });
            doStreamSpan2.setAttributes({
-              "ai.response.msToFirstChunk": msToFirstChunk
-              // deprecated:
-              "ai.stream.msToFirstChunk": msToFirstChunk
+              "ai.response.msToFirstChunk": msToFirstChunk
            });
          }
          if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
@@ -4405,7 +4208,7 @@ var DefaultStreamTextResult = class {
        },
        // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
        async flush(controller) {
-          var _a11;
+          var _a11, _b;
          const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
          let nextStepType = "done";
          if (currentStep + 1 < maxSteps) {
@@ -4446,12 +4249,6 @@ var DefaultStreamTextResult = class {
             "ai.response.timestamp": stepResponse.timestamp.toISOString(),
             "ai.usage.promptTokens": stepUsage.promptTokens,
             "ai.usage.completionTokens": stepUsage.completionTokens,
-            // deprecated
-            "ai.finishReason": stepFinishReason,
-            "ai.result.text": { output: () => stepText },
-            "ai.result.toolCalls": {
-              output: () => stepToolCallsJson
-            },
             // standardized gen-ai llm span attributes:
             "gen_ai.response.finish_reasons": [stepFinishReason],
             "gen_ai.response.id": stepResponse.id,
@@ -4503,10 +4300,9 @@ var DefaultStreamTextResult = class {
             toolResults: stepToolResults,
             finishReason: stepFinishReason,
             usage: stepUsage,
-            warnings: self.
+            warnings: self.rawWarnings,
             logprobs: stepLogProbs,
             request: stepRequest,
-            rawResponse: self.rawResponse,
             response: {
               ...stepResponse,
               headers: (_a11 = self.rawResponse) == null ? void 0 : _a11.headers,
@@ -4529,7 +4325,7 @@ var DefaultStreamTextResult = class {
            doStreamSpan: doStreamSpan3,
            startTimestampMs: startTimestamp2
          } = await startStep({ responseMessages });
-          self.
+          self.rawWarnings = result.warnings;
          self.rawResponse = result.rawResponse;
          addStepStream({
            stream: result.stream,
@@ -4567,13 +4363,7 @@ var DefaultStreamTextResult = class {
               output: () => stepToolCallsJson
             },
             "ai.usage.promptTokens": combinedUsage.promptTokens,
-            "ai.usage.completionTokens": combinedUsage.completionTokens
-            // deprecated
-            "ai.finishReason": stepFinishReason,
-            "ai.result.text": { output: () => fullStepText },
-            "ai.result.toolCalls": {
-              output: () => stepToolCallsJson
-            }
+            "ai.usage.completionTokens": combinedUsage.completionTokens
          }
        })
      );
@@ -4590,7 +4380,7 @@ var DefaultStreamTextResult = class {
            messages: responseMessages
          });
          resolveSteps(stepResults);
-
+          resolveWarnings((_b = self.rawWarnings) != null ? _b : []);
          await (onFinish == null ? void 0 : onFinish({
            finishReason: stepFinishReason,
            logprobs: stepLogProbs,
@@ -4603,7 +4393,6 @@ var DefaultStreamTextResult = class {
            // The type exposed to the users will be correctly inferred.
            toolResults: stepToolResults,
            request: stepRequest,
-            rawResponse,
            response: {
              ...stepResponse,
              headers: rawResponse == null ? void 0 : rawResponse.headers,
@@ -4611,8 +4400,7 @@ var DefaultStreamTextResult = class {
            },
            warnings,
            experimental_providerMetadata: stepProviderMetadata,
-            steps: stepResults
-            responseMessages
+            steps: stepResults
          }));
        } catch (error) {
          controller.error(error);
@@ -4667,37 +4455,18 @@ var DefaultStreamTextResult = class {
       }
     });
   }
-  toAIStream(callbacks = {}) {
-    return this.toDataStreamInternal({ callbacks });
-  }
   toDataStreamInternal({
-    callbacks = {},
     getErrorMessage: getErrorMessage3 = () => "",
     // mask error messages for safety by default
     sendUsage = true
   } = {}) {
     let aggregatedResponse = "";
     const callbackTransformer = new TransformStream({
-      async start() {
-        if (callbacks.onStart)
-          await callbacks.onStart();
-      },
       async transform(chunk, controller) {
         controller.enqueue(chunk);
         if (chunk.type === "text-delta") {
-
-          aggregatedResponse += textDelta;
-          if (callbacks.onToken)
-            await callbacks.onToken(textDelta);
-          if (callbacks.onText)
-            await callbacks.onText(textDelta);
+          aggregatedResponse += chunk.textDelta;
         }
-      },
-      async flush() {
-        if (callbacks.onCompletion)
-          await callbacks.onCompletion(aggregatedResponse);
-        if (callbacks.onFinal)
-          await callbacks.onFinal(aggregatedResponse);
       }
     });
     const streamPartsTransformer = new TransformStream({
@@ -4785,23 +4554,19 @@ var DefaultStreamTextResult = class {
     });
     return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
   }
-
-
-
-
-
-
-
-
-    };
-    const data = options == null ? void 0 : "data" in options ? options.data : void 0;
-    const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-    const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+  pipeDataStreamToResponse(response, {
+    status,
+    statusText,
+    headers,
+    data,
+    getErrorMessage: getErrorMessage3,
+    sendUsage
+  } = {}) {
     writeToServerResponse({
       response,
-      status
-      statusText
-      headers: prepareOutgoingHttpHeaders(
+      status,
+      statusText,
+      headers: prepareOutgoingHttpHeaders(headers, {
        contentType: "text/plain; charset=utf-8",
        dataStreamVersion: "v1"
      }),
@@ -4813,15 +4578,12 @@ var DefaultStreamTextResult = class {
       response,
       status: init == null ? void 0 : init.status,
       statusText: init == null ? void 0 : init.statusText,
-      headers: prepareOutgoingHttpHeaders(init, {
+      headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       }),
       stream: this.textStream.pipeThrough(new TextEncoderStream())
     });
   }
-  toAIStreamResponse(options) {
-    return this.toDataStreamResponse(options);
-  }
   toDataStream(options) {
     const stream = this.toDataStreamInternal({
       getErrorMessage: options == null ? void 0 : options.getErrorMessage,
@@ -4829,22 +4591,20 @@ var DefaultStreamTextResult = class {
     });
     return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
   }
-  toDataStreamResponse(
-
-
-
-
-
-
-
-    const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-    const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+  toDataStreamResponse({
+    headers,
+    status,
+    statusText,
+    data,
+    getErrorMessage: getErrorMessage3,
+    sendUsage
+  } = {}) {
     return new Response(
       this.toDataStream({ data, getErrorMessage: getErrorMessage3, sendUsage }),
       {
-        status
-        statusText
-        headers: prepareResponseHeaders(
+        status,
+        statusText,
+        headers: prepareResponseHeaders(headers, {
          contentType: "text/plain; charset=utf-8",
          dataStreamVersion: "v1"
        })
@@ -4855,13 +4615,12 @@ var DefaultStreamTextResult = class {
     var _a11;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
     });
   }
 };
-var experimental_streamText = streamText;
 
 // core/middleware/wrap-language-model.ts
 var experimental_wrapLanguageModel = ({
@@ -4948,26 +4707,6 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
   static isInstance(error) {
     return AISDKError11.hasMarker(error, marker10);
   }
-  /**
-   * @deprecated use `isInstance` instead
-   */
-  static isNoSuchProviderError(error) {
-    return error instanceof Error && error.name === name10 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
-  }
-  /**
-   * @deprecated Do not use this method. It will be removed in the next major version.
-   */
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      modelId: this.modelId,
-      modelType: this.modelType,
-      providerId: this.providerId,
-      availableProviders: this.availableProviders
-    };
-  }
 };
 _a10 = symbol10;
 
@@ -4980,15 +4719,11 @@ function experimental_createProviderRegistry(providers) {
   }
   return registry;
 }
-var experimental_createModelRegistry = experimental_createProviderRegistry;
 var DefaultProviderRegistry = class {
   constructor() {
     this.providers = {};
   }
-  registerProvider({
-    id,
-    provider
-  }) {
+  registerProvider({ id, provider }) {
     this.providers[id] = provider;
   }
   getProvider(id) {
@@ -5024,10 +4759,10 @@ var DefaultProviderRegistry = class {
     return model;
   }
   textEmbeddingModel(id) {
-    var _a11
+    var _a11;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (
+    const model = (_a11 = provider.textEmbeddingModel) == null ? void 0 : _a11.call(provider, modelId);
     if (model == null) {
       throw new NoSuchModelError4({
         modelId: id,
@@ -5068,123 +4803,6 @@ function magnitude(vector) {
   return Math.sqrt(dotProduct(vector, vector));
 }
 
-// streams/ai-stream.ts
-import {
-  createParser
-} from "eventsource-parser";
-function createEventStreamTransformer(customParser) {
-  const textDecoder = new TextDecoder();
-  let eventSourceParser;
-  return new TransformStream({
-    async start(controller) {
-      eventSourceParser = createParser(
-        (event) => {
-          if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
-          // @see https://replicate.com/docs/streaming
-          event.event === "done") {
-            controller.terminate();
-            return;
-          }
-          if ("data" in event) {
-            const parsedMessage = customParser ? customParser(event.data, {
-              event: event.event
-            }) : event.data;
-            if (parsedMessage)
-              controller.enqueue(parsedMessage);
-          }
-        }
-      );
-    },
-    transform(chunk) {
-      eventSourceParser.feed(textDecoder.decode(chunk));
-    }
-  });
-}
-function createCallbacksTransformer(cb) {
-  const textEncoder = new TextEncoder();
-  let aggregatedResponse = "";
-  const callbacks = cb || {};
-  return new TransformStream({
-    async start() {
-      if (callbacks.onStart)
-        await callbacks.onStart();
-    },
-    async transform(message, controller) {
-      const content = typeof message === "string" ? message : message.content;
-      controller.enqueue(textEncoder.encode(content));
-      aggregatedResponse += content;
-      if (callbacks.onToken)
-        await callbacks.onToken(content);
-      if (callbacks.onText && typeof message === "string") {
-        await callbacks.onText(message);
-      }
-    },
-    async flush() {
-      if (callbacks.onCompletion) {
-        await callbacks.onCompletion(aggregatedResponse);
-      }
-    }
-  });
-}
-function trimStartOfStreamHelper() {
-  let isStreamStart = true;
-  return (text) => {
-    if (isStreamStart) {
-      text = text.trimStart();
-      if (text)
-        isStreamStart = false;
-    }
-    return text;
-  };
-}
-function AIStream(response, customParser, callbacks) {
-  if (!response.ok) {
-    if (response.body) {
-      const reader = response.body.getReader();
-      return new ReadableStream({
-        async start(controller) {
-          const { done, value } = await reader.read();
-          if (!done) {
-            const errorText = new TextDecoder().decode(value);
-            controller.error(new Error(`Response error: ${errorText}`));
-          }
-        }
-      });
-    } else {
-      return new ReadableStream({
-        start(controller) {
-          controller.error(new Error("Response error: No response body"));
-        }
-      });
-    }
-  }
-  const responseBodyStream = response.body || createEmptyReadableStream();
-  return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
-}
-function createEmptyReadableStream() {
-  return new ReadableStream({
-    start(controller) {
-      controller.close();
-    }
-  });
-}
-function readableFromAsyncIterable(iterable) {
-  let it = iterable[Symbol.asyncIterator]();
-  return new ReadableStream({
-    async pull(controller) {
-      const { done, value } = await it.next();
-      if (done)
-        controller.close();
-      else
-        controller.enqueue(value);
-    },
-    async cancel(reason) {
-      var _a11;
-      await ((_a11 = it.return) == null ? void 0 : _a11.call(it, reason));
-    }
-  });
-}
-
 // streams/assistant-response.ts
 import {
   formatStreamPart as formatStreamPart2
@@ -5256,8 +4874,6 @@ function AssistantResponse({ threadId, messageId }, process2) {
   );
   try {
     await process2({
-      threadId,
-      messageId,
       sendMessage,
       sendDataMessage,
       forwardStream
@@ -5280,16 +4896,40 @@ function AssistantResponse({ threadId, messageId }, process2) {
     }
   });
 }
-var experimental_AssistantResponse = AssistantResponse;
 
 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
-  toAIStream: () => toAIStream,
   toDataStream: () => toDataStream,
   toDataStreamResponse: () => toDataStreamResponse
 });
 
+// streams/stream-callbacks.ts
+function createCallbacksTransformer(callbacks = {}) {
+  const textEncoder = new TextEncoder();
+  let aggregatedResponse = "";
+  return new TransformStream({
+    async start() {
+      if (callbacks.onStart)
+        await callbacks.onStart();
+    },
+    async transform(message, controller) {
+      controller.enqueue(textEncoder.encode(message));
+      aggregatedResponse += message;
+      if (callbacks.onToken)
+        await callbacks.onToken(message);
+      if (callbacks.onText && typeof message === "string") {
+        await callbacks.onText(message);
+      }
+    },
+    async flush() {
+      if (callbacks.onCompletion) {
+        await callbacks.onCompletion(aggregatedResponse);
+      }
+    }
+  });
+}
+
 // streams/stream-data.ts
 import { formatStreamPart as formatStreamPart3 } from "@ai-sdk/ui-utils";
 
@@ -5297,7 +4937,7 @@ import { formatStreamPart as formatStreamPart3 } from "@ai-sdk/ui-utils";
 var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
 
 // streams/stream-data.ts
-var
+var StreamData = class {
   constructor() {
     this.encoder = new TextEncoder();
     this.controller = null;
@@ -5368,13 +5008,8 @@ function createStreamDataTransformer() {
     }
   });
 }
-var experimental_StreamData = class extends StreamData2 {
-};
 
 // streams/langchain-adapter.ts
-function toAIStream(stream, callbacks) {
-  return toDataStream(stream, callbacks);
-}
 function toDataStream(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
@@ -5407,7 +5042,7 @@ function toDataStreamResponse(stream, options) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
      contentType: "text/plain; charset=utf-8",
      dataStreamVersion: "v1"
    })
@@ -5432,8 +5067,16 @@ __export(llamaindex_adapter_exports, {
   toDataStream: () => toDataStream2,
   toDataStreamResponse: () => toDataStreamResponse2
 });
+import { convertAsyncIteratorToReadableStream } from "@ai-sdk/provider-utils";
 function toDataStream2(stream, callbacks) {
-
+  const trimStart = trimStartOfStream();
+  return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(
+    new TransformStream({
+      async transform(message, controller) {
+        controller.enqueue(trimStart(message.delta));
+      }
+    })
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 function toDataStreamResponse2(stream, options = {}) {
   var _a11;
@@ -5443,78 +5086,25 @@ function toDataStreamResponse2(stream, options = {}) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
      contentType: "text/plain; charset=utf-8",
      dataStreamVersion: "v1"
    })
  });
 }
-function
-
-
-
-
-
-
-      if (done) {
-        controller.close();
-        return;
-      }
-      const text = trimStartOfStream((_a11 = value.delta) != null ? _a11 : "");
-      if (text) {
-        controller.enqueue(text);
-      }
+function trimStartOfStream() {
+  let isStreamStart = true;
+  return (text) => {
+    if (isStreamStart) {
+      text = text.trimStart();
+      if (text)
+        isStreamStart = false;
     }
-
-  }
-
-// streams/stream-to-response.ts
-function streamToResponse(res, response, init, data) {
-  var _a11;
-  response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
-    "Content-Type": "text/plain; charset=utf-8",
-    ...init == null ? void 0 : init.headers
-  });
-  let processedStream = res;
-  if (data) {
-    processedStream = mergeStreams(data.stream, res);
-  }
-  const reader = processedStream.getReader();
-  function read() {
-    reader.read().then(({ done, value }) => {
-      if (done) {
-        response.end();
-        return;
-      }
-      response.write(value);
-      read();
-    });
-  }
-  read();
+    return text;
+  };
 }
-
-// streams/streaming-text-response.ts
-var StreamingTextResponse = class extends Response {
-  constructor(res, init, data) {
-    let processedStream = res;
-    if (data) {
-      processedStream = mergeStreams(data.stream, res);
-    }
-    super(processedStream, {
-      ...init,
-      status: 200,
-      headers: prepareResponseHeaders(init, {
-        contentType: "text/plain; charset=utf-8"
-      })
-    });
-  }
-};
-
-// streams/index.ts
-var generateId2 = generateIdImpl;
 export {
   AISDKError10 as AISDKError,
-  AIStream,
   APICallError2 as APICallError,
   AssistantResponse,
   DownloadError,
@@ -5536,28 +5126,18 @@
   NoSuchProviderError,
   NoSuchToolError,
   RetryError,
-
-  StreamingTextResponse,
+  StreamData,
   TypeValidationError2 as TypeValidationError,
   UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
   convertToCoreMessages,
   cosineSimilarity,
-  createCallbacksTransformer,
-  createEventStreamTransformer,
   createStreamDataTransformer,
   embed,
   embedMany,
-  experimental_AssistantResponse,
-  experimental_StreamData,
-  experimental_createModelRegistry,
   experimental_createProviderRegistry,
   experimental_customProvider,
-  experimental_generateObject,
-  experimental_generateText,
-  experimental_streamObject,
-  experimental_streamText,
   experimental_wrapLanguageModel,
-  formatStreamPart,
+  formatStreamPart4 as formatStreamPart,
   generateId2 as generateId,
   generateObject,
   generateText,
@@ -5565,11 +5145,8 @@
   parseStreamPart,
   processDataProtocolResponse,
   readDataStream,
-  readableFromAsyncIterable,
   streamObject,
   streamText,
-
-  tool,
-  trimStartOfStreamHelper
+  tool
 };
 //# sourceMappingURL=index.mjs.map
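The response helpers in this bundle now receive the headers value directly (`prepareResponseHeaders(init == null ? void 0 : init.headers, …)`) instead of the whole `ResponseInit`, and `toDataStreamResponse` / `pipeDataStreamToResponse` take a single destructured options object (`{ headers, status, statusText, data, getErrorMessage, sendUsage }`). A hedged sketch of calling the new shape; the route scaffolding, header value, and `StreamData` payload are illustrative and not taken from the diff:

    // stream-route.ts - hypothetical usage sketch
    import { streamText, StreamData } from "ai";
    import { openai } from "@ai-sdk/openai"; // assumed provider package, not part of this diff

    export async function POST(req: Request) {
      const { messages } = await req.json();

      // StreamData is exported directly now (the experimental_StreamData alias was removed).
      const data = new StreamData();
      data.append({ startedAt: Date.now() });

      const result = await streamText({
        model: openai("gpt-4o-mini"), // illustrative model id
        messages,
        onFinish: async () => {
          await data.close();
        },
      });

      // One flat options object, matching the destructured
      // toDataStreamResponse({ headers, status, statusText, data, getErrorMessage, sendUsage } = {})
      // signature introduced in this version.
      return result.toDataStreamResponse({
        headers: { "x-example-header": "1" },
        data,
        sendUsage: true,
      });
    }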