ai 3.3.5 → 3.3.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +95 -64
- package/dist/index.d.ts +95 -64
- package/dist/index.js +253 -230
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +203 -169
- package/dist/index.mjs.map +1 -1
- package/package.json +8 -8
- package/rsc/dist/index.d.ts +186 -183
- package/rsc/dist/rsc-server.d.mts +185 -183
- package/rsc/dist/rsc-server.mjs +1219 -1248
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/rsc/dist/rsc-shared.d.mts +24 -22
- package/rsc/dist/rsc-shared.mjs +46 -60
- package/rsc/dist/rsc-shared.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
- for (var
- __defProp(target,
+ for (var name12 in all)
+ __defProp(target, name12, { get: all[name12], enumerable: true });
 };

 // streams/index.ts
@@ -13,13 +13,16 @@ import {
 } from "@ai-sdk/ui-utils";
 import { generateId as generateIdImpl } from "@ai-sdk/provider-utils";

+ // core/index.ts
+ import { jsonSchema } from "@ai-sdk/ui-utils";
+
 // util/retry-with-exponential-backoff.ts
 import { APICallError } from "@ai-sdk/provider";
 import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";

 // util/delay.ts
 async function delay(delayInMs) {
- return new Promise((resolve) => setTimeout(resolve, delayInMs));
+ return delayInMs === void 0 ? Promise.resolve() : new Promise((resolve) => setTimeout(resolve, delayInMs));
 }

 // util/retry-error.ts
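The `delay` change above is behavioral: passing `undefined` no longer schedules a zero-millisecond timer but resolves immediately. A minimal standalone sketch of the new behavior (the helper itself is internal to the bundle):

    async function delay(delayInMs) {
      return delayInMs === void 0
        ? Promise.resolve() // no timer scheduled when the delay is omitted
        : new Promise((resolve) => setTimeout(resolve, delayInMs));
    }

    await delay();    // resolves without touching the timer queue
    await delay(250); // resolves after ~250 ms, as before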
@@ -134,7 +137,7 @@ function getBaseTelemetryAttributes({
 telemetry,
 headers
 }) {
- var
+ var _a12;
 return {
 "ai.model.provider": model.provider,
 "ai.model.id": model.modelId,
@@ -147,7 +150,7 @@ function getBaseTelemetryAttributes({
 "resource.name": telemetry == null ? void 0 : telemetry.functionId,
 "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId,
 // add metadata as attributes:
- ...Object.entries((
+ ...Object.entries((_a12 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a12 : {}).reduce(
 (attributes, [key, value]) => {
 attributes[`ai.telemetry.metadata.${key}`] = value;
 return attributes;
@@ -172,7 +175,7 @@ var noopTracer = {
 startSpan() {
 return noopSpan;
 },
- startActiveSpan(
+ startActiveSpan(name12, arg1, arg2, arg3) {
 if (typeof arg1 === "function") {
 return arg1(noopSpan);
 }
@@ -240,13 +243,13 @@ function getTracer({ isEnabled }) {
 // core/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
 function recordSpan({
- name:
+ name: name12,
 tracer,
 attributes,
 fn,
 endWhenDone = true
 }) {
- return tracer.startActiveSpan(
+ return tracer.startActiveSpan(name12, { attributes }, async (span) => {
 try {
 const result = await fn(span);
 if (endWhenDone) {
@@ -312,14 +315,14 @@ async function embed({
 headers,
 experimental_telemetry: telemetry
 }) {
- var
+ var _a12;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { maxRetries }
 });
- const tracer = getTracer({ isEnabled: (
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
 return recordSpan({
 name: "ai.embed",
 attributes: selectTelemetryAttributes({
@@ -352,14 +355,14 @@ async function embed({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
- var
+ var _a13;
 const modelResponse = await model.doEmbed({
 values: [value],
 abortSignal,
 headers
 });
 const embedding2 = modelResponse.embeddings[0];
- const usage2 = (
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,
@@ -425,14 +428,14 @@ async function embedMany({
 headers,
 experimental_telemetry: telemetry
 }) {
- var
+ var _a12;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { maxRetries }
 });
- const tracer = getTracer({ isEnabled: (
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
 return recordSpan({
 name: "ai.embedMany",
 attributes: selectTelemetryAttributes({
@@ -470,14 +473,14 @@ async function embedMany({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
- var
+ var _a13;
 const modelResponse = await model.doEmbed({
 values,
 abortSignal,
 headers
 });
 const embeddings3 = modelResponse.embeddings;
- const usage2 = (
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,
@@ -529,14 +532,14 @@ async function embedMany({
 }),
 tracer,
 fn: async (doEmbedSpan) => {
- var
+ var _a13;
 const modelResponse = await model.doEmbed({
 values: chunk,
 abortSignal,
 headers
 });
 const embeddings2 = modelResponse.embeddings;
- const usage2 = (
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
 doEmbedSpan.setAttributes(
 selectTelemetryAttributes({
 telemetry,
@@ -584,6 +587,7 @@ var DefaultEmbedManyResult = class {

 // core/generate-object/generate-object.ts
 import { safeParseJSON } from "@ai-sdk/provider-utils";
+ import { asSchema } from "@ai-sdk/ui-utils";

 // core/prompt/convert-to-language-model-prompt.ts
 import { getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider-utils";
@@ -638,7 +642,7 @@ async function download({
 url,
 fetchImplementation = fetch
 }) {
- var
+ var _a12;
 const urlText = url.toString();
 try {
 const response = await fetchImplementation(urlText);
@@ -651,7 +655,7 @@ async function download({
 }
 return {
 data: new Uint8Array(await response.arrayBuffer()),
- mimeType: (
+ mimeType: (_a12 = response.headers.get("content-type")) != null ? _a12 : void 0
 };
 } catch (error) {
 if (DownloadError.isInstance(error)) {
@@ -851,7 +855,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
 role: "user",
 content: message.content.map(
 (part) => {
- var
+ var _a12, _b, _c;
 switch (part.type) {
 case "text": {
 return part;
@@ -869,7 +873,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
 return {
 type: "image",
 image: downloadedImage.data,
- mimeType: (
+ mimeType: (_a12 = part.mimeType) != null ? _a12 : downloadedImage.mimeType
 };
 }
 }
@@ -1173,8 +1177,8 @@ function prepareResponseHeaders(init, {
 contentType,
 dataStreamVersion
 }) {
- var
- const headers = new Headers((
+ var _a12;
+ const headers = new Headers((_a12 = init == null ? void 0 : init.headers) != null ? _a12 : {});
 if (!headers.has("Content-Type")) {
 headers.set("Content-Type", contentType);
 }
@@ -1184,41 +1188,6 @@ function prepareResponseHeaders(init, {
 return headers;
 }

- // core/util/schema.ts
- import { validatorSymbol } from "@ai-sdk/provider-utils";
- import zodToJsonSchema from "zod-to-json-schema";
- var schemaSymbol = Symbol.for("vercel.ai.schema");
- function jsonSchema(jsonSchema2, {
- validate
- } = {}) {
- return {
- [schemaSymbol]: true,
- _type: void 0,
- // should never be used directly
- [validatorSymbol]: true,
- jsonSchema: jsonSchema2,
- validate
- };
- }
- function isSchema(value) {
- return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
- }
- function asSchema(schema) {
- return isSchema(schema) ? schema : zodSchema(schema);
- }
- function zodSchema(zodSchema2) {
- return jsonSchema(
- // we assume that zodToJsonSchema will return a valid JSONSchema7:
- zodToJsonSchema(zodSchema2),
- {
- validate: (value) => {
- const result = zodSchema2.safeParse(value);
- return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
- }
- }
- );
- }
-
 // core/generate-object/inject-json-schema-into-system.ts
 var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
 var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
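The removed `core/util/schema.ts` block is not a feature removal: `jsonSchema`, `asSchema`, and the Zod conversion now come from `@ai-sdk/ui-utils` (see the new imports earlier in the file). A hedged usage sketch, assuming `jsonSchema` is still re-exported from the `ai` package; the schema itself is hypothetical:

    import { jsonSchema } from "ai";

    const recipeSchema = jsonSchema(
      {
        type: "object",
        properties: { name: { type: "string" } },
        required: ["name"]
      },
      {
        // optional validate callback; same result shape the removed code used
        validate: (value) =>
          typeof value === "object" && value !== null && "name" in value
            ? { success: true, value }
            : { success: false, error: new Error("invalid recipe") }
      }
    );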
@@ -1289,7 +1258,7 @@ async function generateObject({
 experimental_telemetry: telemetry,
 ...settings
 }) {
- var
+ var _a12;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
@@ -1297,7 +1266,7 @@ async function generateObject({
 settings: { ...settings, maxRetries }
 });
 const schema = asSchema(inputSchema);
- const tracer = getTracer({ isEnabled: (
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
 return recordSpan({
 name: "ai.generateObject",
 attributes: selectTelemetryAttributes({
@@ -1457,7 +1426,7 @@ async function generateObject({
 }),
 tracer,
 fn: async (span2) => {
- var
+ var _a13, _b;
 const result2 = await model.doGenerate({
 mode: {
 type: "object-tool",
@@ -1474,7 +1443,7 @@ async function generateObject({
 abortSignal,
 headers
 });
- const objectText = (_b = (
+ const objectText = (_b = (_a13 = result2.toolCalls) == null ? void 0 : _a13[0]) == null ? void 0 : _b.args;
 if (objectText === void 0) {
 throw new NoObjectGeneratedError();
 }
@@ -1553,9 +1522,9 @@ var DefaultGenerateObjectResult = class {
 this.logprobs = options.logprobs;
 }
 toJsonResponse(init) {
- var
+ var _a12;
 return new Response(JSON.stringify(this.object), {
- status: (
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
 headers: prepareResponseHeaders(init, {
 contentType: "application/json; charset=utf-8"
 })
@@ -1567,6 +1536,7 @@ var experimental_generateObject = generateObject;
 // core/generate-object/stream-object.ts
 import { safeValidateTypes } from "@ai-sdk/provider-utils";
 import {
+ asSchema as asSchema2,
 isDeepEqualData,
 parsePartialJson
 } from "@ai-sdk/ui-utils";
@@ -1609,17 +1579,17 @@ var DelayedPromise = class {
 return this.promise;
 }
 resolve(value) {
- var
+ var _a12;
 this.status = { type: "resolved", value };
 if (this.promise) {
- (
+ (_a12 = this._resolve) == null ? void 0 : _a12.call(this, value);
 }
 }
 reject(error) {
- var
+ var _a12;
 this.status = { type: "rejected", error };
 if (this.promise) {
- (
+ (_a12 = this._reject) == null ? void 0 : _a12.call(this, error);
 }
 }
 };
@@ -1658,16 +1628,16 @@ async function streamObject({
 onFinish,
 ...settings
 }) {
- var
+ var _a12;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { ...settings, maxRetries }
 });
- const tracer = getTracer({ isEnabled: (
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
 const retry = retryWithExponentialBackoff({ maxRetries });
- const schema =
+ const schema = asSchema2(inputSchema);
 return recordSpan({
 name: "ai.streamObject",
 attributes: selectTelemetryAttributes({
@@ -2027,8 +1997,8 @@ var DefaultStreamObjectResult = class {
 });
 }
 pipeTextStreamToResponse(response, init) {
- var
- response.writeHead((
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -2050,9 +2020,9 @@ var DefaultStreamObjectResult = class {
 read();
 }
 toTextStreamResponse(init) {
- var
+ var _a12;
 return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8"
 })
@@ -2061,6 +2031,9 @@ var DefaultStreamObjectResult = class {
 };
 var experimental_streamObject = streamObject;

+ // core/prompt/prepare-tools-and-tool-choice.ts
+ import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";
+
 // core/util/is-non-empty-object.ts
 function isNonEmptyObject(object) {
 return object != null && Object.keys(object).length > 0;
@@ -2078,11 +2051,11 @@ function prepareToolsAndToolChoice({
 };
 }
 return {
- tools: Object.entries(tools).map(([
+ tools: Object.entries(tools).map(([name12, tool2]) => ({
 type: "function",
- name:
+ name: name12,
 description: tool2.description,
- parameters:
+ parameters: asSchema3(tool2.parameters).jsonSchema
 })),
 toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
 };
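`prepareToolsAndToolChoice` now normalizes `tool2.parameters` through `asSchema3(...).jsonSchema`, so a tool can declare its parameters either as a Zod schema or as a `jsonSchema(...)` object. An illustrative sketch; the tool itself is hypothetical, and `tool` / `z` are the usual helpers:

    import { tool } from "ai";
    import { z } from "zod";

    const weatherTool = tool({
      description: "Get the weather for a city",
      parameters: z.object({ city: z.string() }), // converted to JSON schema via asSchema
      execute: async ({ city }) => ({ city, temperatureC: 21 })
    });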
@@ -2090,6 +2063,7 @@ function prepareToolsAndToolChoice({

 // core/generate-text/tool-call.ts
 import { safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
+ import { asSchema as asSchema4 } from "@ai-sdk/ui-utils";

 // errors/invalid-tool-arguments-error.ts
 import { AISDKError as AISDKError7, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
@@ -2195,7 +2169,7 @@ function parseToolCall({
 }
 const parseResult = safeParseJSON2({
 text: toolCall.args,
- schema:
+ schema: asSchema4(tool2.parameters)
 });
 if (parseResult.success === false) {
 throw new InvalidToolArgumentsError({
@@ -2228,14 +2202,14 @@ async function generateText({
 experimental_telemetry: telemetry,
 ...settings
 }) {
- var
+ var _a12;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { ...settings, maxRetries }
 });
- const tracer = getTracer({ isEnabled: (
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
 return recordSpan({
 name: "ai.generateText",
 attributes: selectTelemetryAttributes({
@@ -2255,7 +2229,7 @@ async function generateText({
 }),
 tracer,
 fn: async (span) => {
- var
+ var _a13, _b, _c, _d;
 const retry = retryWithExponentialBackoff({ maxRetries });
 const validatedPrompt = getValidatedPrompt({
 system,
@@ -2341,7 +2315,7 @@ async function generateText({
 }
 })
 );
- currentToolCalls = ((
+ currentToolCalls = ((_a13 = currentModelResponse.toolCalls) != null ? _a13 : []).map(
 (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
 );
 currentToolResults = tools == null ? [] : await executeTools({
@@ -2802,17 +2776,18 @@ async function streamText({
 headers,
 experimental_telemetry: telemetry,
 experimental_toolCallStreaming: toolCallStreaming = false,
+ onChunk,
 onFinish,
 ...settings
 }) {
- var
+ var _a12;
 const baseTelemetryAttributes = getBaseTelemetryAttributes({
 model,
 telemetry,
 headers,
 settings: { ...settings, maxRetries }
 });
- const tracer = getTracer({ isEnabled: (
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
 return recordSpan({
 name: "ai.streamText",
 attributes: selectTelemetryAttributes({
@@ -2893,6 +2868,7 @@ async function streamText({
 }),
 warnings,
 rawResponse,
+ onChunk,
 onFinish,
 rootSpan,
 doStreamSpan,
@@ -2906,6 +2882,7 @@ var DefaultStreamTextResult = class {
 stream,
 warnings,
 rawResponse,
+ onChunk,
 onFinish,
 rootSpan,
 doStreamSpan,
@@ -2913,7 +2890,6 @@ var DefaultStreamTextResult = class {
 }) {
 this.warnings = warnings;
 this.rawResponse = rawResponse;
- this.onFinish = onFinish;
 const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
 this.usage = usagePromise;
 const { resolve: resolveFinishReason, promise: finishReasonPromise } = createResolvablePromise();
@@ -2930,25 +2906,30 @@ var DefaultStreamTextResult = class {
 const toolCalls = [];
 const toolResults = [];
 let firstChunk = true;
- const self = this;
 this.originalStream = stream.pipeThrough(
 new TransformStream({
 async transform(chunk, controller) {
- controller.enqueue(chunk);
 if (firstChunk) {
 firstChunk = false;
 doStreamSpan.addEvent("ai.stream.firstChunk");
 }
+ if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
+ return;
+ }
+ controller.enqueue(chunk);
 const chunkType = chunk.type;
 switch (chunkType) {
 case "text-delta":
 text += chunk.textDelta;
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
 break;
 case "tool-call":
 toolCalls.push(chunk);
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
 break;
 case "tool-result":
 toolResults.push(chunk);
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
 break;
 case "finish":
 usage = chunk.usage;
@@ -2959,7 +2940,10 @@ var DefaultStreamTextResult = class {
 resolveToolCalls(toolCalls);
 break;
 case "tool-call-streaming-start":
- case "tool-call-delta":
+ case "tool-call-delta": {
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
+ break;
+ }
 case "error":
 break;
 default: {
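Taken together, the `streamText` hunks wire a new `onChunk` callback through to the stream transform: it fires for `text-delta`, `tool-call`, `tool-result`, `tool-call-streaming-start`, and `tool-call-delta` chunks. A usage sketch; the model and prompt are placeholders:

    import { streamText } from "ai";

    const result = await streamText({
      model, // any provider language model
      prompt: "Write a haiku about package diffs.",
      onChunk({ chunk }) {
        if (chunk.type === "text-delta") {
          process.stdout.write(chunk.textDelta);
        }
      }
    });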
@@ -2970,7 +2954,6 @@ var DefaultStreamTextResult = class {
 },
 // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
 async flush(controller) {
- var _a9;
 try {
 const finalUsage = usage != null ? usage : {
 promptTokens: NaN,
@@ -3009,7 +2992,7 @@ var DefaultStreamTextResult = class {
 })
 );
 resolveToolResults(toolResults);
- await (
+ await (onFinish == null ? void 0 : onFinish({
 finishReason: finalFinishReason,
 usage: finalUsage,
 text,
@@ -3048,9 +3031,7 @@ var DefaultStreamTextResult = class {
 return createAsyncIterableStream(this.teeStream(), {
 transform(chunk, controller) {
 if (chunk.type === "text-delta") {
-
- controller.enqueue(chunk.textDelta);
- }
+ controller.enqueue(chunk.textDelta);
 } else if (chunk.type === "error") {
 controller.error(chunk.error);
 }
@@ -3060,13 +3041,7 @@ var DefaultStreamTextResult = class {
 get fullStream() {
 return createAsyncIterableStream(this.teeStream(), {
 transform(chunk, controller) {
-
- if (chunk.textDelta.length > 0) {
- controller.enqueue(chunk);
- }
- } else {
- controller.enqueue(chunk);
- }
+ controller.enqueue(chunk);
 }
 });
 }
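These two hunks are the flip side of the filter added in the shared transform: zero-length `text-delta` chunks are now dropped once, upstream, so `textStream` and `fullStream` no longer need their own length checks and both observe the same filtered sequence. Consuming `result.textStream` from a `streamText` call:

    for await (const textPart of result.textStream) {
      process.stdout.write(textPart); // never a zero-length delta
    }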
@@ -3171,8 +3146,8 @@ var DefaultStreamTextResult = class {
 return this.pipeDataStreamToResponse(response, init);
 }
 pipeDataStreamToResponse(response, init) {
- var
- response.writeHead((
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -3194,8 +3169,8 @@ var DefaultStreamTextResult = class {
 read();
 }
 pipeTextStreamToResponse(response, init) {
- var
- response.writeHead((
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -3220,7 +3195,7 @@ var DefaultStreamTextResult = class {
 return this.toDataStreamResponse(options);
 }
 toDataStreamResponse(options) {
- var
+ var _a12;
 const init = options == null ? void 0 : "init" in options ? options.init : {
 headers: "headers" in options ? options.headers : void 0,
 status: "status" in options ? options.status : void 0,
@@ -3230,7 +3205,7 @@ var DefaultStreamTextResult = class {
 const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
 const stream = data ? mergeStreams(data.stream, this.toDataStream({ getErrorMessage: getErrorMessage4 })) : this.toDataStream({ getErrorMessage: getErrorMessage4 });
 return new Response(stream, {
- status: (
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
 statusText: init == null ? void 0 : init.statusText,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8",
@@ -3239,9 +3214,9 @@ var DefaultStreamTextResult = class {
 });
 }
 toTextStreamResponse(init) {
- var
+ var _a12;
 return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8"
 })
@@ -3252,7 +3227,7 @@ var experimental_streamText = streamText;

 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
- var
+ var _a12, _b, _c;
 const parts = [];
 for (const attachment of attachments) {
 let url;
@@ -3264,7 +3239,7 @@ function attachmentsToParts(attachments) {
 switch (url.protocol) {
 case "http:":
 case "https:": {
- if ((
+ if ((_a12 = attachment.contentType) == null ? void 0 : _a12.startsWith("image/")) {
 parts.push({ type: "image", image: url });
 }
 break;
@@ -3373,18 +3348,32 @@ function convertToCoreMessages(messages) {
 }

 // core/registry/invalid-model-id-error.ts
-
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
+ var name9 = "AI_InvalidModelIdError";
+ var marker9 = `vercel.ai.error.${name9}`;
+ var symbol9 = Symbol.for(marker9);
+ var _a9;
+ var InvalidModelIdError = class extends AISDKError9 {
 constructor({
 id,
 message = `Invalid model id: ${id}`
 }) {
- super(message);
- this
+ super({ name: name9, message });
+ this[_a9] = true;
 this.id = id;
 }
+ static isInstance(error) {
+ return AISDKError9.hasMarker(error, marker9);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
 static isInvalidModelIdError(error) {
- return error instanceof Error && error.name ===
+ return error instanceof Error && error.name === name9 && typeof error.id === "string";
 }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
 toJSON() {
 return {
 name: this.name,
@@ -3394,22 +3383,37 @@ var InvalidModelIdError = class extends Error {
 };
 }
 };
+ _a9 = symbol9;

 // core/registry/no-such-model-error.ts
-
+ import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
+ var name10 = "AI_NoSuchModelError";
+ var marker10 = `vercel.ai.error.${name10}`;
+ var symbol10 = Symbol.for(marker10);
+ var _a10;
+ var NoSuchModelError = class extends AISDKError10 {
 constructor({
 modelId,
 modelType,
 message = `No such ${modelType}: ${modelId}`
 }) {
- super(message);
- this
+ super({ name: name10, message });
+ this[_a10] = true;
 this.modelId = modelId;
 this.modelType = modelType;
 }
+ static isInstance(error) {
+ return AISDKError10.hasMarker(error, marker10);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
 static isNoSuchModelError(error) {
- return error instanceof Error && error.name ===
+ return error instanceof Error && error.name === name10 && typeof error.modelId === "string" && typeof error.modelType === "string";
 }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
 toJSON() {
 return {
 name: this.name,
@@ -3420,22 +3424,37 @@ var NoSuchModelError = class extends Error {
 };
 }
 };
+ _a10 = symbol10;

 // core/registry/no-such-provider-error.ts
-
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
+ var name11 = "AI_NoSuchProviderError";
+ var marker11 = `vercel.ai.error.${name11}`;
+ var symbol11 = Symbol.for(marker11);
+ var _a11;
+ var NoSuchProviderError = class extends AISDKError11 {
 constructor({
 providerId,
 availableProviders,
 message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
 }) {
- super(message);
- this
+ super({ name: name11, message });
+ this[_a11] = true;
 this.providerId = providerId;
 this.availableProviders = availableProviders;
 }
+ static isInstance(error) {
+ return AISDKError11.hasMarker(error, marker11);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
 static isNoSuchProviderError(error) {
- return error instanceof Error && error.name ===
+ return error instanceof Error && error.name === name11 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
 }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
 toJSON() {
 return {
 name: this.name,
@@ -3446,6 +3465,7 @@ var NoSuchProviderError = class extends Error {
 };
 }
 };
+ _a11 = symbol11;

 // core/registry/provider-registry.ts
 function experimental_createProviderRegistry(providers) {
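All three registry errors move from plain `Error` subclasses to `AISDKError` with a `Symbol.for`-based marker, so `isInstance` keeps working even when multiple copies of the package end up in one bundle (where `instanceof` would fail). A hedged sketch of the intended check, assuming the error classes are exported from `ai` and using a hypothetical registry lookup:

    import { NoSuchModelError } from "ai";

    try {
      const model = registry.languageModel("unknown:model"); // hypothetical lookup
    } catch (error) {
      if (NoSuchModelError.isInstance(error)) {
        console.error(`no such ${error.modelType}: ${error.modelId}`);
      } else {
        throw error;
      }
    }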
@@ -3460,7 +3480,10 @@ var DefaultProviderRegistry = class {
 constructor() {
 this.providers = {};
 }
- registerProvider({
+ registerProvider({
+ id,
+ provider
+ }) {
 this.providers[id] = provider;
 }
 getProvider(id) {
@@ -3481,26 +3504,33 @@ var DefaultProviderRegistry = class {
 return [id.slice(0, index), id.slice(index + 1)];
 }
 languageModel(id) {
- var
+ var _a12, _b;
 const [providerId, modelId] = this.splitId(id);
- const model = (_b = (
+ const model = (_b = (_a12 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a12, modelId);
 if (model == null) {
- throw new NoSuchModelError({ modelId: id, modelType: "
+ throw new NoSuchModelError({ modelId: id, modelType: "languageModel" });
 }
 return model;
 }
 textEmbeddingModel(id) {
- var
+ var _a12, _b, _c;
 const [providerId, modelId] = this.splitId(id);
- const
+ const provider = this.getProvider(providerId);
+ const model = (_c = (_a12 = provider.textEmbeddingModel) == null ? void 0 : _a12.call(provider, modelId)) != null ? _c : (_b = provider.textEmbedding) == null ? void 0 : _b.call(provider, modelId);
 if (model == null) {
 throw new NoSuchModelError({
 modelId: id,
- modelType: "
+ modelType: "textEmbeddingModel"
 });
 }
 return model;
 }
+ /**
+ * @deprecated Use `textEmbeddingModel` instead.
+ */
+ textEmbedding(id) {
+ return this.textEmbeddingModel(id);
+ }
 };

 // core/tool/tool.ts
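`textEmbeddingModel` becomes the canonical registry accessor (falling back to a provider's older `textEmbedding` method), and a deprecated `textEmbedding` alias is kept on the registry itself. A usage sketch; the provider import and model id are assumptions, not taken from this diff:

    import { experimental_createProviderRegistry as createProviderRegistry } from "ai";
    import { openai } from "@ai-sdk/openai"; // assumption: any registered provider works

    const registry = createProviderRegistry({ openai });
    const embeddingModel = registry.textEmbeddingModel("openai:text-embedding-3-small");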
@@ -3529,7 +3559,7 @@ function magnitude(vector) {

 // errors/index.ts
 import {
- AISDKError as
+ AISDKError as AISDKError12,
 APICallError as APICallError2,
 EmptyResponseBodyError,
 InvalidPromptError as InvalidPromptError2,
@@ -3658,15 +3688,19 @@ function readableFromAsyncIterable(iterable) {
 controller.enqueue(value);
 },
 async cancel(reason) {
- var
- await ((
+ var _a12;
+ await ((_a12 = it.return) == null ? void 0 : _a12.call(it, reason));
 }
 });
 }

 // streams/stream-data.ts
 import { formatStreamPart as formatStreamPart2 } from "@ai-sdk/ui-utils";
-
+
+ // util/constants.ts
+ var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+ // streams/stream-data.ts
 var StreamData2 = class {
 constructor() {
 this.encoder = new TextEncoder();
@@ -3682,7 +3716,7 @@ var StreamData2 = class {
 console.warn(
 "The data stream is hanging. Did you forget to close it with `data.close()`?"
 );
- },
+ }, HANGING_STREAM_WARNING_TIME_MS);
 }
 },
 pull: (controller) => {
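The warning timeout is now the named constant `HANGING_STREAM_WARNING_TIME_MS` (15 * 1e3 ms); the warning itself is unchanged and fires when a `StreamData` instance is left open. A minimal sketch:

    import { StreamData } from "ai";

    const data = new StreamData();
    data.append({ status: "started" });
    // ... write stream parts ...
    data.close(); // closing disarms the 15-second hanging-stream warning above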
@@ -3794,7 +3828,7 @@ import {
 function AssistantResponse({ threadId, messageId }, process2) {
 const stream = new ReadableStream({
 async start(controller) {
- var
+ var _a12;
 const textEncoder = new TextEncoder();
 const sendMessage = (message) => {
 controller.enqueue(
@@ -3812,7 +3846,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 );
 };
 const forwardStream = async (stream2) => {
- var
+ var _a13, _b;
 let result = void 0;
 for await (const value of stream2) {
 switch (value.event) {
@@ -3829,7 +3863,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 break;
 }
 case "thread.message.delta": {
- const content = (
+ const content = (_a13 = value.data.delta.content) == null ? void 0 : _a13[0];
 if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
 controller.enqueue(
 textEncoder.encode(
@@ -3865,7 +3899,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
 forwardStream
 });
 } catch (error) {
- sendError((
+ sendError((_a12 = error.message) != null ? _a12 : `${error}`);
 } finally {
 controller.close();
 }
@@ -3886,9 +3920,9 @@ var experimental_AssistantResponse = AssistantResponse;

 // streams/aws-bedrock-stream.ts
 async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var
+ var _a12, _b;
 const decoder = new TextDecoder();
- for await (const chunk of (
+ for await (const chunk of (_a12 = response.body) != null ? _a12 : []) {
 const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
 if (bytes != null) {
 const chunkText = decoder.decode(bytes);
@@ -3902,8 +3936,8 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
 }
 function AWSBedrockAnthropicMessagesStream(response, callbacks) {
 return AWSBedrockStream(response, callbacks, (chunk) => {
- var
- return (
+ var _a12;
+ return (_a12 = chunk.delta) == null ? void 0 : _a12.text;
 });
 }
 function AWSBedrockAnthropicStream(response, callbacks) {
@@ -3950,8 +3984,8 @@ async function readAndProcessLines(reader, controller) {
 controller.close();
 }
 function createParser2(res) {
- var
- const reader = (
+ var _a12;
+ const reader = (_a12 = res.body) == null ? void 0 : _a12.getReader();
 return new ReadableStream({
 async start(controller) {
 if (!reader) {
@@ -3981,9 +4015,9 @@ function CohereStream(reader, callbacks) {

 // streams/google-generative-ai-stream.ts
 async function* streamable3(response) {
- var
+ var _a12, _b, _c;
 for await (const chunk of response.stream) {
- const parts = (_c = (_b = (
+ const parts = (_c = (_b = (_a12 = chunk.candidates) == null ? void 0 : _a12[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
 if (parts === void 0) {
 continue;
 }
@@ -4002,13 +4036,13 @@ function createParser3(res) {
 const trimStartOfStream = trimStartOfStreamHelper();
 return new ReadableStream({
 async pull(controller) {
- var
+ var _a12, _b;
 const { value, done } = await res.next();
 if (done) {
 controller.close();
 return;
 }
- const text = trimStartOfStream((_b = (
+ const text = trimStartOfStream((_b = (_a12 = value.token) == null ? void 0 : _a12.text) != null ? _b : "");
 if (!text)
 return;
 if (value.generated_text != null && value.generated_text.length > 0) {
@@ -4033,11 +4067,11 @@ function InkeepStream(res, callbacks) {
 let chat_session_id = "";
 let records_cited;
 const inkeepEventParser = (data, options) => {
- var
+ var _a12, _b;
 const { event } = options;
 if (event === "records_cited") {
 records_cited = JSON.parse(data);
- (
+ (_a12 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a12.call(callbacks, records_cited);
 }
 if (event === "message_chunk") {
 const inkeepMessageChunk = JSON.parse(data);
@@ -4050,12 +4084,12 @@ function InkeepStream(res, callbacks) {
 passThroughCallbacks = {
 ...passThroughCallbacks,
 onFinal: (completion) => {
- var
+ var _a12;
 const inkeepOnFinalMetadata = {
 chat_session_id,
 records_cited
 };
- (
+ (_a12 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a12.call(callbacks, completion, inkeepOnFinalMetadata);
 }
 };
 return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
@@ -4077,7 +4111,7 @@ function toDataStream(stream, callbacks) {
 return stream.pipeThrough(
 new TransformStream({
 transform: async (value, controller) => {
- var
+ var _a12;
 if (typeof value === "string") {
 controller.enqueue(value);
 return;
@@ -4085,7 +4119,7 @@ function toDataStream(stream, callbacks) {
 if ("event" in value) {
 if (value.event === "on_chat_model_stream") {
 forwardAIMessageChunk(
- (
+ (_a12 = value.data) == null ? void 0 : _a12.chunk,
 controller
 );
 }
@@ -4097,13 +4131,13 @@ function toDataStream(stream, callbacks) {
 ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 function toDataStreamResponse(stream, options) {
- var
+ var _a12;
 const dataStream = toDataStream(stream, options == null ? void 0 : options.callbacks);
 const data = options == null ? void 0 : options.data;
 const init = options == null ? void 0 : options.init;
 const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
 return new Response(responseStream, {
- status: (
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
 statusText: init == null ? void 0 : init.statusText,
 headers: prepareResponseHeaders(init, {
 contentType: "text/plain; charset=utf-8",
@@ -4185,9 +4219,9 @@ function LangChainStream(callbacks) {

 // streams/mistral-stream.ts
 async function* streamable4(stream) {
- var
+ var _a12, _b;
 for await (const chunk of stream) {
- const content = (_b = (
+ const content = (_b = (_a12 = chunk.choices[0]) == null ? void 0 : _a12.delta) == null ? void 0 : _b.content;
 if (content === void 0 || content === "") {
 continue;
 }
@@ -4220,10 +4254,10 @@ async function* streamable5(stream) {
 model: chunk.model,
 // not exposed by Azure API
 choices: chunk.choices.map((choice) => {
- var
+ var _a12, _b, _c, _d, _e, _f, _g;
 return {
 delta: {
- content: (
+ content: (_a12 = choice.delta) == null ? void 0 : _a12.content,
 function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
 role: (_c = choice.delta) == null ? void 0 : _c.role,
 tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
@@ -4248,9 +4282,9 @@ function chunkToText() {
 const trimStartOfStream = trimStartOfStreamHelper();
 let isFunctionStreamingIn;
 return (json) => {
- var
+ var _a12, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
 if (isChatCompletionChunk(json)) {
- const delta = (
+ const delta = (_a12 = json.choices[0]) == null ? void 0 : _a12.delta;
 if ((_b = delta.function_call) == null ? void 0 : _b.name) {
 isFunctionStreamingIn = true;
 return {
@@ -4523,8 +4557,8 @@ function createFunctionCallTransformer(callbacks) {

 // streams/replicate-stream.ts
 async function ReplicateStream(res, cb, options) {
- var
- const url = (
+ var _a12;
+ const url = (_a12 = res.urls) == null ? void 0 : _a12.stream;
 if (!url) {
 if (res.error)
 throw new Error(res.error);
@@ -4545,8 +4579,8 @@ async function ReplicateStream(res, cb, options) {

 // streams/stream-to-response.ts
 function streamToResponse(res, response, init, data) {
- var
- response.writeHead((
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
 "Content-Type": "text/plain; charset=utf-8",
 ...init == null ? void 0 : init.headers
 });
@@ -4589,7 +4623,7 @@ var StreamingTextResponse = class extends Response {
 var generateId2 = generateIdImpl;
 var nanoid = generateIdImpl;
 export {
-
+ AISDKError12 as AISDKError,
 AIStream,
 APICallError2 as APICallError,
 AWSBedrockAnthropicMessagesStream,