ai 4.0.22 → 4.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +49 -21
- package/dist/index.d.ts +49 -21
- package/dist/index.js +307 -180
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +293 -164
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
  var __defProp = Object.defineProperty;
  var __export = (target, all) => {
- for (var
- __defProp(target,
+ for (var name14 in all)
+ __defProp(target, name14, { get: all[name14], enumerable: true });
  };

  // streams/index.ts
@@ -354,7 +354,7 @@ function getBaseTelemetryAttributes({
  telemetry,
  headers
  }) {
- var
+ var _a14;
  return {
  "ai.model.provider": model.provider,
  "ai.model.id": model.modelId,
@@ -364,7 +364,7 @@ function getBaseTelemetryAttributes({
  return attributes;
  }, {}),
  // add metadata as attributes:
- ...Object.entries((
+ ...Object.entries((_a14 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a14 : {}).reduce(
  (attributes, [key, value]) => {
  attributes[`ai.telemetry.metadata.${key}`] = value;
  return attributes;
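Both hunks above belong to a mechanical rename of the bundler-generated temporaries (`_a13` becomes `_a14` throughout this release, apparently because a new error module shifts the suffix counter), but the second one sits on the code path that flattens `experimental_telemetry.metadata` into `ai.telemetry.metadata.<key>` span attributes. A minimal caller-side sketch of that mapping, assuming the `@ai-sdk/openai` provider for the model:

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai("gpt-4o-mini"),
  prompt: "Say hello.",
  experimental_telemetry: {
    isEnabled: true,
    // Each entry is recorded on the OpenTelemetry span as
    // ai.telemetry.metadata.userId / ai.telemetry.metadata.feature:
    metadata: { userId: "user-123", feature: "greeting" },
  },
});
```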
@@ -389,7 +389,7 @@ var noopTracer = {
  startSpan() {
  return noopSpan;
  },
- startActiveSpan(
+ startActiveSpan(name14, arg1, arg2, arg3) {
  if (typeof arg1 === "function") {
  return arg1(noopSpan);
  }
@@ -459,13 +459,13 @@ function getTracer({
  // core/telemetry/record-span.ts
  import { SpanStatusCode } from "@opentelemetry/api";
  function recordSpan({
- name:
+ name: name14,
  tracer,
  attributes,
  fn,
  endWhenDone = true
  }) {
- return tracer.startActiveSpan(
+ return tracer.startActiveSpan(name14, { attributes }, async (span) => {
  try {
  const result = await fn(span);
  if (endWhenDone) {
@@ -573,14 +573,14 @@ async function embed({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var
+ var _a14;
  const modelResponse = await model.doEmbed({
  values: [value],
  abortSignal,
  headers
  });
  const embedding2 = modelResponse.embeddings[0];
- const usage2 = (
+ const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
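Again only the `_a14` rename, but it lands on the fallback that substitutes `{ tokens: NaN }` when a provider reports no embedding usage. A hedged sketch of what that means for callers (the model choice is illustrative):

```ts
import { embed } from "ai";
import { openai } from "@ai-sdk/openai";

const { embedding, usage } = await embed({
  model: openai.embedding("text-embedding-3-small"),
  value: "sunny day at the beach",
});

// usage.tokens is NaN when the provider omitted usage, so guard any arithmetic:
const tokens = Number.isNaN(usage.tokens) ? 0 : usage.tokens;
console.log(`dims: ${embedding.length}, tokens: ${tokens}`);
```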
@@ -690,14 +690,14 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var
+ var _a14;
  const modelResponse = await model.doEmbed({
  values,
  abortSignal,
  headers
  });
  const embeddings3 = modelResponse.embeddings;
- const usage2 = (
+ const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -749,14 +749,14 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var
+ var _a14;
  const modelResponse = await model.doEmbed({
  values: chunk,
  abortSignal,
  headers
  });
  const embeddings2 = modelResponse.embeddings;
- const usage2 = (
+ const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -901,7 +901,7 @@ async function download({
  url,
  fetchImplementation = fetch
  }) {
- var
+ var _a14;
  const urlText = url.toString();
  try {
  const response = await fetchImplementation(urlText);
@@ -914,7 +914,7 @@ async function download({
  }
  return {
  data: new Uint8Array(await response.arrayBuffer()),
- mimeType: (
+ mimeType: (_a14 = response.headers.get("content-type")) != null ? _a14 : void 0
  };
  } catch (error) {
  if (DownloadError.isInstance(error)) {
@@ -977,8 +977,8 @@ var dataContentSchema = z.union([
  z.custom(
  // Buffer might not be available in some environments such as CloudFlare:
  (value) => {
- var
- return (_b = (
+ var _a14, _b;
+ return (_b = (_a14 = globalThis.Buffer) == null ? void 0 : _a14.isBuffer(value)) != null ? _b : false;
  },
  { message: "Must be a Buffer" }
  )
@@ -1169,7 +1169,7 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
  );
  }
  function convertPartToLanguageModelPart(part, downloadedAssets) {
- var
+ var _a14;
  if (part.type === "text") {
  return {
  type: "text",
@@ -1222,7 +1222,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
  switch (type) {
  case "image": {
  if (normalizedData instanceof Uint8Array) {
- mimeType = (
+ mimeType = (_a14 = detectImageMimeType(normalizedData)) != null ? _a14 : mimeType;
  }
  return {
  type: "image",
@@ -1489,7 +1489,7 @@ function detectSingleMessageCharacteristics(message) {

  // core/prompt/attachments-to-parts.ts
  function attachmentsToParts(attachments) {
- var
+ var _a14, _b, _c;
  const parts = [];
  for (const attachment of attachments) {
  let url;
@@ -1501,7 +1501,7 @@ function attachmentsToParts(attachments) {
  switch (url.protocol) {
  case "http:":
  case "https:": {
- if ((
+ if ((_a14 = attachment.contentType) == null ? void 0 : _a14.startsWith("image/")) {
  parts.push({ type: "image", image: url });
  } else {
  if (!attachment.contentType) {
@@ -1587,8 +1587,8 @@ _a7 = symbol7;

  // core/prompt/convert-to-core-messages.ts
  function convertToCoreMessages(messages, options) {
- var
- const tools = (
+ var _a14;
+ const tools = (_a14 = options == null ? void 0 : options.tools) != null ? _a14 : {};
  const coreMessages = [];
  for (const message of messages) {
  const { role, content, toolInvocations, experimental_attachments } = message;
@@ -1870,7 +1870,7 @@ var arrayOutputStrategy = (schema) => {
  additionalProperties: false
  },
  validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
- var
+ var _a14;
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
  return {
  success: false,
@@ -1893,7 +1893,7 @@ var arrayOutputStrategy = (schema) => {
  }
  resultArray.push(result.value);
  }
- const publishedElementCount = (
+ const publishedElementCount = (_a14 = latestObject == null ? void 0 : latestObject.length) != null ? _a14 : 0;
  let textDelta = "";
  if (isFirstDelta) {
  textDelta += "[";
@@ -2231,7 +2231,7 @@ async function generateObject({
  }),
  tracer,
  fn: async (span) => {
- var
+ var _a14, _b;
  if (mode === "auto" || mode == null) {
  mode = model.defaultObjectGenerationMode;
  }
@@ -2293,7 +2293,7 @@ async function generateObject({
  }),
  tracer,
  fn: async (span2) => {
- var
+ var _a15, _b2, _c, _d, _e, _f;
  const result2 = await model.doGenerate({
  mode: {
  type: "object-json",
@@ -2309,7 +2309,7 @@ async function generateObject({
  headers
  });
  const responseData = {
- id: (_b2 = (
+ id: (_b2 = (_a15 = result2.response) == null ? void 0 : _a15.id) != null ? _b2 : generateId3(),
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
  };
@@ -2351,7 +2351,7 @@ async function generateObject({
  rawResponse = generateResult.rawResponse;
  logprobs = generateResult.logprobs;
  resultProviderMetadata = generateResult.providerMetadata;
- request = (
+ request = (_a14 = generateResult.request) != null ? _a14 : {};
  response = generateResult.responseData;
  break;
  }
@@ -2397,7 +2397,7 @@ async function generateObject({
  }),
  tracer,
  fn: async (span2) => {
- var
+ var _a15, _b2, _c, _d, _e, _f, _g, _h;
  const result2 = await model.doGenerate({
  mode: {
  type: "object-tool",
@@ -2415,7 +2415,7 @@ async function generateObject({
  abortSignal,
  headers
  });
- const objectText = (_b2 = (
+ const objectText = (_b2 = (_a15 = result2.toolCalls) == null ? void 0 : _a15[0]) == null ? void 0 : _b2.args;
  const responseData = {
  id: (_d = (_c = result2.response) == null ? void 0 : _c.id) != null ? _d : generateId3(),
  timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
@@ -2541,9 +2541,9 @@ var DefaultGenerateObjectResult = class {
  this.logprobs = options.logprobs;
  }
  toJsonResponse(init) {
- var
+ var _a14;
  return new Response(JSON.stringify(this.object), {
- status: (
+ status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
  contentType: "application/json; charset=utf-8"
  })
@@ -2581,17 +2581,17 @@ var DelayedPromise = class {
  return this.promise;
  }
  resolve(value) {
- var
+ var _a14;
  this.status = { type: "resolved", value };
  if (this.promise) {
- (
+ (_a14 = this._resolve) == null ? void 0 : _a14.call(this, value);
  }
  }
  reject(error) {
- var
+ var _a14;
  this.status = { type: "rejected", error };
  if (this.promise) {
- (
+ (_a14 = this._reject) == null ? void 0 : _a14.call(this, error);
  }
  }
  };
@@ -2680,8 +2680,8 @@ function createStitchableStream() {

  // core/util/now.ts
  function now() {
- var
- return (_b = (
+ var _a14, _b;
+ return (_b = (_a14 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a14.now()) != null ? _b : Date.now();
  }

  // core/generate-object/stream-object.ts
@@ -2970,7 +2970,7 @@ var DefaultStreamObjectResult = class {
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var
+ var _a14, _b, _c;
  if (isFirstChunk) {
  const msToFirstChunk = now2() - startTimestampMs;
  isFirstChunk = false;
@@ -3016,7 +3016,7 @@ var DefaultStreamObjectResult = class {
  switch (chunk.type) {
  case "response-metadata": {
  response = {
- id: (
+ id: (_a14 = chunk.id) != null ? _a14 : response.id,
  timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
  modelId: (_c = chunk.modelId) != null ? _c : response.modelId
  };
@@ -3230,9 +3230,9 @@ var DefaultStreamObjectResult = class {
  });
  }
  toTextStreamResponse(init) {
- var
+ var _a14;
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (
+ status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
  contentType: "text/plain; charset=utf-8"
  })
@@ -3245,7 +3245,7 @@ import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils"

  // errors/index.ts
  import {
- AISDKError as
+ AISDKError as AISDKError13,
  APICallError as APICallError2,
  EmptyResponseBodyError,
  InvalidPromptError as InvalidPromptError2,
@@ -3284,22 +3284,17 @@ var InvalidToolArgumentsError = class extends AISDKError8 {
  };
  _a8 = symbol8;

- // errors/no-
+ // errors/no-output-specified-error.ts
  import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
- var name9 = "
+ var name9 = "AI_NoOutputSpecifiedError";
  var marker9 = `vercel.ai.error.${name9}`;
  var symbol9 = Symbol.for(marker9);
  var _a9;
- var
-
-
- availableTools = void 0,
- message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
- }) {
+ var NoOutputSpecifiedError = class extends AISDKError9 {
+ // used in isInstance
+ constructor({ message = "No output specified." } = {}) {
  super({ name: name9, message });
  this[_a9] = true;
- this.toolName = toolName;
- this.availableTools = availableTools;
  }
  static isInstance(error) {
  return AISDKError9.hasMarker(error, marker9);
@@ -3307,21 +3302,22 @@ var NoSuchToolError = class extends AISDKError9 {
  };
  _a9 = symbol9;

- // errors/
- import { AISDKError as AISDKError10
- var name10 = "
+ // errors/no-such-tool-error.ts
+ import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
+ var name10 = "AI_NoSuchToolError";
  var marker10 = `vercel.ai.error.${name10}`;
  var symbol10 = Symbol.for(marker10);
  var _a10;
- var
+ var NoSuchToolError = class extends AISDKError10 {
  constructor({
-
-
- message = `
+ toolName,
+ availableTools = void 0,
+ message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
  }) {
- super({ name: name10, message
+ super({ name: name10, message });
  this[_a10] = true;
- this.
+ this.toolName = toolName;
+ this.availableTools = availableTools;
  }
  static isInstance(error) {
  return AISDKError10.hasMarker(error, marker10);
@@ -3329,29 +3325,51 @@ var ToolCallRepairError = class extends AISDKError10 {
  };
  _a10 = symbol10;

- // errors/tool-
- import { AISDKError as AISDKError11, getErrorMessage as
- var name11 = "
+ // errors/tool-call-repair-error.ts
+ import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
+ var name11 = "AI_ToolCallRepairError";
  var marker11 = `vercel.ai.error.${name11}`;
  var symbol11 = Symbol.for(marker11);
  var _a11;
- var
+ var ToolCallRepairError = class extends AISDKError11 {
+ constructor({
+ cause,
+ originalError,
+ message = `Error repairing tool call: ${getErrorMessage3(cause)}`
+ }) {
+ super({ name: name11, message, cause });
+ this[_a11] = true;
+ this.originalError = originalError;
+ }
+ static isInstance(error) {
+ return AISDKError11.hasMarker(error, marker11);
+ }
+ };
+ _a11 = symbol11;
+
+ // errors/tool-execution-error.ts
+ import { AISDKError as AISDKError12, getErrorMessage as getErrorMessage4 } from "@ai-sdk/provider";
+ var name12 = "AI_ToolExecutionError";
+ var marker12 = `vercel.ai.error.${name12}`;
+ var symbol12 = Symbol.for(marker12);
+ var _a12;
+ var ToolExecutionError = class extends AISDKError12 {
  constructor({
  toolArgs,
  toolName,
  cause,
  message = `Error executing tool ${toolName}: ${getErrorMessage4(cause)}`
  }) {
- super({ name:
- this[
+ super({ name: name12, message, cause });
+ this[_a12] = true;
  this.toolArgs = toolArgs;
  this.toolName = toolName;
  }
  static isInstance(error) {
- return
+ return AISDKError12.hasMarker(error, marker12);
  }
  };
-
+ _a12 = symbol12;

  // core/prompt/prepare-tools-and-tool-choice.ts
  import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";
@@ -3374,24 +3392,24 @@ function prepareToolsAndToolChoice({
  };
  }
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
- ([
+ ([name14]) => activeTools.includes(name14)
  ) : Object.entries(tools);
  return {
- tools: filteredTools.map(([
+ tools: filteredTools.map(([name14, tool2]) => {
  const toolType = tool2.type;
  switch (toolType) {
  case void 0:
  case "function":
  return {
  type: "function",
- name:
+ name: name14,
  description: tool2.description,
  parameters: asSchema2(tool2.parameters).jsonSchema
  };
  case "provider-defined":
  return {
  type: "provider-defined",
- name:
+ name: name14,
  id: tool2.id,
  args: tool2.args
  };
@@ -3551,7 +3569,7 @@ async function generateText({
  onStepFinish,
  ...settings
  }) {
- var
+ var _a14;
  if (maxSteps < 1) {
  throw new InvalidArgumentError({
  parameter: "maxSteps",
@@ -3568,7 +3586,7 @@ async function generateText({
  });
  const initialPrompt = standardizePrompt({
  prompt: {
- system: (
+ system: (_a14 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a14 : system,
  prompt,
  messages
  },
@@ -3594,7 +3612,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var
+ var _a15, _b, _c, _d, _e, _f;
  const mode = {
  type: "regular",
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3646,8 +3664,8 @@ async function generateText({
  "ai.prompt.tools": {
  // convert the language model level tools:
  input: () => {
- var
- return (
+ var _a16;
+ return (_a16 = mode.tools) == null ? void 0 : _a16.map((tool2) => JSON.stringify(tool2));
  }
  },
  "ai.prompt.toolChoice": {
@@ -3667,7 +3685,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span2) => {
- var
+ var _a16, _b2, _c2, _d2, _e2, _f2;
  const result = await model.doGenerate({
  mode,
  ...callSettings,
@@ -3679,7 +3697,7 @@ async function generateText({
  headers
  });
  const responseData = {
- id: (_b2 = (
+ id: (_b2 = (_a16 = result.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId3(),
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
  };
@@ -3713,7 +3731,7 @@ async function generateText({
  })
  );
  currentToolCalls = await Promise.all(
- ((
+ ((_a15 = currentModelResponse.toolCalls) != null ? _a15 : []).map(
  (toolCall) => parseToolCall({
  toolCall,
  tools,
@@ -3937,13 +3955,19 @@ __export(output_exports, {
  text: () => text
  });
  import { safeParseJSON as safeParseJSON3, safeValidateTypes as safeValidateTypes4 } from "@ai-sdk/provider-utils";
- import {
+ import {
+ asSchema as asSchema4,
+ parsePartialJson as parsePartialJson2
+ } from "@ai-sdk/ui-utils";
  var text = () => ({
  type: "text",
  responseFormat: () => ({ type: "text" }),
  injectIntoSystemPrompt({ system }) {
  return system;
  },
+ parsePartial({ text: text2 }) {
+ return { partial: text2 };
+ },
  parseOutput({ text: text2 }) {
  return text2;
  }
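This hunk gives each `Output` strategy a streaming counterpart to `parseOutput`: `parsePartial` receives the full accumulated text on every snapshot and, for `Output.text()`, just republishes it. The implied strategy contract, written out as a sketch (the interface name is hypothetical; the shape is read off this diff):

```ts
// Hypothetical name for the shape an Output strategy has after this release:
interface OutputStrategy<PARTIAL, OUTPUT> {
  type: string;
  responseFormat(options: { model: unknown }): unknown;
  injectIntoSystemPrompt(options: { system?: string; model: unknown }): string | undefined;
  // New: called with the accumulated text so far; undefined means
  // "nothing publishable yet", otherwise the partial to emit.
  parsePartial(options: { text: string }): { partial: PARTIAL } | undefined;
  parseOutput(options: { text: string }, context: unknown): OUTPUT;
}
```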
@@ -3964,6 +3988,24 @@ var object = ({
  schema: schema.jsonSchema
  });
  },
+ parsePartial({ text: text2 }) {
+ const result = parsePartialJson2(text2);
+ switch (result.state) {
+ case "failed-parse":
+ case "undefined-input":
+ return void 0;
+ case "repaired-parse":
+ case "successful-parse":
+ return {
+ // Note: currently no validation of partial results:
+ partial: result.value
+ };
+ default: {
+ const _exhaustiveCheck = result.state;
+ throw new Error(`Unsupported parse state: ${_exhaustiveCheck}`);
+ }
+ }
+ },
  parseOutput({ text: text2 }, context) {
  const parseResult = safeParseJSON3({ text: text2 });
  if (!parseResult.success) {
@@ -4287,6 +4329,7 @@ function streamText({
  abortSignal,
  headers,
  maxSteps = 1,
+ experimental_output: output,
  experimental_continueSteps: continueSteps = false,
  experimental_telemetry: telemetry,
  experimental_providerMetadata: providerMetadata,
@@ -4321,6 +4364,7 @@ function streamText({
  activeTools,
  repairToolCall,
  maxSteps,
+ output,
  continueSteps,
  providerMetadata,
  onChunk,
@@ -4331,6 +4375,57 @@ function streamText({
  generateId: generateId3
  });
  }
+ function createOutputTransformStream(output) {
+ if (!output) {
+ return new TransformStream({
+ transform(chunk, controller) {
+ controller.enqueue({ part: chunk, partialOutput: void 0 });
+ }
+ });
+ }
+ let text2 = "";
+ let textChunk = "";
+ let lastPublishedJson = "";
+ return new TransformStream({
+ transform(chunk, controller) {
+ if (chunk.type !== "text-delta") {
+ controller.enqueue({
+ part: chunk,
+ partialOutput: void 0
+ });
+ return;
+ }
+ text2 += chunk.textDelta;
+ textChunk += chunk.textDelta;
+ const result = output.parsePartial({ text: text2 });
+ if (result != null) {
+ const currentJson = JSON.stringify(result.partial);
+ if (currentJson !== lastPublishedJson) {
+ controller.enqueue({
+ part: {
+ type: "text-delta",
+ textDelta: textChunk
+ },
+ partialOutput: result.partial
+ });
+ lastPublishedJson = currentJson;
+ textChunk = "";
+ }
+ }
+ },
+ flush(controller) {
+ if (textChunk.length > 0) {
+ controller.enqueue({
+ part: {
+ type: "text-delta",
+ textDelta: textChunk
+ },
+ partialOutput: void 0
+ });
+ }
+ }
+ });
+ }
  var DefaultStreamTextResult = class {
  constructor({
  model,
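`createOutputTransformStream` is the core of the new partial-output streaming: it accumulates all text, reparses it via `output.parsePartial` after every delta, and only forwards a coalesced text-delta with a `partialOutput` attached when the JSON serialization of the partial actually changed, so downstream consumers never see duplicates. The dedup-by-serialization idea in isolation, as a generic runnable sketch (names are illustrative, not from the package):

```ts
// Illustrative helper: pass values through only when their JSON form changes.
function dedupeByJson<T>(): TransformStream<T, T> {
  let lastJson = "";
  return new TransformStream<T, T>({
    transform(value, controller) {
      const json = JSON.stringify(value);
      if (json !== lastJson) {
        lastJson = json;
        controller.enqueue(value);
      }
    },
  });
}
```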
@@ -4349,6 +4444,7 @@ var DefaultStreamTextResult = class {
  activeTools,
  repairToolCall,
  maxSteps,
+ output,
  continueSteps,
  providerMetadata,
  onChunk,
@@ -4368,6 +4464,7 @@ var DefaultStreamTextResult = class {
  this.requestPromise = new DelayedPromise();
  this.responsePromise = new DelayedPromise();
  this.stepsPromise = new DelayedPromise();
+ var _a14;
  if (maxSteps < 1) {
  throw new InvalidArgumentError({
  parameter: "maxSteps",
@@ -4375,10 +4472,10 @@ var DefaultStreamTextResult = class {
  message: "maxSteps must be at least 1"
  });
  }
+ this.output = output;
  let recordedStepText = "";
  let recordedContinuationText = "";
  let recordedFullText = "";
- let recordedRequest = void 0;
  const recordedResponse = {
  id: generateId3(),
  timestamp: currentDate(),
@@ -4389,28 +4486,28 @@ var DefaultStreamTextResult = class {
  let recordedToolResults = [];
  let recordedFinishReason = void 0;
  let recordedUsage = void 0;
- let recordedProviderMetadata = void 0;
  let stepType = "initial";
  const recordedSteps = [];
  let rootSpan;
  const eventProcessor = new TransformStream({
  async transform(chunk, controller) {
  controller.enqueue(chunk);
-
-
+ const { part } = chunk;
+ if (part.type === "text-delta" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
+ await (onChunk == null ? void 0 : onChunk({ chunk: part }));
  }
- if (
- recordedStepText +=
- recordedContinuationText +=
- recordedFullText +=
+ if (part.type === "text-delta") {
+ recordedStepText += part.textDelta;
+ recordedContinuationText += part.textDelta;
+ recordedFullText += part.textDelta;
  }
- if (
- recordedToolCalls.push(
+ if (part.type === "tool-call") {
+ recordedToolCalls.push(part);
  }
- if (
- recordedToolResults.push(
+ if (part.type === "tool-result") {
+ recordedToolResults.push(part);
  }
- if (
+ if (part.type === "step-finish") {
  const stepMessages = toResponseMessages({
  text: recordedContinuationText,
  tools: tools != null ? tools : {},
@@ -4420,7 +4517,7 @@ var DefaultStreamTextResult = class {
  const currentStep = recordedSteps.length;
  let nextStepType = "done";
  if (currentStep + 1 < maxSteps) {
- if (continueSteps &&
+ if (continueSteps && part.finishReason === "length" && // only use continue when there are no tool calls:
  recordedToolCalls.length === 0) {
  nextStepType = "continue";
  } else if (
@@ -4436,24 +4533,23 @@ var DefaultStreamTextResult = class {
  text: recordedStepText,
  toolCalls: recordedToolCalls,
  toolResults: recordedToolResults,
- finishReason:
- usage:
- warnings:
- logprobs:
- request:
+ finishReason: part.finishReason,
+ usage: part.usage,
+ warnings: part.warnings,
+ logprobs: part.logprobs,
+ request: part.request,
  response: {
- ...
+ ...part.response,
  messages: [...recordedResponse.messages, ...stepMessages]
  },
- experimental_providerMetadata:
- isContinued:
+ experimental_providerMetadata: part.experimental_providerMetadata,
+ isContinued: part.isContinued
  };
  await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
  recordedSteps.push(currentStepResult);
  recordedToolCalls = [];
  recordedToolResults = [];
  recordedStepText = "";
- recordedRequest = chunk.request;
  if (nextStepType !== "done") {
  stepType = nextStepType;
  }
@@ -4462,18 +4558,17 @@ var DefaultStreamTextResult = class {
  recordedContinuationText = "";
  }
  }
- if (
- recordedResponse.id =
- recordedResponse.timestamp =
- recordedResponse.modelId =
- recordedResponse.headers =
- recordedUsage =
- recordedFinishReason =
- recordedProviderMetadata = chunk.experimental_providerMetadata;
+ if (part.type === "finish") {
+ recordedResponse.id = part.response.id;
+ recordedResponse.timestamp = part.response.timestamp;
+ recordedResponse.modelId = part.response.modelId;
+ recordedResponse.headers = part.response.headers;
+ recordedUsage = part.usage;
+ recordedFinishReason = part.finishReason;
  }
  },
  async flush(controller) {
- var
+ var _a15;
  try {
  const lastStep = recordedSteps[recordedSteps.length - 1];
  if (lastStep) {
@@ -4503,7 +4598,7 @@ var DefaultStreamTextResult = class {
  text: recordedFullText,
  toolCalls: lastStep.toolCalls,
  toolResults: lastStep.toolResults,
- request: (
+ request: (_a15 = lastStep.request) != null ? _a15 : {},
  response: lastStep.response,
  warnings: lastStep.warnings,
  experimental_providerMetadata: lastStep.experimental_providerMetadata,
@@ -4517,8 +4612,8 @@ var DefaultStreamTextResult = class {
  "ai.response.text": { output: () => recordedFullText },
  "ai.response.toolCalls": {
  output: () => {
- var
- return ((
+ var _a16;
+ return ((_a16 = lastStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(lastStep.toolCalls) : void 0;
  }
  },
  "ai.usage.promptTokens": usage.promptTokens,
@@ -4536,7 +4631,11 @@ var DefaultStreamTextResult = class {
  const stitchableStream = createStitchableStream();
  this.addStream = stitchableStream.addStream;
  this.closeStream = stitchableStream.close;
-
+ let stream = stitchableStream.stream;
+ if (transform) {
+ stream = stream.pipeThrough(transform);
+ }
+ this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);
  const { maxRetries, retry } = prepareRetries({
  maxRetries: maxRetriesArg
  });
@@ -4548,7 +4647,11 @@ var DefaultStreamTextResult = class {
  settings: { ...settings, maxRetries }
  });
  const initialPrompt = standardizePrompt({
- prompt: {
+ prompt: {
+ system: (_a14 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a14 : system,
+ prompt,
+ messages
+ },
  tools
  });
  const self = this;
@@ -4597,7 +4700,7 @@ var DefaultStreamTextResult = class {
  ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
  };
  const {
- result: { stream, warnings, rawResponse, request },
+ result: { stream: stream2, warnings, rawResponse, request },
  doStreamSpan,
  startTimestampMs
  } = await retry(
@@ -4620,8 +4723,8 @@ var DefaultStreamTextResult = class {
  "ai.prompt.tools": {
  // convert the language model level tools:
  input: () => {
- var
- return (
+ var _a15;
+ return (_a15 = mode.tools) == null ? void 0 : _a15.map((tool2) => JSON.stringify(tool2));
  }
  },
  "ai.prompt.toolChoice": {
@@ -4649,6 +4752,7 @@ var DefaultStreamTextResult = class {
  mode,
  ...prepareCallSettings(settings),
  inputFormat: promptFormat,
+ responseFormat: output == null ? void 0 : output.responseFormat({ model }),
  prompt: promptMessages,
  providerMetadata,
  abortSignal,
@@ -4659,7 +4763,7 @@ var DefaultStreamTextResult = class {
  );
  const transformedStream = runToolsTransformation({
  tools,
- generatorStream:
+ generatorStream: stream2,
  toolCallStreaming,
  tracer,
  telemetry,
@@ -4705,7 +4809,7 @@ var DefaultStreamTextResult = class {
  transformedStream.pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var
+ var _a15, _b, _c;
  if (stepFirstChunk) {
  const msToFirstChunk = now2() - startTimestampMs;
  stepFirstChunk = false;
@@ -4757,7 +4861,7 @@ var DefaultStreamTextResult = class {
  }
  case "response-metadata": {
  stepResponse = {
- id: (
+ id: (_a15 = chunk.id) != null ? _a15 : stepResponse.id,
  timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
  modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
  };
@@ -4982,11 +5086,11 @@ var DefaultStreamTextResult = class {
  return createAsyncIterableStream(
  this.teeStream().pipeThrough(
  new TransformStream({
- transform(
- if (
- controller.enqueue(
- } else if (
- controller.error(
+ transform({ part }, controller) {
+ if (part.type === "text-delta") {
+ controller.enqueue(part.textDelta);
+ } else if (part.type === "error") {
+ controller.error(part.error);
  }
  }
  })
@@ -4994,7 +5098,31 @@ var DefaultStreamTextResult = class {
  );
  }
  get fullStream() {
- return createAsyncIterableStream(
+ return createAsyncIterableStream(
+ this.teeStream().pipeThrough(
+ new TransformStream({
+ transform({ part }, controller) {
+ controller.enqueue(part);
+ }
+ })
+ )
+ );
+ }
+ get experimental_partialOutputStream() {
+ if (this.output == null) {
+ throw new NoOutputSpecifiedError();
+ }
+ return createAsyncIterableStream(
+ this.teeStream().pipeThrough(
+ new TransformStream({
+ transform({ partialOutput }, controller) {
+ if (partialOutput != null) {
+ controller.enqueue(partialOutput);
+ }
+ }
+ })
+ )
+ );
  }
  toDataStreamInternal({
  getErrorMessage: getErrorMessage5 = () => "An error occurred.",
@@ -5161,9 +5289,9 @@ var DefaultStreamTextResult = class {
  );
  }
  toTextStreamResponse(init) {
- var
+ var _a14;
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (
+ status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
  contentType: "text/plain; charset=utf-8"
  })
@@ -5192,11 +5320,11 @@ function smoothStream({
  return;
  }
  buffer += chunk.textDelta;
-
-
- const
- controller.enqueue({ type: "text-delta", textDelta:
- buffer = buffer.slice(
+ const regexp = /\s*\S+\s+/m;
+ while (regexp.test(buffer)) {
+ const chunk2 = buffer.match(regexp)[0];
+ controller.enqueue({ type: "text-delta", textDelta: chunk2 });
+ buffer = buffer.slice(chunk2.length);
  if (delayInMs > 0) {
  await delay2(delayInMs);
  }
@@ -5269,11 +5397,11 @@ function experimental_customProvider({
  }

  // core/registry/no-such-provider-error.ts
- import { AISDKError as
- var
- var
- var
- var
+ import { AISDKError as AISDKError14, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
+ var name13 = "AI_NoSuchProviderError";
+ var marker13 = `vercel.ai.error.${name13}`;
+ var symbol13 = Symbol.for(marker13);
+ var _a13;
  var NoSuchProviderError = class extends NoSuchModelError3 {
  constructor({
  modelId,
@@ -5282,16 +5410,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
  availableProviders,
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
  }) {
- super({ errorName:
- this[
+ super({ errorName: name13, modelId, modelType, message });
+ this[_a13] = true;
  this.providerId = providerId;
  this.availableProviders = availableProviders;
  }
  static isInstance(error) {
- return
+ return AISDKError14.hasMarker(error, marker13);
  }
  };
-
+ _a13 = symbol13;

  // core/registry/provider-registry.ts
  import { NoSuchModelError as NoSuchModelError4 } from "@ai-sdk/provider";
@@ -5333,19 +5461,19 @@ var DefaultProviderRegistry = class {
  return [id.slice(0, index), id.slice(index + 1)];
  }
  languageModel(id) {
- var
+ var _a14, _b;
  const [providerId, modelId] = this.splitId(id, "languageModel");
- const model = (_b = (
+ const model = (_b = (_a14 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a14, modelId);
  if (model == null) {
  throw new NoSuchModelError4({ modelId: id, modelType: "languageModel" });
  }
  return model;
  }
  textEmbeddingModel(id) {
- var
+ var _a14;
  const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
  const provider = this.getProvider(providerId);
- const model = (
+ const model = (_a14 = provider.textEmbeddingModel) == null ? void 0 : _a14.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError4({
  modelId: id,
@@ -5393,7 +5521,7 @@ import {
  function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
  async start(controller) {
- var
+ var _a14;
  const textEncoder = new TextEncoder();
  const sendMessage = (message) => {
  controller.enqueue(
@@ -5415,7 +5543,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  );
  };
  const forwardStream = async (stream2) => {
- var
+ var _a15, _b;
  let result = void 0;
  for await (const value of stream2) {
  switch (value.event) {
@@ -5432,7 +5560,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  break;
  }
  case "thread.message.delta": {
- const content = (
+ const content = (_a15 = value.data.delta.content) == null ? void 0 : _a15[0];
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
  controller.enqueue(
  textEncoder.encode(
@@ -5466,7 +5594,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  forwardStream
  });
  } catch (error) {
- sendError((
+ sendError((_a14 = error.message) != null ? _a14 : `${error}`);
  } finally {
  controller.close();
  }
@@ -5527,7 +5655,7 @@ function toDataStreamInternal(stream, callbacks) {
  return stream.pipeThrough(
  new TransformStream({
  transform: async (value, controller) => {
- var
+ var _a14;
  if (typeof value === "string") {
  controller.enqueue(value);
  return;
@@ -5535,7 +5663,7 @@ function toDataStreamInternal(stream, callbacks) {
  if ("event" in value) {
  if (value.event === "on_chat_model_stream") {
  forwardAIMessageChunk(
- (
+ (_a14 = value.data) == null ? void 0 : _a14.chunk,
  controller
  );
  }
@@ -5558,7 +5686,7 @@ function toDataStream(stream, callbacks) {
  );
  }
  function toDataStreamResponse(stream, options) {
- var
+ var _a14;
  const dataStream = toDataStreamInternal(
  stream,
  options == null ? void 0 : options.callbacks
@@ -5567,7 +5695,7 @@ function toDataStreamResponse(stream, options) {
  const init = options == null ? void 0 : options.init;
  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
  return new Response(responseStream, {
- status: (
+ status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
  statusText: init == null ? void 0 : init.statusText,
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
  contentType: "text/plain; charset=utf-8",
@@ -5622,14 +5750,14 @@ function toDataStream2(stream, callbacks) {
  );
  }
  function toDataStreamResponse2(stream, options = {}) {
- var
+ var _a14;
  const { init, data, callbacks } = options;
  const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
  new TextEncoderStream()
  );
  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
  return new Response(responseStream, {
- status: (
+ status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
  statusText: init == null ? void 0 : init.statusText,
  headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
  contentType: "text/plain; charset=utf-8",
@@ -5721,7 +5849,7 @@ var StreamData = class {
  }
  };
  export {
-
+ AISDKError13 as AISDKError,
  APICallError2 as APICallError,
  AssistantResponse,
  DownloadError,
@@ -5739,6 +5867,7 @@ export {
  MessageConversionError,
  NoContentGeneratedError,
  NoObjectGeneratedError,
+ NoOutputSpecifiedError,
  NoSuchModelError,
  NoSuchProviderError,
  NoSuchToolError,