ai 4.0.12 → 4.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +73 -29
- package/dist/index.d.ts +73 -29
- package/dist/index.js +254 -176
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +223 -146
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
-  for (var name12 in all)
-    __defProp(target, name12, { get: all[name12], enumerable: true });
+  for (var name13 in all)
+    __defProp(target, name13, { get: all[name13], enumerable: true });
 };
 
 // streams/index.ts
@@ -347,7 +347,7 @@ function getBaseTelemetryAttributes({
   telemetry,
   headers
 }) {
-  var _a12;
+  var _a13;
   return {
     "ai.model.provider": model.provider,
     "ai.model.id": model.modelId,
@@ -357,7 +357,7 @@ function getBaseTelemetryAttributes({
       return attributes;
     }, {}),
     // add metadata as attributes:
-    ...Object.entries((_a12 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a12 : {}).reduce(
+    ...Object.entries((_a13 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a13 : {}).reduce(
       (attributes, [key, value]) => {
         attributes[`ai.telemetry.metadata.${key}`] = value;
         return attributes;
@@ -382,7 +382,7 @@ var noopTracer = {
   startSpan() {
     return noopSpan;
   },
-  startActiveSpan(name12, arg1, arg2, arg3) {
+  startActiveSpan(name13, arg1, arg2, arg3) {
     if (typeof arg1 === "function") {
       return arg1(noopSpan);
     }
@@ -452,13 +452,13 @@ function getTracer({
 // core/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
 function recordSpan({
-  name: name12,
+  name: name13,
   tracer,
   attributes,
   fn,
   endWhenDone = true
 }) {
-  return tracer.startActiveSpan(name12, { attributes }, async (span) => {
+  return tracer.startActiveSpan(name13, { attributes }, async (span) => {
     try {
       const result = await fn(span);
       if (endWhenDone) {
@@ -566,14 +566,14 @@ async function embed({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var _a12;
+      var _a13;
       const modelResponse = await model.doEmbed({
         values: [value],
         abortSignal,
         headers
       });
       const embedding2 = modelResponse.embeddings[0];
-      const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
+      const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
       doEmbedSpan.setAttributes(
         selectTelemetryAttributes({
           telemetry,
@@ -683,14 +683,14 @@ async function embedMany({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var _a12;
+      var _a13;
       const modelResponse = await model.doEmbed({
         values,
         abortSignal,
         headers
       });
       const embeddings3 = modelResponse.embeddings;
-      const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
+      const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
       doEmbedSpan.setAttributes(
         selectTelemetryAttributes({
           telemetry,
@@ -742,14 +742,14 @@ async function embedMany({
       }),
       tracer,
       fn: async (doEmbedSpan) => {
-        var _a12;
+        var _a13;
         const modelResponse = await model.doEmbed({
           values: chunk,
           abortSignal,
           headers
         });
         const embeddings2 = modelResponse.embeddings;
-        const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
+        const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
         doEmbedSpan.setAttributes(
           selectTelemetryAttributes({
             telemetry,
@@ -829,7 +829,7 @@ async function download({
   url,
   fetchImplementation = fetch
 }) {
-  var _a12;
+  var _a13;
   const urlText = url.toString();
   try {
     const response = await fetchImplementation(urlText);
@@ -842,7 +842,7 @@ async function download({
     }
     return {
       data: new Uint8Array(await response.arrayBuffer()),
-      mimeType: (_a12 = response.headers.get("content-type")) != null ? _a12 : void 0
+      mimeType: (_a13 = response.headers.get("content-type")) != null ? _a13 : void 0
     };
   } catch (error) {
     if (DownloadError.isInstance(error)) {
@@ -905,8 +905,8 @@ var dataContentSchema = z.union([
   z.custom(
     // Buffer might not be available in some environments such as CloudFlare:
     (value) => {
-      var _a12, _b;
-      return (_b = (_a12 = globalThis.Buffer) == null ? void 0 : _a12.isBuffer(value)) != null ? _b : false;
+      var _a13, _b;
+      return (_b = (_a13 = globalThis.Buffer) == null ? void 0 : _a13.isBuffer(value)) != null ? _b : false;
     },
     { message: "Must be a Buffer" }
   )
@@ -1414,7 +1414,7 @@ function detectSingleMessageCharacteristics(message) {
 
 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
-  var _a12, _b, _c;
+  var _a13, _b, _c;
   const parts = [];
   for (const attachment of attachments) {
     let url;
@@ -1426,7 +1426,7 @@ function attachmentsToParts(attachments) {
     switch (url.protocol) {
       case "http:":
       case "https:": {
-        if ((_a12 = attachment.contentType) == null ? void 0 : _a12.startsWith("image/")) {
+        if ((_a13 = attachment.contentType) == null ? void 0 : _a13.startsWith("image/")) {
           parts.push({ type: "image", image: url });
         } else {
           if (!attachment.contentType) {
@@ -1512,8 +1512,8 @@ _a6 = symbol6;
 
 // core/prompt/convert-to-core-messages.ts
 function convertToCoreMessages(messages, options) {
-  var _a12;
-  const tools = (_a12 = options == null ? void 0 : options.tools) != null ? _a12 : {};
+  var _a13;
+  const tools = (_a13 = options == null ? void 0 : options.tools) != null ? _a13 : {};
   const coreMessages = [];
   for (const message of messages) {
     const { role, content, toolInvocations, experimental_attachments } = message;
@@ -1800,7 +1800,7 @@ var arrayOutputStrategy = (schema) => {
       additionalProperties: false
     },
     validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
-      var _a12;
+      var _a13;
       if (!isJSONObject(value) || !isJSONArray(value.elements)) {
         return {
           success: false,
@@ -1823,7 +1823,7 @@ var arrayOutputStrategy = (schema) => {
         }
         resultArray.push(result.value);
       }
-      const publishedElementCount = (_a12 = latestObject == null ? void 0 : latestObject.length) != null ? _a12 : 0;
+      const publishedElementCount = (_a13 = latestObject == null ? void 0 : latestObject.length) != null ? _a13 : 0;
       let textDelta = "";
       if (isFirstDelta) {
         textDelta += "[";
@@ -2155,7 +2155,7 @@ async function generateObject({
     }),
     tracer,
     fn: async (span) => {
-      var _a12, _b;
+      var _a13, _b;
       if (mode === "auto" || mode == null) {
        mode = model.defaultObjectGenerationMode;
      }
@@ -2217,7 +2217,7 @@ async function generateObject({
          }),
          tracer,
          fn: async (span2) => {
-            var _a13, _b2, _c, _d, _e, _f;
+            var _a14, _b2, _c, _d, _e, _f;
            const result2 = await model.doGenerate({
              mode: {
                type: "object-json",
@@ -2236,7 +2236,7 @@ async function generateObject({
              throw new NoObjectGeneratedError();
            }
            const responseData = {
-              id: (_b2 = (_a13 = result2.response) == null ? void 0 : _a13.id) != null ? _b2 : generateId3(),
+              id: (_b2 = (_a14 = result2.response) == null ? void 0 : _a14.id) != null ? _b2 : generateId3(),
              timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
              modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
            };
@@ -2271,7 +2271,7 @@ async function generateObject({
        rawResponse = generateResult.rawResponse;
        logprobs = generateResult.logprobs;
        resultProviderMetadata = generateResult.providerMetadata;
-        request = (_a12 = generateResult.request) != null ? _a12 : {};
+        request = (_a13 = generateResult.request) != null ? _a13 : {};
        response = generateResult.responseData;
        break;
      }
@@ -2317,7 +2317,7 @@ async function generateObject({
          }),
          tracer,
          fn: async (span2) => {
-            var _a13, _b2, _c, _d, _e, _f, _g, _h;
+            var _a14, _b2, _c, _d, _e, _f, _g, _h;
            const result2 = await model.doGenerate({
              mode: {
                type: "object-tool",
@@ -2335,7 +2335,7 @@ async function generateObject({
              abortSignal,
              headers
            });
-            const objectText = (_b2 = (_a13 = result2.toolCalls) == null ? void 0 : _a13[0]) == null ? void 0 : _b2.args;
+            const objectText = (_b2 = (_a14 = result2.toolCalls) == null ? void 0 : _a14[0]) == null ? void 0 : _b2.args;
            if (objectText === void 0) {
              throw new NoObjectGeneratedError();
            }
@@ -2440,9 +2440,9 @@ var DefaultGenerateObjectResult = class {
    this.logprobs = options.logprobs;
  }
  toJsonResponse(init) {
-    var _a12;
+    var _a13;
    return new Response(JSON.stringify(this.object), {
-      status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
+      status: (_a13 = init == null ? void 0 : init.status) != null ? _a13 : 200,
      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
        contentType: "application/json; charset=utf-8"
      })
@@ -2480,17 +2480,17 @@ var DelayedPromise = class {
    return this.promise;
  }
  resolve(value) {
-    var _a12;
+    var _a13;
    this.status = { type: "resolved", value };
    if (this.promise) {
-      (_a12 = this._resolve) == null ? void 0 : _a12.call(this, value);
+      (_a13 = this._resolve) == null ? void 0 : _a13.call(this, value);
    }
  }
  reject(error) {
-    var _a12;
+    var _a13;
    this.status = { type: "rejected", error };
    if (this.promise) {
-      (_a12 = this._reject) == null ? void 0 : _a12.call(this, error);
+      (_a13 = this._reject) == null ? void 0 : _a13.call(this, error);
    }
  }
 };
@@ -2579,8 +2579,8 @@ function createStitchableStream() {
 
 // core/util/now.ts
 function now() {
-  var _a12, _b;
-  return (_b = (_a12 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a12.now()) != null ? _b : Date.now();
+  var _a13, _b;
+  return (_b = (_a13 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a13.now()) != null ? _b : Date.now();
 }
 
 // core/generate-object/stream-object.ts
@@ -2869,7 +2869,7 @@ var DefaultStreamObjectResult = class {
    const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
      new TransformStream({
        async transform(chunk, controller) {
-          var _a12, _b, _c;
+          var _a13, _b, _c;
          if (isFirstChunk) {
            const msToFirstChunk = now2() - startTimestampMs;
            isFirstChunk = false;
@@ -2915,7 +2915,7 @@ var DefaultStreamObjectResult = class {
          switch (chunk.type) {
            case "response-metadata": {
              response = {
-                id: (_a12 = chunk.id) != null ? _a12 : response.id,
+                id: (_a13 = chunk.id) != null ? _a13 : response.id,
                timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
                modelId: (_c = chunk.modelId) != null ? _c : response.modelId
              };
@@ -3112,9 +3112,9 @@ var DefaultStreamObjectResult = class {
    });
  }
  toTextStreamResponse(init) {
-    var _a12;
+    var _a13;
    return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
+      status: (_a13 = init == null ? void 0 : init.status) != null ? _a13 : 200,
      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
        contentType: "text/plain; charset=utf-8"
      })
@@ -3127,7 +3127,7 @@ import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils"
 
 // errors/index.ts
 import {
-  AISDKError as AISDKError11,
+  AISDKError as AISDKError12,
   APICallError as APICallError2,
   EmptyResponseBodyError,
   InvalidPromptError as InvalidPromptError2,
@@ -3189,23 +3189,21 @@ var NoSuchToolError = class extends AISDKError9 {
 };
 _a9 = symbol9;
 
-// errors/tool-execution-error.ts
+// errors/tool-call-repair-error.ts
 import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
-var name10 = "AI_ToolExecutionError";
+var name10 = "AI_ToolCallRepairError";
 var marker10 = `vercel.ai.error.${name10}`;
 var symbol10 = Symbol.for(marker10);
 var _a10;
-var ToolExecutionError = class extends AISDKError10 {
+var ToolCallRepairError = class extends AISDKError10 {
   constructor({
-    toolArgs,
-    toolName,
     cause,
-    message = `Error executing tool ${toolName}: ${getErrorMessage3(cause)}`
+    originalError,
+    message = `Error repairing tool call: ${getErrorMessage3(cause)}`
   }) {
     super({ name: name10, message, cause });
     this[_a10] = true;
-    this.toolArgs = toolArgs;
-    this.toolName = toolName;
+    this.originalError = originalError;
   }
   static isInstance(error) {
     return AISDKError10.hasMarker(error, marker10);
@@ -3213,6 +3211,30 @@ var ToolExecutionError = class extends AISDKError10 {
 };
 _a10 = symbol10;
 
+// errors/tool-execution-error.ts
+import { AISDKError as AISDKError11, getErrorMessage as getErrorMessage4 } from "@ai-sdk/provider";
+var name11 = "AI_ToolExecutionError";
+var marker11 = `vercel.ai.error.${name11}`;
+var symbol11 = Symbol.for(marker11);
+var _a11;
+var ToolExecutionError = class extends AISDKError11 {
+  constructor({
+    toolArgs,
+    toolName,
+    cause,
+    message = `Error executing tool ${toolName}: ${getErrorMessage4(cause)}`
+  }) {
+    super({ name: name11, message, cause });
+    this[_a11] = true;
+    this.toolArgs = toolArgs;
+    this.toolName = toolName;
+  }
+  static isInstance(error) {
+    return AISDKError11.hasMarker(error, marker11);
+  }
+};
+_a11 = symbol11;
+
 // core/prompt/prepare-tools-and-tool-choice.ts
 import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";
 
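Note on the two hunks above: 4.0.13 inserts a new error module, which shifts every esbuild-numbered identifier in the bundle by one (name12 → name13, _a12 → _a13, AISDKError11 → AISDKError12, and so on); that rename cascade accounts for most of the small hunks in this diff. Error slot 10 is repurposed for the new AI_ToolCallRepairError, which records both the failure thrown by the repair callback (cause) and the error that triggered the repair attempt (originalError); AI_ToolExecutionError is re-emitted unchanged as slot 11. A minimal sketch of telling the two apart in application code; the surrounding generateText call is assumed, not shown in this diff:

import { generateText, ToolCallRepairError, ToolExecutionError } from "ai";

try {
  // await generateText({ ... }) with tools (hypothetical call)
} catch (error) {
  if (ToolCallRepairError.isInstance(error)) {
    // the repair callback itself threw
    console.error("repair failed:", error.cause, "while handling:", error.originalError);
  } else if (ToolExecutionError.isInstance(error)) {
    // a tool's execute() threw; toolName and toolArgs identify the call
    console.error(`tool ${error.toolName} failed:`, error.cause);
  }
}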
@@ -3234,24 +3256,24 @@ function prepareToolsAndToolChoice({
     };
   }
   const filteredTools = activeTools != null ? Object.entries(tools).filter(
-    ([name12]) => activeTools.includes(name12)
+    ([name13]) => activeTools.includes(name13)
   ) : Object.entries(tools);
   return {
-    tools: filteredTools.map(([name12, tool2]) => {
+    tools: filteredTools.map(([name13, tool2]) => {
       const toolType = tool2.type;
       switch (toolType) {
         case void 0:
         case "function":
           return {
             type: "function",
-            name: name12,
+            name: name13,
             description: tool2.description,
             parameters: asSchema2(tool2.parameters).jsonSchema
           };
         case "provider-defined":
           return {
             type: "provider-defined",
-            name: name12,
+            name: name13,
             id: tool2.id,
             args: tool2.args
           };
@@ -3281,14 +3303,49 @@ function removeTextAfterLastWhitespace(text2) {
 // core/generate-text/parse-tool-call.ts
 import { safeParseJSON as safeParseJSON2, safeValidateTypes as safeValidateTypes3 } from "@ai-sdk/provider-utils";
 import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";
-function parseToolCall({
+async function parseToolCall({
   toolCall,
-  tools
+  tools,
+  repairToolCall,
+  system,
+  messages
 }) {
-  const toolName = toolCall.toolName;
   if (tools == null) {
     throw new NoSuchToolError({ toolName: toolCall.toolName });
   }
+  try {
+    return await doParseToolCall({ toolCall, tools });
+  } catch (error) {
+    if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolArgumentsError.isInstance(error))) {
+      throw error;
+    }
+    let repairedToolCall = null;
+    try {
+      repairedToolCall = await repairToolCall({
+        toolCall,
+        tools,
+        parameterSchema: ({ toolName }) => asSchema3(tools[toolName].parameters).jsonSchema,
+        system,
+        messages,
+        error
+      });
+    } catch (repairError) {
+      throw new ToolCallRepairError({
+        cause: repairError,
+        originalError: error
+      });
+    }
+    if (repairedToolCall == null) {
+      throw error;
+    }
+    return await doParseToolCall({ toolCall: repairedToolCall, tools });
+  }
+}
+async function doParseToolCall({
+  toolCall,
+  tools
+}) {
+  const toolName = toolCall.toolName;
   const tool2 = tools[toolName];
   if (tool2 == null) {
     throw new NoSuchToolError({
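The hunk above is the heart of the release: parseToolCall is now async, and the old synchronous parsing moves into doParseToolCall. When parsing fails with NoSuchToolError or InvalidToolArgumentsError and a repairToolCall callback was supplied, the callback receives { toolCall, tools, parameterSchema, system, messages, error }; returning null rethrows the original error, a returned call is re-parsed, and an exception from the callback is wrapped in ToolCallRepairError. Surfaced through the option added in the next hunk, usage looks roughly like this sketch (model, myTools, and the repair strategy are assumptions, not part of the diff):

import { generateText, NoSuchToolError } from "ai";

const result = await generateText({
  model,          // any language model instance (assumed)
  tools: myTools, // assumed tool set
  prompt: "...",
  experimental_repairToolCall: async ({ toolCall, error, parameterSchema }) => {
    if (NoSuchToolError.isInstance(error)) {
      return null; // unknown tool: no repair possible, original error is rethrown
    }
    // invalid arguments: e.g. regenerate them against
    // parameterSchema({ toolName: toolCall.toolName }) and return the fixed call
    return { ...toolCall, args: JSON.stringify({ /* corrected args */ }) };
  },
});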
@@ -3368,6 +3425,7 @@ async function generateText({
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
   experimental_activeTools: activeTools,
+  experimental_repairToolCall: repairToolCall,
   _internal: {
     generateId: generateId3 = originalGenerateId3,
     currentDate = () => /* @__PURE__ */ new Date()
@@ -3375,7 +3433,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
-  var _a12;
+  var _a13;
   if (maxSteps < 1) {
     throw new InvalidArgumentError({
       parameter: "maxSteps",
@@ -3392,7 +3450,7 @@ async function generateText({
   });
   const initialPrompt = standardizePrompt({
     prompt: {
-      system: (_a12 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a12 : system,
+      system: (_a13 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a13 : system,
       prompt,
       messages
     },
@@ -3418,7 +3476,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var _a13, _b, _c, _d, _e, _f;
+      var _a14, _b, _c, _d, _e, _f;
       const mode = {
         type: "regular",
         ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3470,8 +3528,8 @@ async function generateText({
             "ai.prompt.tools": {
               // convert the language model level tools:
               input: () => {
-                var _a14;
-                return (_a14 = mode.tools) == null ? void 0 : _a14.map((tool2) => JSON.stringify(tool2));
+                var _a15;
+                return (_a15 = mode.tools) == null ? void 0 : _a15.map((tool2) => JSON.stringify(tool2));
               }
             },
             "ai.prompt.toolChoice": {
@@ -3491,7 +3549,7 @@ async function generateText({
           }),
           tracer,
           fn: async (span2) => {
-            var _a14, _b2, _c2, _d2, _e2, _f2;
+            var _a15, _b2, _c2, _d2, _e2, _f2;
             const result = await model.doGenerate({
               mode,
               ...callSettings,
@@ -3503,7 +3561,7 @@ async function generateText({
               headers
             });
             const responseData = {
-              id: (_b2 = (_a14 = result.response) == null ? void 0 : _a14.id) != null ? _b2 : generateId3(),
+              id: (_b2 = (_a15 = result.response) == null ? void 0 : _a15.id) != null ? _b2 : generateId3(),
               timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
               modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
             };
@@ -3536,8 +3594,16 @@ async function generateText({
           }
         })
       );
-      currentToolCalls = ((_a13 = currentModelResponse.toolCalls) != null ? _a13 : []).map(
-        (toolCall) => parseToolCall({ toolCall, tools })
+      currentToolCalls = await Promise.all(
+        ((_a14 = currentModelResponse.toolCalls) != null ? _a14 : []).map(
+          (toolCall) => parseToolCall({
+            toolCall,
+            tools,
+            repairToolCall,
+            system,
+            messages: stepInputMessages
+          })
+        )
       );
       currentToolResults = tools == null ? [] : await executeTools({
         toolCalls: currentToolCalls,
@@ -3741,6 +3807,46 @@ var DefaultGenerateTextResult = class {
   }
 };
 
+// core/generate-text/output.ts
+var output_exports = {};
+__export(output_exports, {
+  object: () => object,
+  text: () => text
+});
+import { parseJSON } from "@ai-sdk/provider-utils";
+import { asSchema as asSchema4 } from "@ai-sdk/ui-utils";
+var text = () => ({
+  type: "text",
+  responseFormat: () => ({ type: "text" }),
+  injectIntoSystemPrompt({ system }) {
+    return system;
+  },
+  parseOutput({ text: text2 }) {
+    return text2;
+  }
+});
+var object = ({
+  schema: inputSchema
+}) => {
+  const schema = asSchema4(inputSchema);
+  return {
+    type: "object",
+    responseFormat: ({ model }) => ({
+      type: "json",
+      schema: model.supportsStructuredOutputs ? schema.jsonSchema : void 0
+    }),
+    injectIntoSystemPrompt({ system, model }) {
+      return model.supportsStructuredOutputs ? system : injectJsonInstruction({
+        prompt: system,
+        schema: schema.jsonSchema
+      });
+    },
+    parseOutput({ text: text2 }) {
+      return parseJSON({ text: text2, schema });
+    }
+  };
+};
+
 // core/generate-text/stream-text.ts
 import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
 import { formatDataStreamPart as formatDataStreamPart2 } from "@ai-sdk/ui-utils";
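Note: the forty added lines above are not new code. The identical core/generate-text/output.ts module is deleted further down (old lines 4846-4885), so this pair of hunks merely hoists the Output strategies ahead of stream-text.ts in the bundle. Output.text() passes text through; Output.object({ schema }) requests JSON (via responseFormat, or an injected system-prompt instruction when model.supportsStructuredOutputs is false) and parses it with parseJSON. A hedged sketch of consuming it; the experimental_output option and result field names are not visible in this diff and are assumed from the SDK's public API:

import { generateText, Output } from "ai";
import { z } from "zod";

const { experimental_output: weather } = await generateText({
  model, // assumed language model instance
  prompt: "What is the weather in Berlin?",
  experimental_output: Output.object({
    schema: z.object({ city: z.string(), temperatureC: z.number() }),
  }),
});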
@@ -3841,8 +3947,10 @@ function runToolsTransformation({
   toolCallStreaming,
   tracer,
   telemetry,
+  system,
   messages,
-  abortSignal
+  abortSignal,
+  repairToolCall
 }) {
   let toolResultsStreamController = null;
   const toolResultsStream = new ReadableStream({
@@ -3863,7 +3971,7 @@ function runToolsTransformation({
     }
   }
   const forwardStream = new TransformStream({
-    transform(chunk, controller) {
+    async transform(chunk, controller) {
       const chunkType = chunk.type;
       switch (chunkType) {
         case "text-delta":
@@ -3912,9 +4020,12 @@ function runToolsTransformation({
           break;
         }
         try {
-          const toolCall = parseToolCall({
+          const toolCall = await parseToolCall({
             toolCall: chunk,
-            tools
+            tools,
+            repairToolCall,
+            system,
+            messages
           });
           controller.enqueue(toolCall);
           if (tool2.execute != null) {
@@ -4053,6 +4164,7 @@ function streamText({
   experimental_providerMetadata: providerMetadata,
   experimental_toolCallStreaming: toolCallStreaming = false,
   experimental_activeTools: activeTools,
+  experimental_repairToolCall: repairToolCall,
   onChunk,
   onFinish,
   onStepFinish,
@@ -4077,6 +4189,7 @@ function streamText({
     toolChoice,
     toolCallStreaming,
     activeTools,
+    repairToolCall,
     maxSteps,
     continueSteps,
     providerMetadata,
@@ -4103,6 +4216,7 @@ var DefaultStreamTextResult = class {
     toolChoice,
     toolCallStreaming,
     activeTools,
+    repairToolCall,
     maxSteps,
     continueSteps,
     providerMetadata,
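The hunks above give streamText the same experimental_repairToolCall option as generateText and thread it (together with system) into runToolsTransformation, whose transform is now async so streamed tool-call chunks can await the same repair path. A short sketch, same assumptions as the generateText example above:

import { streamText } from "ai";

const result = streamText({
  model,
  tools: myTools,
  prompt: "...",
  experimental_repairToolCall: repairToolCall, // same callback shape as for generateText
});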
@@ -4214,8 +4328,8 @@ var DefaultStreamTextResult = class {
             "ai.prompt.tools": {
               // convert the language model level tools:
               input: () => {
-                var _a12;
-                return (_a12 = mode.tools) == null ? void 0 : _a12.map((tool2) => JSON.stringify(tool2));
+                var _a13;
+                return (_a13 = mode.tools) == null ? void 0 : _a13.map((tool2) => JSON.stringify(tool2));
               }
             },
             "ai.prompt.toolChoice": {
@@ -4257,7 +4371,9 @@ var DefaultStreamTextResult = class {
             toolCallStreaming,
             tracer,
             telemetry,
+            system,
             messages: stepInputMessages,
+            repairToolCall,
             abortSignal
           });
           const stepRequest = request != null ? request : {};
@@ -4298,7 +4414,7 @@ var DefaultStreamTextResult = class {
           transformedStream.pipeThrough(
             new TransformStream({
               async transform(chunk, controller) {
-                var _a12, _b, _c;
+                var _a13, _b, _c;
                 if (stepFirstChunk) {
                   const msToFirstChunk = now2() - startTimestampMs;
                   stepFirstChunk = false;
@@ -4352,7 +4468,7 @@ var DefaultStreamTextResult = class {
                 }
                 case "response-metadata": {
                   stepResponse = {
-                    id: (_a12 = chunk.id) != null ? _a12 : stepResponse.id,
+                    id: (_a13 = chunk.id) != null ? _a13 : stepResponse.id,
                     timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
                     modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
                   };
@@ -4669,7 +4785,7 @@ var DefaultStreamTextResult = class {
     });
   }
   toDataStreamInternal({
-    getErrorMessage: getErrorMessage4 = () => "An error occurred.",
+    getErrorMessage: getErrorMessage5 = () => "An error occurred.",
     // mask error messages for safety by default
     sendUsage = true
   } = {}) {
@@ -4729,7 +4845,7 @@ var DefaultStreamTextResult = class {
           }
           case "error": {
             controller.enqueue(
-              formatDataStreamPart2("error", getErrorMessage4(chunk.error))
+              formatDataStreamPart2("error", getErrorMessage5(chunk.error))
             );
             break;
           }
@@ -4772,7 +4888,7 @@ var DefaultStreamTextResult = class {
     statusText,
     headers,
     data,
-    getErrorMessage: getErrorMessage4,
+    getErrorMessage: getErrorMessage5,
     sendUsage
   } = {}) {
     writeToServerResponse({
@@ -4783,7 +4899,7 @@ var DefaultStreamTextResult = class {
         contentType: "text/plain; charset=utf-8",
         dataStreamVersion: "v1"
       }),
-      stream: this.toDataStream({ data, getErrorMessage: getErrorMessage4, sendUsage })
+      stream: this.toDataStream({ data, getErrorMessage: getErrorMessage5, sendUsage })
     });
   }
   pipeTextStreamToResponse(response, init) {
@@ -4817,11 +4933,11 @@ var DefaultStreamTextResult = class {
     status,
     statusText,
     data,
-    getErrorMessage: getErrorMessage4,
+    getErrorMessage: getErrorMessage5,
     sendUsage
   } = {}) {
     return new Response(
-      this.toDataStream({ data, getErrorMessage: getErrorMessage4, sendUsage }),
+      this.toDataStream({ data, getErrorMessage: getErrorMessage5, sendUsage }),
       {
         status,
         statusText,
@@ -4833,9 +4949,9 @@ var DefaultStreamTextResult = class {
     );
   }
   toTextStreamResponse(init) {
-    var _a12;
+    var _a13;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
+      status: (_a13 = init == null ? void 0 : init.status) != null ? _a13 : 200,
       headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
@@ -4843,46 +4959,6 @@ var DefaultStreamTextResult = class {
   }
 };
 
-// core/generate-text/output.ts
-var output_exports = {};
-__export(output_exports, {
-  object: () => object,
-  text: () => text
-});
-import { parseJSON } from "@ai-sdk/provider-utils";
-import { asSchema as asSchema4 } from "@ai-sdk/ui-utils";
-var text = () => ({
-  type: "text",
-  responseFormat: () => ({ type: "text" }),
-  injectIntoSystemPrompt({ system }) {
-    return system;
-  },
-  parseOutput({ text: text2 }) {
-    return text2;
-  }
-});
-var object = ({
-  schema: inputSchema
-}) => {
-  const schema = asSchema4(inputSchema);
-  return {
-    type: "object",
-    responseFormat: ({ model }) => ({
-      type: "json",
-      schema: model.supportsStructuredOutputs ? schema.jsonSchema : void 0
-    }),
-    injectIntoSystemPrompt({ system, model }) {
-      return model.supportsStructuredOutputs ? system : injectJsonInstruction({
-        prompt: system,
-        schema: schema.jsonSchema
-      });
-    },
-    parseOutput({ text: text2 }) {
-      return parseJSON({ text: text2, schema });
-    }
-  };
-};
-
 // core/middleware/wrap-language-model.ts
 var experimental_wrapLanguageModel = ({
   model,
@@ -4947,11 +5023,11 @@ function experimental_customProvider({
 }
 
 // core/registry/no-such-provider-error.ts
-import { AISDKError as AISDKError12, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
-var name11 = "AI_NoSuchProviderError";
-var marker11 = `vercel.ai.error.${name11}`;
-var symbol11 = Symbol.for(marker11);
-var _a11;
+import { AISDKError as AISDKError13, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
+var name12 = "AI_NoSuchProviderError";
+var marker12 = `vercel.ai.error.${name12}`;
+var symbol12 = Symbol.for(marker12);
+var _a12;
 var NoSuchProviderError = class extends NoSuchModelError3 {
   constructor({
     modelId,
@@ -4960,16 +5036,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
     availableProviders,
     message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
   }) {
-    super({ errorName: name11, modelId, modelType, message });
-    this[_a11] = true;
+    super({ errorName: name12, modelId, modelType, message });
+    this[_a12] = true;
     this.providerId = providerId;
     this.availableProviders = availableProviders;
   }
   static isInstance(error) {
-    return AISDKError12.hasMarker(error, marker11);
+    return AISDKError13.hasMarker(error, marker12);
   }
 };
-_a11 = symbol11;
+_a12 = symbol12;
 
 // core/registry/provider-registry.ts
 import { NoSuchModelError as NoSuchModelError4 } from "@ai-sdk/provider";
@@ -5011,19 +5087,19 @@ var DefaultProviderRegistry = class {
     return [id.slice(0, index), id.slice(index + 1)];
   }
   languageModel(id) {
-    var _a12, _b;
+    var _a13, _b;
     const [providerId, modelId] = this.splitId(id, "languageModel");
-    const model = (_b = (_a12 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a12, modelId);
+    const model = (_b = (_a13 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a13, modelId);
     if (model == null) {
       throw new NoSuchModelError4({ modelId: id, modelType: "languageModel" });
     }
     return model;
   }
   textEmbeddingModel(id) {
-    var _a12;
+    var _a13;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (_a12 = provider.textEmbeddingModel) == null ? void 0 : _a12.call(provider, modelId);
+    const model = (_a13 = provider.textEmbeddingModel) == null ? void 0 : _a13.call(provider, modelId);
     if (model == null) {
       throw new NoSuchModelError4({
         modelId: id,
@@ -5071,7 +5147,7 @@ import {
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
-      var _a12;
+      var _a13;
       const textEncoder = new TextEncoder();
       const sendMessage = (message) => {
         controller.enqueue(
@@ -5093,7 +5169,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
         );
       };
       const forwardStream = async (stream2) => {
-        var _a13, _b;
+        var _a14, _b;
         let result = void 0;
         for await (const value of stream2) {
           switch (value.event) {
@@ -5110,7 +5186,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
               break;
             }
             case "thread.message.delta": {
-              const content = (_a13 = value.data.delta.content) == null ? void 0 : _a13[0];
+              const content = (_a14 = value.data.delta.content) == null ? void 0 : _a14[0];
               if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                 controller.enqueue(
                   textEncoder.encode(
@@ -5144,7 +5220,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
           forwardStream
         });
       } catch (error) {
-        sendError((_a12 = error.message) != null ? _a12 : `${error}`);
+        sendError((_a13 = error.message) != null ? _a13 : `${error}`);
       } finally {
         controller.close();
       }
@@ -5205,7 +5281,7 @@ function toDataStreamInternal(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
       transform: async (value, controller) => {
-        var _a12;
+        var _a13;
         if (typeof value === "string") {
          controller.enqueue(value);
          return;
@@ -5213,7 +5289,7 @@ function toDataStreamInternal(stream, callbacks) {
        if ("event" in value) {
          if (value.event === "on_chat_model_stream") {
            forwardAIMessageChunk(
-              (_a12 = value.data) == null ? void 0 : _a12.chunk,
+              (_a13 = value.data) == null ? void 0 : _a13.chunk,
              controller
            );
          }
@@ -5236,7 +5312,7 @@ function toDataStream(stream, callbacks) {
   );
 }
 function toDataStreamResponse(stream, options) {
-  var _a12;
+  var _a13;
   const dataStream = toDataStreamInternal(
     stream,
     options == null ? void 0 : options.callbacks
@@ -5245,7 +5321,7 @@ function toDataStreamResponse(stream, options) {
   const init = options == null ? void 0 : options.init;
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
+    status: (_a13 = init == null ? void 0 : init.status) != null ? _a13 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
@@ -5300,14 +5376,14 @@ function toDataStream2(stream, callbacks) {
   );
 }
 function toDataStreamResponse2(stream, options = {}) {
-  var _a12;
+  var _a13;
   const { init, data, callbacks } = options;
   const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
     new TextEncoderStream()
   );
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
+    status: (_a13 = init == null ? void 0 : init.status) != null ? _a13 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
@@ -5399,7 +5475,7 @@ var StreamData = class {
   }
 };
 export {
-  AISDKError11 as AISDKError,
+  AISDKError12 as AISDKError,
   APICallError2 as APICallError,
   AssistantResponse,
   DownloadError,
@@ -5423,6 +5499,7 @@ export {
   output_exports as Output,
   RetryError,
   StreamData,
+  ToolCallRepairError,
   ToolExecutionError,
   TypeValidationError2 as TypeValidationError,
   UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,