ai 4.0.11 → 4.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -0
- package/dist/index.d.mts +23 -9
- package/dist/index.d.ts +23 -9
- package/dist/index.js +165 -126
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +146 -108
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __export = (target, all) => {
-  for (var name11 in all)
-    __defProp(target, name11, { get: all[name11], enumerable: true });
+  for (var name12 in all)
+    __defProp(target, name12, { get: all[name12], enumerable: true });
 };

 // streams/index.ts
@@ -347,7 +347,7 @@ function getBaseTelemetryAttributes({
   telemetry,
   headers
 }) {
-  var _a11;
+  var _a12;
   return {
     "ai.model.provider": model.provider,
     "ai.model.id": model.modelId,
@@ -357,7 +357,7 @@ function getBaseTelemetryAttributes({
       return attributes;
     }, {}),
     // add metadata as attributes:
-    ...Object.entries((_a11 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a11 : {}).reduce(
+    ...Object.entries((_a12 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a12 : {}).reduce(
       (attributes, [key, value]) => {
         attributes[`ai.telemetry.metadata.${key}`] = value;
         return attributes;
@@ -382,7 +382,7 @@ var noopTracer = {
   startSpan() {
     return noopSpan;
   },
-  startActiveSpan(name11, arg1, arg2, arg3) {
+  startActiveSpan(name12, arg1, arg2, arg3) {
     if (typeof arg1 === "function") {
       return arg1(noopSpan);
     }
@@ -452,13 +452,13 @@ function getTracer({
 // core/telemetry/record-span.ts
 import { SpanStatusCode } from "@opentelemetry/api";
 function recordSpan({
-  name: name11,
+  name: name12,
   tracer,
   attributes,
   fn,
   endWhenDone = true
 }) {
-  return tracer.startActiveSpan(name11, { attributes }, async (span) => {
+  return tracer.startActiveSpan(name12, { attributes }, async (span) => {
     try {
       const result = await fn(span);
       if (endWhenDone) {
@@ -566,14 +566,14 @@ async function embed({
       }),
       tracer,
       fn: async (doEmbedSpan) => {
-        var _a11;
+        var _a12;
         const modelResponse = await model.doEmbed({
           values: [value],
           abortSignal,
           headers
         });
         const embedding2 = modelResponse.embeddings[0];
-        const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
+        const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
         doEmbedSpan.setAttributes(
           selectTelemetryAttributes({
             telemetry,
@@ -683,14 +683,14 @@ async function embedMany({
       }),
       tracer,
       fn: async (doEmbedSpan) => {
-        var _a11;
+        var _a12;
         const modelResponse = await model.doEmbed({
           values,
           abortSignal,
           headers
         });
         const embeddings3 = modelResponse.embeddings;
-        const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
+        const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
         doEmbedSpan.setAttributes(
           selectTelemetryAttributes({
             telemetry,
@@ -742,14 +742,14 @@ async function embedMany({
       }),
       tracer,
       fn: async (doEmbedSpan) => {
-        var _a11;
+        var _a12;
         const modelResponse = await model.doEmbed({
           values: chunk,
           abortSignal,
           headers
         });
         const embeddings2 = modelResponse.embeddings;
-        const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
+        const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN };
         doEmbedSpan.setAttributes(
           selectTelemetryAttributes({
             telemetry,
@@ -829,7 +829,7 @@ async function download({
   url,
   fetchImplementation = fetch
 }) {
-  var _a11;
+  var _a12;
   const urlText = url.toString();
   try {
     const response = await fetchImplementation(urlText);
@@ -842,7 +842,7 @@ async function download({
     }
     return {
       data: new Uint8Array(await response.arrayBuffer()),
-      mimeType: (_a11 = response.headers.get("content-type")) != null ? _a11 : void 0
+      mimeType: (_a12 = response.headers.get("content-type")) != null ? _a12 : void 0
     };
   } catch (error) {
     if (DownloadError.isInstance(error)) {
@@ -905,8 +905,8 @@ var dataContentSchema = z.union([
   z.custom(
     // Buffer might not be available in some environments such as CloudFlare:
     (value) => {
-      var _a11, _b;
-      return (_b = (_a11 = globalThis.Buffer) == null ? void 0 : _a11.isBuffer(value)) != null ? _b : false;
+      var _a12, _b;
+      return (_b = (_a12 = globalThis.Buffer) == null ? void 0 : _a12.isBuffer(value)) != null ? _b : false;
     },
     { message: "Must be a Buffer" }
   )
@@ -1414,7 +1414,7 @@ function detectSingleMessageCharacteristics(message) {

 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
-  var _a11, _b, _c;
+  var _a12, _b, _c;
   const parts = [];
   for (const attachment of attachments) {
     let url;
@@ -1426,7 +1426,7 @@ function attachmentsToParts(attachments) {
     switch (url.protocol) {
       case "http:":
       case "https:": {
-        if ((_a11 = attachment.contentType) == null ? void 0 : _a11.startsWith("image/")) {
+        if ((_a12 = attachment.contentType) == null ? void 0 : _a12.startsWith("image/")) {
           parts.push({ type: "image", image: url });
         } else {
           if (!attachment.contentType) {
@@ -1512,8 +1512,8 @@ _a6 = symbol6;

 // core/prompt/convert-to-core-messages.ts
 function convertToCoreMessages(messages, options) {
-  var _a11;
-  const tools = (_a11 = options == null ? void 0 : options.tools) != null ? _a11 : {};
+  var _a12;
+  const tools = (_a12 = options == null ? void 0 : options.tools) != null ? _a12 : {};
   const coreMessages = [];
   for (const message of messages) {
     const { role, content, toolInvocations, experimental_attachments } = message;
@@ -1800,7 +1800,7 @@ var arrayOutputStrategy = (schema) => {
       additionalProperties: false
     },
     validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
-      var _a11;
+      var _a12;
      if (!isJSONObject(value) || !isJSONArray(value.elements)) {
        return {
          success: false,
@@ -1823,7 +1823,7 @@ var arrayOutputStrategy = (schema) => {
      }
      resultArray.push(result.value);
    }
-      const publishedElementCount = (_a11 = latestObject == null ? void 0 : latestObject.length) != null ? _a11 : 0;
+      const publishedElementCount = (_a12 = latestObject == null ? void 0 : latestObject.length) != null ? _a12 : 0;
    let textDelta = "";
    if (isFirstDelta) {
      textDelta += "[";
@@ -2155,7 +2155,7 @@ async function generateObject({
     }),
     tracer,
     fn: async (span) => {
-      var _a11, _b;
+      var _a12, _b;
       if (mode === "auto" || mode == null) {
         mode = model.defaultObjectGenerationMode;
       }
@@ -2217,7 +2217,7 @@ async function generateObject({
           }),
           tracer,
           fn: async (span2) => {
-            var _a12, _b2, _c, _d, _e, _f;
+            var _a13, _b2, _c, _d, _e, _f;
             const result2 = await model.doGenerate({
               mode: {
                 type: "object-json",
@@ -2236,7 +2236,7 @@ async function generateObject({
               throw new NoObjectGeneratedError();
             }
             const responseData = {
-              id: (_b2 = (_a12 = result2.response) == null ? void 0 : _a12.id) != null ? _b2 : generateId3(),
+              id: (_b2 = (_a13 = result2.response) == null ? void 0 : _a13.id) != null ? _b2 : generateId3(),
               timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
               modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
             };
@@ -2271,7 +2271,7 @@ async function generateObject({
         rawResponse = generateResult.rawResponse;
         logprobs = generateResult.logprobs;
         resultProviderMetadata = generateResult.providerMetadata;
-        request = (_a11 = generateResult.request) != null ? _a11 : {};
+        request = (_a12 = generateResult.request) != null ? _a12 : {};
         response = generateResult.responseData;
         break;
       }
@@ -2317,7 +2317,7 @@ async function generateObject({
           }),
           tracer,
           fn: async (span2) => {
-            var _a12, _b2, _c, _d, _e, _f, _g, _h;
+            var _a13, _b2, _c, _d, _e, _f, _g, _h;
             const result2 = await model.doGenerate({
               mode: {
                 type: "object-tool",
@@ -2335,7 +2335,7 @@ async function generateObject({
               abortSignal,
               headers
             });
-            const objectText = (_b2 = (_a12 = result2.toolCalls) == null ? void 0 : _a12[0]) == null ? void 0 : _b2.args;
+            const objectText = (_b2 = (_a13 = result2.toolCalls) == null ? void 0 : _a13[0]) == null ? void 0 : _b2.args;
             if (objectText === void 0) {
               throw new NoObjectGeneratedError();
             }
@@ -2440,9 +2440,9 @@ var DefaultGenerateObjectResult = class {
     this.logprobs = options.logprobs;
   }
   toJsonResponse(init) {
-    var _a11;
+    var _a12;
     return new Response(JSON.stringify(this.object), {
-      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
+      status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
       headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "application/json; charset=utf-8"
       })
@@ -2480,17 +2480,17 @@ var DelayedPromise = class {
     return this.promise;
   }
   resolve(value) {
-    var _a11;
+    var _a12;
     this.status = { type: "resolved", value };
     if (this.promise) {
-      (_a11 = this._resolve) == null ? void 0 : _a11.call(this, value);
+      (_a12 = this._resolve) == null ? void 0 : _a12.call(this, value);
     }
   }
   reject(error) {
-    var _a11;
+    var _a12;
     this.status = { type: "rejected", error };
     if (this.promise) {
-      (_a11 = this._reject) == null ? void 0 : _a11.call(this, error);
+      (_a12 = this._reject) == null ? void 0 : _a12.call(this, error);
     }
   }
 };
@@ -2579,8 +2579,8 @@ function createStitchableStream() {

 // core/util/now.ts
 function now() {
-  var _a11, _b;
-  return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
+  var _a12, _b;
+  return (_b = (_a12 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a12.now()) != null ? _b : Date.now();
 }

 // core/generate-object/stream-object.ts
@@ -2869,7 +2869,7 @@ var DefaultStreamObjectResult = class {
     const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
       new TransformStream({
         async transform(chunk, controller) {
-          var _a11, _b, _c;
+          var _a12, _b, _c;
           if (isFirstChunk) {
             const msToFirstChunk = now2() - startTimestampMs;
             isFirstChunk = false;
@@ -2915,7 +2915,7 @@ var DefaultStreamObjectResult = class {
           switch (chunk.type) {
             case "response-metadata": {
               response = {
-                id: (_a11 = chunk.id) != null ? _a11 : response.id,
+                id: (_a12 = chunk.id) != null ? _a12 : response.id,
                 timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
                 modelId: (_c = chunk.modelId) != null ? _c : response.modelId
               };
@@ -3112,9 +3112,9 @@ var DefaultStreamObjectResult = class {
     });
   }
   toTextStreamResponse(init) {
-    var _a11;
+    var _a12;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
+      status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
       headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
@@ -3127,7 +3127,7 @@ import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils"

 // errors/index.ts
 import {
-  AISDKError as AISDKError10,
+  AISDKError as AISDKError11,
   APICallError as APICallError2,
   EmptyResponseBodyError,
   InvalidPromptError as InvalidPromptError2,
@@ -3189,6 +3189,30 @@ var NoSuchToolError = class extends AISDKError9 {
 };
 _a9 = symbol9;

+// errors/tool-execution-error.ts
+import { AISDKError as AISDKError10, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
+var name10 = "AI_ToolExecutionError";
+var marker10 = `vercel.ai.error.${name10}`;
+var symbol10 = Symbol.for(marker10);
+var _a10;
+var ToolExecutionError = class extends AISDKError10 {
+  constructor({
+    toolArgs,
+    toolName,
+    cause,
+    message = `Error executing tool ${toolName}: ${getErrorMessage3(cause)}`
+  }) {
+    super({ name: name10, message, cause });
+    this[_a10] = true;
+    this.toolArgs = toolArgs;
+    this.toolName = toolName;
+  }
+  static isInstance(error) {
+    return AISDKError10.hasMarker(error, marker10);
+  }
+};
+_a10 = symbol10;
+
 // core/prompt/prepare-tools-and-tool-choice.ts
 import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";

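The hunk above is the substance of this release: a new public ToolExecutionError class, a tagged AISDKError subclass that carries the failing tool's name and arguments and keeps the original exception on cause. A minimal sketch of how calling code can detect it (the model instance and the failing weather tool are hypothetical; only the ToolExecutionError API is taken from the diff):

import { generateText, tool, ToolExecutionError } from "ai";
import { z } from "zod";

const weather = tool({
  description: "Get the weather in a city",
  parameters: z.object({ city: z.string() }),
  // simulated upstream failure inside the tool
  execute: async () => { throw new Error("weather API is down"); }
});

try {
  await generateText({ model, tools: { weather }, prompt: "Weather in Berlin?" });
} catch (error) {
  if (ToolExecutionError.isInstance(error)) {
    // toolName, toolArgs and cause are set by the constructor in the hunk above
    console.error(`tool ${error.toolName} failed`, error.toolArgs, error.cause);
  }
}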
@@ -3210,24 +3234,24 @@ function prepareToolsAndToolChoice({
     };
   }
   const filteredTools = activeTools != null ? Object.entries(tools).filter(
-    ([name11]) => activeTools.includes(name11)
+    ([name12]) => activeTools.includes(name12)
   ) : Object.entries(tools);
   return {
-    tools: filteredTools.map(([name11, tool2]) => {
+    tools: filteredTools.map(([name12, tool2]) => {
       const toolType = tool2.type;
       switch (toolType) {
         case void 0:
         case "function":
           return {
             type: "function",
-            name: name11,
+            name: name12,
             description: tool2.description,
             parameters: asSchema2(tool2.parameters).jsonSchema
           };
         case "provider-defined":
           return {
             type: "provider-defined",
-            name: name11,
+            name: name12,
             id: tool2.id,
             args: tool2.args
           };
@@ -3351,7 +3375,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
-  var _a11;
+  var _a12;
   if (maxSteps < 1) {
     throw new InvalidArgumentError({
       parameter: "maxSteps",
@@ -3368,7 +3392,7 @@ async function generateText({
   });
   const initialPrompt = standardizePrompt({
     prompt: {
-      system: (_a11 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a11 : system,
+      system: (_a12 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a12 : system,
       prompt,
       messages
     },
@@ -3394,7 +3418,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var _a12, _b, _c, _d, _e, _f;
+      var _a13, _b, _c, _d, _e, _f;
       const mode = {
         type: "regular",
         ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3446,8 +3470,8 @@ async function generateText({
           "ai.prompt.tools": {
             // convert the language model level tools:
             input: () => {
-              var _a13;
-              return (_a13 = mode.tools) == null ? void 0 : _a13.map((tool2) => JSON.stringify(tool2));
+              var _a14;
+              return (_a14 = mode.tools) == null ? void 0 : _a14.map((tool2) => JSON.stringify(tool2));
             }
           },
           "ai.prompt.toolChoice": {
@@ -3467,7 +3491,7 @@ async function generateText({
         }),
         tracer,
         fn: async (span2) => {
-          var _a13, _b2, _c2, _d2, _e2, _f2;
+          var _a14, _b2, _c2, _d2, _e2, _f2;
           const result = await model.doGenerate({
             mode,
             ...callSettings,
@@ -3479,7 +3503,7 @@ async function generateText({
             headers
           });
           const responseData = {
-            id: (_b2 = (_a13 = result.response) == null ? void 0 : _a13.id) != null ? _b2 : generateId3(),
+            id: (_b2 = (_a14 = result.response) == null ? void 0 : _a14.id) != null ? _b2 : generateId3(),
             timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
             modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
           };
@@ -3512,7 +3536,7 @@ async function generateText({
         }
       })
     );
-    currentToolCalls = ((_a12 = currentModelResponse.toolCalls) != null ? _a12 : []).map(
+    currentToolCalls = ((_a13 = currentModelResponse.toolCalls) != null ? _a13 : []).map(
      (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
    );
    currentToolResults = tools == null ? [] : await executeTools({
@@ -3659,25 +3683,33 @@ async function executeTools({
       }),
       tracer,
       fn: async (span) => {
-        const result2 = await tool2.execute(args, {
-          toolCallId,
-          messages,
-          abortSignal
-        });
         try {
-          span.setAttributes(
-            selectTelemetryAttributes({
-              telemetry,
-              attributes: {
-                "ai.toolCall.result": {
-                  output: () => JSON.stringify(result2)
+          const result2 = await tool2.execute(args, {
+            toolCallId,
+            messages,
+            abortSignal
+          });
+          try {
+            span.setAttributes(
+              selectTelemetryAttributes({
+                telemetry,
+                attributes: {
+                  "ai.toolCall.result": {
+                    output: () => JSON.stringify(result2)
+                  }
                 }
-              }
-            })
-          );
-        } catch (ignored) {
+              })
+            );
+          } catch (ignored) {
+          }
+          return result2;
+        } catch (error) {
+          throw new ToolExecutionError({
+            toolName,
+            toolArgs: args,
+            cause: error
+          });
         }
-        return result2;
       }
     });
     return {
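The executeTools change above is a wrap-and-rethrow: the tool2.execute call moves inside a try block, telemetry recording keeps its own swallow-all catch, and any failure is rethrown with routing context attached while the original error stays reachable via cause. The same pattern reduced to its essentials (runTool and its parameters are illustrative names, not part of the package):

async function runTool<T>(
  toolName: string,
  toolArgs: unknown,
  run: () => Promise<T>
): Promise<T> {
  try {
    return await run();
  } catch (error) {
    // preserve the original failure as `cause`; add context for the caller
    throw new ToolExecutionError({ toolName, toolArgs, cause: error });
  }
}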
@@ -3935,7 +3967,11 @@ function runToolsTransformation({
           (error) => {
             toolResultsStreamController.enqueue({
               type: "error",
-              error
+              error: new ToolExecutionError({
+                toolName: toolCall.toolName,
+                toolArgs: toolCall.args,
+                cause: error
+              })
             });
             outstandingToolResults.delete(toolExecutionId);
             attemptClose();
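The streaming path gets the same treatment: a rejected tool execution is wrapped in ToolExecutionError before being enqueued as an error part, so the stream stays consumable and the part identifies which call failed. A sketch of consuming it, assuming model and tools are set up as in the earlier example and that fullStream emits { type: "error", error } parts as this transformation suggests:

import { streamText, ToolExecutionError } from "ai";

const result = streamText({ model, tools, prompt: "Weather in Berlin?" });
for await (const part of result.fullStream) {
  if (part.type === "error" && ToolExecutionError.isInstance(part.error)) {
    // the wrapped error names the failing tool and carries its arguments
    console.error(`tool ${part.error.toolName} failed:`, part.error.cause);
  }
}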
@@ -4178,8 +4214,8 @@ var DefaultStreamTextResult = class {
           "ai.prompt.tools": {
             // convert the language model level tools:
             input: () => {
-              var _a11;
-              return (_a11 = mode.tools) == null ? void 0 : _a11.map((tool2) => JSON.stringify(tool2));
+              var _a12;
+              return (_a12 = mode.tools) == null ? void 0 : _a12.map((tool2) => JSON.stringify(tool2));
             }
           },
           "ai.prompt.toolChoice": {
@@ -4262,7 +4298,7 @@ var DefaultStreamTextResult = class {
       transformedStream.pipeThrough(
         new TransformStream({
           async transform(chunk, controller) {
-            var _a11, _b, _c;
+            var _a12, _b, _c;
             if (stepFirstChunk) {
               const msToFirstChunk = now2() - startTimestampMs;
               stepFirstChunk = false;
@@ -4316,7 +4352,7 @@ var DefaultStreamTextResult = class {
             }
             case "response-metadata": {
               stepResponse = {
-                id: (_a11 = chunk.id) != null ? _a11 : stepResponse.id,
+                id: (_a12 = chunk.id) != null ? _a12 : stepResponse.id,
                 timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
                 modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
               };
@@ -4563,7 +4599,8 @@ var DefaultStreamTextResult = class {
       self.stitchableStream.addStream(
         new ReadableStream({
           start(controller) {
-            controller.error(error);
+            controller.enqueue({ type: "error", error });
+            controller.close();
           }
         })
       );
@@ -4632,7 +4669,7 @@ var DefaultStreamTextResult = class {
     });
   }
   toDataStreamInternal({
-    getErrorMessage: getErrorMessage3 = () => "An error occurred.",
+    getErrorMessage: getErrorMessage4 = () => "An error occurred.",
     // mask error messages for safety by default
     sendUsage = true
   } = {}) {
@@ -4692,7 +4729,7 @@ var DefaultStreamTextResult = class {
         }
         case "error": {
           controller.enqueue(
-            formatDataStreamPart2("error", getErrorMessage3(chunk.error))
+            formatDataStreamPart2("error", getErrorMessage4(chunk.error))
           );
           break;
         }
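Note that the renamed getErrorMessage4 still defaults to masking every error as "An error occurred." in the data stream, so tool failures are hidden from clients unless a handler opts in. A sketch against the getErrorMessage option that the response helpers below forward (the message strings are illustrative):

return result.toDataStreamResponse({
  getErrorMessage: (error) =>
    ToolExecutionError.isInstance(error)
      ? `Tool ${error.toolName} failed.` // expose a controlled summary
      : "An error occurred." // keep everything else masked
});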
@@ -4735,7 +4772,7 @@ var DefaultStreamTextResult = class {
     statusText,
     headers,
     data,
-    getErrorMessage: getErrorMessage3,
+    getErrorMessage: getErrorMessage4,
     sendUsage
   } = {}) {
     writeToServerResponse({
@@ -4746,7 +4783,7 @@ var DefaultStreamTextResult = class {
         contentType: "text/plain; charset=utf-8",
         dataStreamVersion: "v1"
       }),
-      stream: this.toDataStream({ data, getErrorMessage: getErrorMessage3, sendUsage })
+      stream: this.toDataStream({ data, getErrorMessage: getErrorMessage4, sendUsage })
     });
   }
   pipeTextStreamToResponse(response, init) {
@@ -4780,11 +4817,11 @@ var DefaultStreamTextResult = class {
     status,
     statusText,
     data,
-    getErrorMessage: getErrorMessage3,
+    getErrorMessage: getErrorMessage4,
     sendUsage
   } = {}) {
     return new Response(
-      this.toDataStream({ data, getErrorMessage: getErrorMessage3, sendUsage }),
+      this.toDataStream({ data, getErrorMessage: getErrorMessage4, sendUsage }),
       {
         status,
         statusText,
@@ -4796,9 +4833,9 @@ var DefaultStreamTextResult = class {
     );
   }
   toTextStreamResponse(init) {
-    var _a11;
+    var _a12;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
+      status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
       headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "text/plain; charset=utf-8"
       })
@@ -4910,11 +4947,11 @@ function experimental_customProvider({
 }

 // core/registry/no-such-provider-error.ts
-import { AISDKError as AISDKError11, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
-var name10 = "AI_NoSuchProviderError";
-var marker10 = `vercel.ai.error.${name10}`;
-var symbol10 = Symbol.for(marker10);
-var _a10;
+import { AISDKError as AISDKError12, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
+var name11 = "AI_NoSuchProviderError";
+var marker11 = `vercel.ai.error.${name11}`;
+var symbol11 = Symbol.for(marker11);
+var _a11;
 var NoSuchProviderError = class extends NoSuchModelError3 {
   constructor({
     modelId,
@@ -4923,16 +4960,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
     availableProviders,
     message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
   }) {
-    super({ errorName: name10, modelId, modelType, message });
-    this[_a10] = true;
+    super({ errorName: name11, modelId, modelType, message });
+    this[_a11] = true;
     this.providerId = providerId;
     this.availableProviders = availableProviders;
   }
   static isInstance(error) {
-    return AISDKError11.hasMarker(error, marker10);
+    return AISDKError12.hasMarker(error, marker11);
   }
 };
-_a10 = symbol10;
+_a11 = symbol11;

 // core/registry/provider-registry.ts
 import { NoSuchModelError as NoSuchModelError4 } from "@ai-sdk/provider";
@@ -4974,19 +5011,19 @@ var DefaultProviderRegistry = class {
     return [id.slice(0, index), id.slice(index + 1)];
   }
   languageModel(id) {
-    var _a11, _b;
+    var _a12, _b;
     const [providerId, modelId] = this.splitId(id, "languageModel");
-    const model = (_b = (_a11 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a11, modelId);
+    const model = (_b = (_a12 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a12, modelId);
     if (model == null) {
       throw new NoSuchModelError4({ modelId: id, modelType: "languageModel" });
     }
     return model;
   }
   textEmbeddingModel(id) {
-    var _a11;
+    var _a12;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (_a11 = provider.textEmbeddingModel) == null ? void 0 : _a11.call(provider, modelId);
+    const model = (_a12 = provider.textEmbeddingModel) == null ? void 0 : _a12.call(provider, modelId);
     if (model == null) {
       throw new NoSuchModelError4({
         modelId: id,
@@ -5034,7 +5071,7 @@ import {
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
-      var _a11;
+      var _a12;
       const textEncoder = new TextEncoder();
       const sendMessage = (message) => {
         controller.enqueue(
@@ -5056,7 +5093,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
       );
     };
     const forwardStream = async (stream2) => {
-      var _a12, _b;
+      var _a13, _b;
       let result = void 0;
       for await (const value of stream2) {
         switch (value.event) {
@@ -5073,7 +5110,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
           break;
         }
         case "thread.message.delta": {
-          const content = (_a12 = value.data.delta.content) == null ? void 0 : _a12[0];
+          const content = (_a13 = value.data.delta.content) == null ? void 0 : _a13[0];
           if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
             controller.enqueue(
               textEncoder.encode(
@@ -5107,7 +5144,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
       forwardStream
     });
   } catch (error) {
-    sendError((_a11 = error.message) != null ? _a11 : `${error}`);
+    sendError((_a12 = error.message) != null ? _a12 : `${error}`);
   } finally {
     controller.close();
   }
@@ -5168,7 +5205,7 @@ function toDataStreamInternal(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
       transform: async (value, controller) => {
-        var _a11;
+        var _a12;
         if (typeof value === "string") {
           controller.enqueue(value);
           return;
@@ -5176,7 +5213,7 @@ function toDataStreamInternal(stream, callbacks) {
         if ("event" in value) {
           if (value.event === "on_chat_model_stream") {
             forwardAIMessageChunk(
-              (_a11 = value.data) == null ? void 0 : _a11.chunk,
+              (_a12 = value.data) == null ? void 0 : _a12.chunk,
               controller
             );
           }
@@ -5199,7 +5236,7 @@ function toDataStream(stream, callbacks) {
   );
 }
 function toDataStreamResponse(stream, options) {
-  var _a11;
+  var _a12;
   const dataStream = toDataStreamInternal(
     stream,
     options == null ? void 0 : options.callbacks
@@ -5208,7 +5245,7 @@ function toDataStreamResponse(stream, options) {
   const init = options == null ? void 0 : options.init;
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
+    status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
@@ -5263,14 +5300,14 @@ function toDataStream2(stream, callbacks) {
   );
 }
 function toDataStreamResponse2(stream, options = {}) {
-  var _a11;
+  var _a12;
   const { init, data, callbacks } = options;
   const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
     new TextEncoderStream()
   );
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
+    status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
@@ -5362,7 +5399,7 @@ var StreamData = class {
   }
 };
 export {
-  AISDKError10 as AISDKError,
+  AISDKError11 as AISDKError,
   APICallError2 as APICallError,
   AssistantResponse,
   DownloadError,
@@ -5386,6 +5423,7 @@ export {
   output_exports as Output,
   RetryError,
   StreamData,
+  ToolExecutionError,
   TypeValidationError2 as TypeValidationError,
   UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
   convertToCoreMessages,
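Taken together, the remaining hunks are mechanical identifier renumbering; the one user-visible addition is the export above, which makes the new error type importable from the package root alongside the existing error classes:

import { ToolExecutionError } from "ai";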