ai 4.0.23 → 4.0.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +49 -21
- package/dist/index.d.ts +49 -21
- package/dist/index.js +361 -225
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +360 -222
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.js
CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
-  for (var
-  __defProp(target,
+  for (var name14 in all)
+    __defProp(target, name14, { get: all[name14], enumerable: true });
 };
 var __copyProps = (to, from, except, desc) => {
   if (from && typeof from === "object" || typeof from === "function") {
@@ -20,25 +20,26 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // streams/index.ts
 var streams_exports = {};
 __export(streams_exports, {
-  AISDKError: () =>
-  APICallError: () =>
+  AISDKError: () => import_provider16.AISDKError,
+  APICallError: () => import_provider16.APICallError,
   AssistantResponse: () => AssistantResponse,
   DownloadError: () => DownloadError,
-  EmptyResponseBodyError: () =>
+  EmptyResponseBodyError: () => import_provider16.EmptyResponseBodyError,
   InvalidArgumentError: () => InvalidArgumentError,
   InvalidDataContentError: () => InvalidDataContentError,
   InvalidMessageRoleError: () => InvalidMessageRoleError,
-  InvalidPromptError: () =>
-  InvalidResponseDataError: () =>
+  InvalidPromptError: () => import_provider16.InvalidPromptError,
+  InvalidResponseDataError: () => import_provider16.InvalidResponseDataError,
   InvalidToolArgumentsError: () => InvalidToolArgumentsError,
-  JSONParseError: () =>
+  JSONParseError: () => import_provider16.JSONParseError,
   LangChainAdapter: () => langchain_adapter_exports,
   LlamaIndexAdapter: () => llamaindex_adapter_exports,
-  LoadAPIKeyError: () =>
+  LoadAPIKeyError: () => import_provider16.LoadAPIKeyError,
   MessageConversionError: () => MessageConversionError,
-  NoContentGeneratedError: () =>
+  NoContentGeneratedError: () => import_provider16.NoContentGeneratedError,
   NoObjectGeneratedError: () => NoObjectGeneratedError,
-
+  NoOutputSpecifiedError: () => NoOutputSpecifiedError,
+  NoSuchModelError: () => import_provider16.NoSuchModelError,
   NoSuchProviderError: () => NoSuchProviderError,
   NoSuchToolError: () => NoSuchToolError,
   Output: () => output_exports,
@@ -46,8 +47,8 @@ __export(streams_exports, {
   StreamData: () => StreamData,
   ToolCallRepairError: () => ToolCallRepairError,
   ToolExecutionError: () => ToolExecutionError,
-  TypeValidationError: () =>
-  UnsupportedFunctionalityError: () =>
+  TypeValidationError: () => import_provider16.TypeValidationError,
+  UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
   convertToCoreMessages: () => convertToCoreMessages,
   cosineSimilarity: () => cosineSimilarity,
   createDataStream: () => createDataStream,
@@ -417,7 +418,7 @@ function getBaseTelemetryAttributes({
   telemetry,
   headers
 }) {
-  var
+  var _a14;
   return {
     "ai.model.provider": model.provider,
     "ai.model.id": model.modelId,
@@ -427,7 +428,7 @@ function getBaseTelemetryAttributes({
       return attributes;
     }, {}),
     // add metadata as attributes:
-    ...Object.entries((
+    ...Object.entries((_a14 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a14 : {}).reduce(
       (attributes, [key, value]) => {
         attributes[`ai.telemetry.metadata.${key}`] = value;
         return attributes;
@@ -452,7 +453,7 @@ var noopTracer = {
   startSpan() {
     return noopSpan;
   },
-  startActiveSpan(
+  startActiveSpan(name14, arg1, arg2, arg3) {
     if (typeof arg1 === "function") {
       return arg1(noopSpan);
     }
@@ -522,13 +523,13 @@ function getTracer({
 // core/telemetry/record-span.ts
 var import_api2 = require("@opentelemetry/api");
 function recordSpan({
-  name:
+  name: name14,
   tracer,
   attributes,
   fn,
   endWhenDone = true
 }) {
-  return tracer.startActiveSpan(
+  return tracer.startActiveSpan(name14, { attributes }, async (span) => {
     try {
       const result = await fn(span);
       if (endWhenDone) {
@@ -636,14 +637,14 @@ async function embed({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var
+      var _a14;
      const modelResponse = await model.doEmbed({
        values: [value],
        abortSignal,
        headers
      });
      const embedding2 = modelResponse.embeddings[0];
-      const usage2 = (
+      const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
      doEmbedSpan.setAttributes(
        selectTelemetryAttributes({
          telemetry,
@@ -753,14 +754,14 @@ async function embedMany({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var
+      var _a14;
      const modelResponse = await model.doEmbed({
        values,
        abortSignal,
        headers
      });
      const embeddings3 = modelResponse.embeddings;
-      const usage2 = (
+      const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
      doEmbedSpan.setAttributes(
        selectTelemetryAttributes({
          telemetry,
@@ -812,14 +813,14 @@ async function embedMany({
     }),
     tracer,
     fn: async (doEmbedSpan) => {
-      var
+      var _a14;
      const modelResponse = await model.doEmbed({
        values: chunk,
        abortSignal,
        headers
      });
      const embeddings2 = modelResponse.embeddings;
-      const usage2 = (
+      const usage2 = (_a14 = modelResponse.usage) != null ? _a14 : { tokens: NaN };
      doEmbedSpan.setAttributes(
        selectTelemetryAttributes({
          telemetry,
@@ -964,7 +965,7 @@ async function download({
   url,
   fetchImplementation = fetch
 }) {
-  var
+  var _a14;
   const urlText = url.toString();
   try {
     const response = await fetchImplementation(urlText);
@@ -977,7 +978,7 @@ async function download({
     }
     return {
       data: new Uint8Array(await response.arrayBuffer()),
-      mimeType: (
+      mimeType: (_a14 = response.headers.get("content-type")) != null ? _a14 : void 0
     };
   } catch (error) {
     if (DownloadError.isInstance(error)) {
@@ -1037,8 +1038,8 @@ var dataContentSchema = import_zod.z.union([
   import_zod.z.custom(
     // Buffer might not be available in some environments such as CloudFlare:
     (value) => {
-      var
-      return (_b = (
+      var _a14, _b;
+      return (_b = (_a14 = globalThis.Buffer) == null ? void 0 : _a14.isBuffer(value)) != null ? _b : false;
     },
     { message: "Must be a Buffer" }
   )
@@ -1229,7 +1230,7 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
   );
 }
 function convertPartToLanguageModelPart(part, downloadedAssets) {
-  var
+  var _a14;
   if (part.type === "text") {
     return {
       type: "text",
@@ -1282,7 +1283,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
   switch (type) {
     case "image": {
       if (normalizedData instanceof Uint8Array) {
-        mimeType = (
+        mimeType = (_a14 = detectImageMimeType(normalizedData)) != null ? _a14 : mimeType;
       }
       return {
         type: "image",
@@ -1549,7 +1550,7 @@ function detectSingleMessageCharacteristics(message) {

 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
-  var
+  var _a14, _b, _c;
   const parts = [];
   for (const attachment of attachments) {
     let url;
@@ -1561,7 +1562,7 @@ function attachmentsToParts(attachments) {
     switch (url.protocol) {
       case "http:":
       case "https:": {
-        if ((
+        if ((_a14 = attachment.contentType) == null ? void 0 : _a14.startsWith("image/")) {
          parts.push({ type: "image", image: url });
        } else {
          if (!attachment.contentType) {
@@ -1647,8 +1648,8 @@ _a7 = symbol7;

 // core/prompt/convert-to-core-messages.ts
 function convertToCoreMessages(messages, options) {
-  var
-  const tools = (
+  var _a14;
+  const tools = (_a14 = options == null ? void 0 : options.tools) != null ? _a14 : {};
   const coreMessages = [];
   for (const message of messages) {
     const { role, content, toolInvocations, experimental_attachments } = message;
@@ -1925,7 +1926,7 @@ var arrayOutputStrategy = (schema) => {
     additionalProperties: false
   },
   validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
-    var
+    var _a14;
     if (!(0, import_provider10.isJSONObject)(value) || !(0, import_provider10.isJSONArray)(value.elements)) {
       return {
         success: false,
@@ -1948,7 +1949,7 @@ var arrayOutputStrategy = (schema) => {
       }
       resultArray.push(result.value);
     }
-    const publishedElementCount = (
+    const publishedElementCount = (_a14 = latestObject == null ? void 0 : latestObject.length) != null ? _a14 : 0;
     let textDelta = "";
     if (isFirstDelta) {
       textDelta += "[";
@@ -2286,7 +2287,7 @@ async function generateObject({
     }),
     tracer,
     fn: async (span) => {
-      var
+      var _a14, _b;
      if (mode === "auto" || mode == null) {
        mode = model.defaultObjectGenerationMode;
      }
@@ -2348,7 +2349,7 @@ async function generateObject({
         }),
         tracer,
         fn: async (span2) => {
-          var
+          var _a15, _b2, _c, _d, _e, _f;
          const result2 = await model.doGenerate({
            mode: {
              type: "object-json",
@@ -2364,7 +2365,7 @@ async function generateObject({
            headers
          });
          const responseData = {
-            id: (_b2 = (
+            id: (_b2 = (_a15 = result2.response) == null ? void 0 : _a15.id) != null ? _b2 : generateId3(),
            timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
            modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
          };
@@ -2406,7 +2407,7 @@ async function generateObject({
       rawResponse = generateResult.rawResponse;
       logprobs = generateResult.logprobs;
       resultProviderMetadata = generateResult.providerMetadata;
-      request = (
+      request = (_a14 = generateResult.request) != null ? _a14 : {};
       response = generateResult.responseData;
       break;
     }
@@ -2452,7 +2453,7 @@ async function generateObject({
         }),
         tracer,
         fn: async (span2) => {
-          var
+          var _a15, _b2, _c, _d, _e, _f, _g, _h;
          const result2 = await model.doGenerate({
            mode: {
              type: "object-tool",
@@ -2470,7 +2471,7 @@ async function generateObject({
            abortSignal,
            headers
          });
-          const objectText = (_b2 = (
+          const objectText = (_b2 = (_a15 = result2.toolCalls) == null ? void 0 : _a15[0]) == null ? void 0 : _b2.args;
          const responseData = {
            id: (_d = (_c = result2.response) == null ? void 0 : _c.id) != null ? _d : generateId3(),
            timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
@@ -2596,9 +2597,9 @@ var DefaultGenerateObjectResult = class {
     this.logprobs = options.logprobs;
   }
   toJsonResponse(init) {
-    var
+    var _a14;
     return new Response(JSON.stringify(this.object), {
-      status: (
+      status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
        contentType: "application/json; charset=utf-8"
      })
@@ -2633,17 +2634,17 @@ var DelayedPromise = class {
     return this.promise;
   }
   resolve(value) {
-    var
+    var _a14;
     this.status = { type: "resolved", value };
     if (this.promise) {
-      (
+      (_a14 = this._resolve) == null ? void 0 : _a14.call(this, value);
     }
   }
   reject(error) {
-    var
+    var _a14;
     this.status = { type: "rejected", error };
     if (this.promise) {
-      (
+      (_a14 = this._reject) == null ? void 0 : _a14.call(this, error);
     }
   }
 };
@@ -2732,8 +2733,8 @@ function createStitchableStream() {

 // core/util/now.ts
 function now() {
-  var
-  return (_b = (
+  var _a14, _b;
+  return (_b = (_a14 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a14.now()) != null ? _b : Date.now();
 }

 // core/generate-object/stream-object.ts
@@ -3022,7 +3023,7 @@ var DefaultStreamObjectResult = class {
     const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
       new TransformStream({
         async transform(chunk, controller) {
-          var
+          var _a14, _b, _c;
          if (isFirstChunk) {
            const msToFirstChunk = now2() - startTimestampMs;
            isFirstChunk = false;
@@ -3068,7 +3069,7 @@ var DefaultStreamObjectResult = class {
          switch (chunk.type) {
            case "response-metadata": {
              response = {
-                id: (
+                id: (_a14 = chunk.id) != null ? _a14 : response.id,
                timestamp: (_b = chunk.timestamp) != null ? _b : response.timestamp,
                modelId: (_c = chunk.modelId) != null ? _c : response.modelId
              };
@@ -3282,9 +3283,9 @@ var DefaultStreamObjectResult = class {
     });
   }
   toTextStreamResponse(init) {
-    var
+    var _a14;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (
+      status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
        contentType: "text/plain; charset=utf-8"
      })
@@ -3295,28 +3296,17 @@ var DefaultStreamObjectResult = class {
 // core/generate-text/generate-text.ts
 var import_provider_utils9 = require("@ai-sdk/provider-utils");

-// errors/
-var import_provider15 = require("@ai-sdk/provider");
-
-// errors/invalid-tool-arguments-error.ts
+// errors/no-output-specified-error.ts
 var import_provider11 = require("@ai-sdk/provider");
-var name8 = "
+var name8 = "AI_NoOutputSpecifiedError";
 var marker8 = `vercel.ai.error.${name8}`;
 var symbol8 = Symbol.for(marker8);
 var _a8;
-var
-
-
-
-  cause,
-  message = `Invalid arguments for tool ${toolName}: ${(0, import_provider11.getErrorMessage)(
-    cause
-  )}`
-}) {
-  super({ name: name8, message, cause });
+var NoOutputSpecifiedError = class extends import_provider11.AISDKError {
+  // used in isInstance
+  constructor({ message = "No output specified." } = {}) {
+    super({ name: name8, message });
     this[_a8] = true;
-  this.toolArgs = toolArgs;
-  this.toolName = toolName;
   }
   static isInstance(error) {
     return import_provider11.AISDKError.hasMarker(error, marker8);
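
Note on this hunk: the bundler reordered the error modules, so the name8/marker8/symbol8 slot that previously held an error class now holds the new NoOutputSpecifiedError (AI_NoOutputSpecifiedError), which guards the experimental output accessors added further down in this diff. A minimal consumer-side sketch in TypeScript, assuming the error is imported from the public ai entry point as the updated export list above indicates; the model constant is a hypothetical stand-in for any configured language model:

import { generateText, NoOutputSpecifiedError } from "ai";

// Hypothetical stand-in; any provider model accepted by generateText works.
declare const model: Parameters<typeof generateText>[0]["model"];

async function main() {
  const result = await generateText({ model, prompt: "Hello" });
  try {
    // Throws because no experimental_output was passed to generateText:
    console.log(result.experimental_output);
  } catch (error) {
    if (NoOutputSpecifiedError.isInstance(error)) {
      console.log("No experimental_output was configured for this call.");
    } else {
      throw error;
    }
  }
}

main();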
@@ -3324,74 +3314,29 @@ var InvalidToolArgumentsError = class extends import_provider11.AISDKError {
 };
 _a8 = symbol8;

-// errors/
+// errors/tool-execution-error.ts
 var import_provider12 = require("@ai-sdk/provider");
-var name9 = "
+var name9 = "AI_ToolExecutionError";
 var marker9 = `vercel.ai.error.${name9}`;
 var symbol9 = Symbol.for(marker9);
 var _a9;
-var
-  constructor({
-    toolName,
-    availableTools = void 0,
-    message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
-  }) {
-    super({ name: name9, message });
-    this[_a9] = true;
-    this.toolName = toolName;
-    this.availableTools = availableTools;
-  }
-  static isInstance(error) {
-    return import_provider12.AISDKError.hasMarker(error, marker9);
-  }
-};
-_a9 = symbol9;
-
-// errors/tool-call-repair-error.ts
-var import_provider13 = require("@ai-sdk/provider");
-var name10 = "AI_ToolCallRepairError";
-var marker10 = `vercel.ai.error.${name10}`;
-var symbol10 = Symbol.for(marker10);
-var _a10;
-var ToolCallRepairError = class extends import_provider13.AISDKError {
-  constructor({
-    cause,
-    originalError,
-    message = `Error repairing tool call: ${(0, import_provider13.getErrorMessage)(cause)}`
-  }) {
-    super({ name: name10, message, cause });
-    this[_a10] = true;
-    this.originalError = originalError;
-  }
-  static isInstance(error) {
-    return import_provider13.AISDKError.hasMarker(error, marker10);
-  }
-};
-_a10 = symbol10;
-
-// errors/tool-execution-error.ts
-var import_provider14 = require("@ai-sdk/provider");
-var name11 = "AI_ToolExecutionError";
-var marker11 = `vercel.ai.error.${name11}`;
-var symbol11 = Symbol.for(marker11);
-var _a11;
-var ToolExecutionError = class extends import_provider14.AISDKError {
+var ToolExecutionError = class extends import_provider12.AISDKError {
   constructor({
     toolArgs,
     toolName,
     cause,
-    message = `Error executing tool ${toolName}: ${(0,
+    message = `Error executing tool ${toolName}: ${(0, import_provider12.getErrorMessage)(cause)}`
   }) {
-    super({ name:
-    this[
+    super({ name: name9, message, cause });
+    this[_a9] = true;
     this.toolArgs = toolArgs;
     this.toolName = toolName;
   }
   static isInstance(error) {
-    return
+    return import_provider12.AISDKError.hasMarker(error, marker9);
   }
 };
-
+_a9 = symbol9;

 // core/prompt/prepare-tools-and-tool-choice.ts
 var import_ui_utils4 = require("@ai-sdk/ui-utils");
@@ -3414,24 +3359,24 @@ function prepareToolsAndToolChoice({
     };
   }
   const filteredTools = activeTools != null ? Object.entries(tools).filter(
-    ([
+    ([name14]) => activeTools.includes(name14)
   ) : Object.entries(tools);
   return {
-    tools: filteredTools.map(([
+    tools: filteredTools.map(([name14, tool2]) => {
      const toolType = tool2.type;
      switch (toolType) {
        case void 0:
        case "function":
          return {
            type: "function",
-            name:
+            name: name14,
            description: tool2.description,
            parameters: (0, import_ui_utils4.asSchema)(tool2.parameters).jsonSchema
          };
        case "provider-defined":
          return {
            type: "provider-defined",
-            name:
+            name: name14,
            id: tool2.id,
            args: tool2.args
          };
@@ -3461,6 +3406,79 @@ function removeTextAfterLastWhitespace(text2) {
 // core/generate-text/parse-tool-call.ts
 var import_provider_utils8 = require("@ai-sdk/provider-utils");
 var import_ui_utils5 = require("@ai-sdk/ui-utils");
+
+// errors/invalid-tool-arguments-error.ts
+var import_provider13 = require("@ai-sdk/provider");
+var name10 = "AI_InvalidToolArgumentsError";
+var marker10 = `vercel.ai.error.${name10}`;
+var symbol10 = Symbol.for(marker10);
+var _a10;
+var InvalidToolArgumentsError = class extends import_provider13.AISDKError {
+  constructor({
+    toolArgs,
+    toolName,
+    cause,
+    message = `Invalid arguments for tool ${toolName}: ${(0, import_provider13.getErrorMessage)(
+      cause
+    )}`
+  }) {
+    super({ name: name10, message, cause });
+    this[_a10] = true;
+    this.toolArgs = toolArgs;
+    this.toolName = toolName;
+  }
+  static isInstance(error) {
+    return import_provider13.AISDKError.hasMarker(error, marker10);
+  }
+};
+_a10 = symbol10;
+
+// errors/no-such-tool-error.ts
+var import_provider14 = require("@ai-sdk/provider");
+var name11 = "AI_NoSuchToolError";
+var marker11 = `vercel.ai.error.${name11}`;
+var symbol11 = Symbol.for(marker11);
+var _a11;
+var NoSuchToolError = class extends import_provider14.AISDKError {
+  constructor({
+    toolName,
+    availableTools = void 0,
+    message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
+  }) {
+    super({ name: name11, message });
+    this[_a11] = true;
+    this.toolName = toolName;
+    this.availableTools = availableTools;
+  }
+  static isInstance(error) {
+    return import_provider14.AISDKError.hasMarker(error, marker11);
+  }
+};
+_a11 = symbol11;
+
+// errors/tool-call-repair-error.ts
+var import_provider15 = require("@ai-sdk/provider");
+var name12 = "AI_ToolCallRepairError";
+var marker12 = `vercel.ai.error.${name12}`;
+var symbol12 = Symbol.for(marker12);
+var _a12;
+var ToolCallRepairError = class extends import_provider15.AISDKError {
+  constructor({
+    cause,
+    originalError,
+    message = `Error repairing tool call: ${(0, import_provider15.getErrorMessage)(cause)}`
+  }) {
+    super({ name: name12, message, cause });
+    this[_a12] = true;
+    this.originalError = originalError;
+  }
+  static isInstance(error) {
+    return import_provider15.AISDKError.hasMarker(error, marker12);
+  }
+};
+_a12 = symbol12;
+
+// core/generate-text/parse-tool-call.ts
 async function parseToolCall({
   toolCall,
   tools,
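
The hunks above shuffle InvalidToolArgumentsError, NoSuchToolError, and ToolCallRepairError to new positions next to parse-tool-call.ts; only the bundler's nameN/markerN/symbolN counters change, while the AI_* error names and vercel.ai.error.* markers stay identical, so isInstance checks remain stable between 4.0.23 and 4.0.25. A sketch of the usual discrimination pattern built on those markers, assuming the errors are imported from the public ai entry point:

import {
  InvalidToolArgumentsError,
  NoSuchToolError,
  ToolExecutionError,
} from "ai";

function describeToolError(error: unknown): string {
  // hasMarker-based checks work even across duplicated bundled copies:
  if (NoSuchToolError.isInstance(error)) {
    return `Unknown tool: ${error.toolName}`;
  }
  if (InvalidToolArgumentsError.isInstance(error)) {
    return `Bad arguments for ${error.toolName}: ${error.toolArgs}`;
  }
  if (ToolExecutionError.isInstance(error)) {
    return `Tool ${error.toolName} failed while executing`;
  }
  return "Unrelated error";
}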
@@ -3591,7 +3609,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
-  var
+  var _a14;
   if (maxSteps < 1) {
     throw new InvalidArgumentError({
       parameter: "maxSteps",
@@ -3608,7 +3626,7 @@ async function generateText({
   });
   const initialPrompt = standardizePrompt({
     prompt: {
-      system: (
+      system: (_a14 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a14 : system,
       prompt,
       messages
     },
@@ -3634,7 +3652,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var
+      var _a15, _b, _c, _d, _e, _f;
      const mode = {
        type: "regular",
        ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3686,8 +3704,8 @@ async function generateText({
           "ai.prompt.tools": {
             // convert the language model level tools:
             input: () => {
-              var
-              return (
+              var _a16;
+              return (_a16 = mode.tools) == null ? void 0 : _a16.map((tool2) => JSON.stringify(tool2));
             }
           },
           "ai.prompt.toolChoice": {
@@ -3707,7 +3725,7 @@ async function generateText({
         }),
         tracer,
         fn: async (span2) => {
-          var
+          var _a16, _b2, _c2, _d2, _e2, _f2;
          const result = await model.doGenerate({
            mode,
            ...callSettings,
@@ -3719,7 +3737,7 @@ async function generateText({
            headers
          });
          const responseData = {
-            id: (_b2 = (
+            id: (_b2 = (_a16 = result.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId3(),
            timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
            modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
          };
@@ -3753,7 +3771,7 @@ async function generateText({
       })
     );
     currentToolCalls = await Promise.all(
-      ((
+      ((_a15 = currentModelResponse.toolCalls) != null ? _a15 : []).map(
        (toolCall) => parseToolCall({
          toolCall,
          tools,
@@ -3854,13 +3872,15 @@ async function generateText({
     );
     return new DefaultGenerateTextResult({
       text: text2,
-
-
-
-      response: currentModelResponse.response,
-      usage
+      outputResolver: () => {
+        if (output == null) {
+          throw new NoOutputSpecifiedError();
       }
-
+        return output.parseOutput(
+          { text: text2 },
+          { response: currentModelResponse.response, usage }
+        );
+      },
       toolCalls: currentToolCalls,
       toolResults: currentToolResults,
       finishReason: currentModelResponse.finishReason,
@@ -3966,7 +3986,10 @@ var DefaultGenerateTextResult = class {
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
     this.logprobs = options.logprobs;
-    this.
+    this.outputResolver = options.outputResolver;
+  }
+  get experimental_output() {
+    return this.outputResolver();
   }
 };
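
These generate-text hunks thread an experimental_output setting through generateText: the system prompt is augmented via injectIntoSystemPrompt, and DefaultGenerateTextResult gains a lazy outputResolver, so parsing (and the NoOutputSpecifiedError above) only happens when experimental_output is read. A hedged usage sketch, assuming Output is the re-exported output_exports namespace and that zod schemas are accepted (schemas pass through asSchema in this bundle):

import { generateText, Output } from "ai";
import { z } from "zod";

// Hypothetical stand-in for a configured provider model.
declare const model: Parameters<typeof generateText>[0]["model"];

async function main() {
  const result = await generateText({
    model,
    prompt: "Name a city and its population.",
    experimental_output: Output.object({
      schema: z.object({ city: z.string(), population: z.number() }),
    }),
  });
  // Parsed lazily through the new outputResolver:
  const { city, population } = result.experimental_output;
  console.log(city, population);
}

main();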
@@ -3978,12 +4001,20 @@ __export(output_exports, {
 });
 var import_provider_utils10 = require("@ai-sdk/provider-utils");
 var import_ui_utils6 = require("@ai-sdk/ui-utils");
+
+// errors/index.ts
+var import_provider16 = require("@ai-sdk/provider");
+
+// core/generate-text/output.ts
 var text = () => ({
   type: "text",
   responseFormat: () => ({ type: "text" }),
   injectIntoSystemPrompt({ system }) {
     return system;
   },
+  parsePartial({ text: text2 }) {
+    return { partial: text2 };
+  },
   parseOutput({ text: text2 }) {
     return text2;
   }
@@ -4004,6 +4035,24 @@ var object = ({
       schema: schema.jsonSchema
     });
   },
+  parsePartial({ text: text2 }) {
+    const result = (0, import_ui_utils6.parsePartialJson)(text2);
+    switch (result.state) {
+      case "failed-parse":
+      case "undefined-input":
+        return void 0;
+      case "repaired-parse":
+      case "successful-parse":
+        return {
+          // Note: currently no validation of partial results:
+          partial: result.value
+        };
+      default: {
+        const _exhaustiveCheck = result.state;
+        throw new Error(`Unsupported parse state: ${_exhaustiveCheck}`);
+      }
+    }
+  },
   parseOutput({ text: text2 }, context) {
     const parseResult = (0, import_provider_utils10.safeParseJSON)({ text: text2 });
     if (!parseResult.success) {
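
The new parsePartial hooks drive partial structured output during streaming: the text strategy wraps the raw text, while the object strategy maps parsePartialJson's four states to undefined (nothing usable yet) or { partial } with an explicitly unvalidated best-effort value. Consumers therefore have to tolerate missing fields and transiently truncated values; a small defensive-rendering sketch, where the PartialCity shape is a hypothetical example type:

type PartialCity = { city?: string; population?: number };

function renderPartial(partial: PartialCity): string {
  // Every field may still be absent, and the most recent field may hold a
  // truncated value (e.g. population 21 while "2154327" is still streaming).
  const city = partial.city ?? "?";
  const population = partial.population != null ? String(partial.population) : "?";
  return `${city}: ${population}`;
}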
@@ -4327,6 +4376,7 @@ function streamText({
   abortSignal,
   headers,
   maxSteps = 1,
+  experimental_output: output,
   experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
@@ -4361,6 +4411,7 @@ function streamText({
     activeTools,
     repairToolCall,
     maxSteps,
+    output,
     continueSteps,
     providerMetadata,
     onChunk,
@@ -4371,6 +4422,57 @@ function streamText({
     generateId: generateId3
   });
 }
+function createOutputTransformStream(output) {
+  if (!output) {
+    return new TransformStream({
+      transform(chunk, controller) {
+        controller.enqueue({ part: chunk, partialOutput: void 0 });
+      }
+    });
+  }
+  let text2 = "";
+  let textChunk = "";
+  let lastPublishedJson = "";
+  return new TransformStream({
+    transform(chunk, controller) {
+      if (chunk.type !== "text-delta") {
+        controller.enqueue({
+          part: chunk,
+          partialOutput: void 0
+        });
+        return;
+      }
+      text2 += chunk.textDelta;
+      textChunk += chunk.textDelta;
+      const result = output.parsePartial({ text: text2 });
+      if (result != null) {
+        const currentJson = JSON.stringify(result.partial);
+        if (currentJson !== lastPublishedJson) {
+          controller.enqueue({
+            part: {
+              type: "text-delta",
+              textDelta: textChunk
+            },
+            partialOutput: result.partial
+          });
+          lastPublishedJson = currentJson;
+          textChunk = "";
+        }
+      }
+    },
+    flush(controller) {
+      if (textChunk.length > 0) {
+        controller.enqueue({
+          part: {
+            type: "text-delta",
+            textDelta: textChunk
+          },
+          partialOutput: void 0
+        });
+      }
+    }
+  });
+}
 var DefaultStreamTextResult = class {
   constructor({
     model,
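
Design note on createOutputTransformStream: every chunk is wrapped as { part, partialOutput }, and text deltas are buffered until JSON.stringify of the freshly parsed partial differs from the last published one, which deduplicates updates that do not change the parsed value; flush re-emits any buffered tail with partialOutput set to undefined. A generic sketch of consuming such a wrapped stream with web-standard streams:

type WrappedChunk<Part, Out> = { part: Part; partialOutput: Out | undefined };

async function collectPartials<Part, Out>(
  stream: ReadableStream<WrappedChunk<Part, Out>>,
): Promise<Out[]> {
  const partials: Out[] = [];
  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // Only chunks where the parsed value advanced carry a partialOutput:
    if (value.partialOutput !== undefined) {
      partials.push(value.partialOutput);
    }
  }
  return partials;
}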
@@ -4389,6 +4491,7 @@ var DefaultStreamTextResult = class {
     activeTools,
     repairToolCall,
     maxSteps,
+    output,
     continueSteps,
     providerMetadata,
     onChunk,
@@ -4408,6 +4511,7 @@ var DefaultStreamTextResult = class {
     this.requestPromise = new DelayedPromise();
     this.responsePromise = new DelayedPromise();
     this.stepsPromise = new DelayedPromise();
+    var _a14;
     if (maxSteps < 1) {
       throw new InvalidArgumentError({
         parameter: "maxSteps",
@@ -4415,10 +4519,10 @@ var DefaultStreamTextResult = class {
         message: "maxSteps must be at least 1"
       });
     }
+    this.output = output;
     let recordedStepText = "";
     let recordedContinuationText = "";
     let recordedFullText = "";
-    let recordedRequest = void 0;
     const recordedResponse = {
       id: generateId3(),
       timestamp: currentDate(),
@@ -4429,28 +4533,28 @@ var DefaultStreamTextResult = class {
     let recordedToolResults = [];
     let recordedFinishReason = void 0;
     let recordedUsage = void 0;
-    let recordedProviderMetadata = void 0;
     let stepType = "initial";
     const recordedSteps = [];
     let rootSpan;
     const eventProcessor = new TransformStream({
       async transform(chunk, controller) {
         controller.enqueue(chunk);
-
-
+        const { part } = chunk;
+        if (part.type === "text-delta" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
+          await (onChunk == null ? void 0 : onChunk({ chunk: part }));
         }
-        if (
-          recordedStepText +=
-          recordedContinuationText +=
-          recordedFullText +=
+        if (part.type === "text-delta") {
+          recordedStepText += part.textDelta;
+          recordedContinuationText += part.textDelta;
+          recordedFullText += part.textDelta;
         }
-        if (
-          recordedToolCalls.push(
+        if (part.type === "tool-call") {
+          recordedToolCalls.push(part);
         }
-        if (
-          recordedToolResults.push(
+        if (part.type === "tool-result") {
+          recordedToolResults.push(part);
         }
-        if (
+        if (part.type === "step-finish") {
           const stepMessages = toResponseMessages({
             text: recordedContinuationText,
             tools: tools != null ? tools : {},
@@ -4460,7 +4564,7 @@ var DefaultStreamTextResult = class {
           const currentStep = recordedSteps.length;
           let nextStepType = "done";
           if (currentStep + 1 < maxSteps) {
-            if (continueSteps &&
+            if (continueSteps && part.finishReason === "length" && // only use continue when there are no tool calls:
            recordedToolCalls.length === 0) {
              nextStepType = "continue";
            } else if (
@@ -4476,24 +4580,23 @@ var DefaultStreamTextResult = class {
             text: recordedStepText,
             toolCalls: recordedToolCalls,
             toolResults: recordedToolResults,
-            finishReason:
-            usage:
-            warnings:
-            logprobs:
-            request:
+            finishReason: part.finishReason,
+            usage: part.usage,
+            warnings: part.warnings,
+            logprobs: part.logprobs,
+            request: part.request,
             response: {
-              ...
+              ...part.response,
              messages: [...recordedResponse.messages, ...stepMessages]
            },
-            experimental_providerMetadata:
-            isContinued:
+            experimental_providerMetadata: part.experimental_providerMetadata,
+            isContinued: part.isContinued
           };
           await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
           recordedSteps.push(currentStepResult);
           recordedToolCalls = [];
           recordedToolResults = [];
           recordedStepText = "";
-          recordedRequest = chunk.request;
           if (nextStepType !== "done") {
             stepType = nextStepType;
           }
@@ -4502,18 +4605,17 @@ var DefaultStreamTextResult = class {
             recordedContinuationText = "";
           }
         }
-        if (
-          recordedResponse.id =
-          recordedResponse.timestamp =
-          recordedResponse.modelId =
-          recordedResponse.headers =
-          recordedUsage =
-          recordedFinishReason =
-          recordedProviderMetadata = chunk.experimental_providerMetadata;
+        if (part.type === "finish") {
+          recordedResponse.id = part.response.id;
+          recordedResponse.timestamp = part.response.timestamp;
+          recordedResponse.modelId = part.response.modelId;
+          recordedResponse.headers = part.response.headers;
+          recordedUsage = part.usage;
+          recordedFinishReason = part.finishReason;
         }
       },
       async flush(controller) {
-        var
+        var _a15;
        try {
          const lastStep = recordedSteps[recordedSteps.length - 1];
          if (lastStep) {
@@ -4543,7 +4645,7 @@ var DefaultStreamTextResult = class {
             text: recordedFullText,
             toolCalls: lastStep.toolCalls,
             toolResults: lastStep.toolResults,
-            request: (
+            request: (_a15 = lastStep.request) != null ? _a15 : {},
             response: lastStep.response,
             warnings: lastStep.warnings,
             experimental_providerMetadata: lastStep.experimental_providerMetadata,
@@ -4557,8 +4659,8 @@ var DefaultStreamTextResult = class {
             "ai.response.text": { output: () => recordedFullText },
             "ai.response.toolCalls": {
               output: () => {
-                var
-                return ((
+                var _a16;
+                return ((_a16 = lastStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(lastStep.toolCalls) : void 0;
               }
             },
             "ai.usage.promptTokens": usage.promptTokens,
@@ -4576,7 +4678,11 @@ var DefaultStreamTextResult = class {
     const stitchableStream = createStitchableStream();
     this.addStream = stitchableStream.addStream;
     this.closeStream = stitchableStream.close;
-
+    let stream = stitchableStream.stream;
+    if (transform) {
+      stream = stream.pipeThrough(transform);
+    }
+    this.baseStream = stream.pipeThrough(createOutputTransformStream(output)).pipeThrough(eventProcessor);
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -4588,7 +4694,11 @@ var DefaultStreamTextResult = class {
       settings: { ...settings, maxRetries }
     });
     const initialPrompt = standardizePrompt({
-      prompt: {
+      prompt: {
+        system: (_a14 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a14 : system,
+        prompt,
+        messages
+      },
       tools
     });
     const self = this;
@@ -4637,7 +4747,7 @@ var DefaultStreamTextResult = class {
           ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
         };
         const {
-          result: { stream, warnings, rawResponse, request },
+          result: { stream: stream2, warnings, rawResponse, request },
          doStreamSpan,
          startTimestampMs
        } = await retry(
@@ -4660,8 +4770,8 @@ var DefaultStreamTextResult = class {
             "ai.prompt.tools": {
               // convert the language model level tools:
               input: () => {
-                var
-                return (
+                var _a15;
+                return (_a15 = mode.tools) == null ? void 0 : _a15.map((tool2) => JSON.stringify(tool2));
               }
             },
             "ai.prompt.toolChoice": {
@@ -4689,6 +4799,7 @@ var DefaultStreamTextResult = class {
             mode,
             ...prepareCallSettings(settings),
             inputFormat: promptFormat,
+            responseFormat: output == null ? void 0 : output.responseFormat({ model }),
             prompt: promptMessages,
             providerMetadata,
             abortSignal,
@@ -4699,7 +4810,7 @@ var DefaultStreamTextResult = class {
         );
         const transformedStream = runToolsTransformation({
           tools,
-          generatorStream:
+          generatorStream: stream2,
           toolCallStreaming,
           tracer,
           telemetry,
@@ -4745,7 +4856,7 @@ var DefaultStreamTextResult = class {
         transformedStream.pipeThrough(
           new TransformStream({
             async transform(chunk, controller) {
-              var
+              var _a15, _b, _c;
              if (stepFirstChunk) {
                const msToFirstChunk = now2() - startTimestampMs;
                stepFirstChunk = false;
@@ -4797,7 +4908,7 @@ var DefaultStreamTextResult = class {
                }
                case "response-metadata": {
                  stepResponse = {
-                    id: (
+                    id: (_a15 = chunk.id) != null ? _a15 : stepResponse.id,
                    timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
                    modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
                  };
@@ -5022,11 +5133,11 @@ var DefaultStreamTextResult = class {
     return createAsyncIterableStream(
       this.teeStream().pipeThrough(
         new TransformStream({
-          transform(
-            if (
-              controller.enqueue(
-            } else if (
-              controller.error(
+          transform({ part }, controller) {
+            if (part.type === "text-delta") {
+              controller.enqueue(part.textDelta);
+            } else if (part.type === "error") {
+              controller.error(part.error);
            }
          }
        })
@@ -5034,7 +5145,31 @@ var DefaultStreamTextResult = class {
     );
   }
   get fullStream() {
-    return createAsyncIterableStream(
+    return createAsyncIterableStream(
+      this.teeStream().pipeThrough(
+        new TransformStream({
+          transform({ part }, controller) {
+            controller.enqueue(part);
+          }
+        })
+      )
+    );
+  }
+  get experimental_partialOutputStream() {
+    if (this.output == null) {
+      throw new NoOutputSpecifiedError();
+    }
+    return createAsyncIterableStream(
+      this.teeStream().pipeThrough(
+        new TransformStream({
+          transform({ partialOutput }, controller) {
+            if (partialOutput != null) {
+              controller.enqueue(partialOutput);
+            }
+          }
+        })
+      )
+    );
   }
   toDataStreamInternal({
     getErrorMessage: getErrorMessage5 = () => "An error occurred.",
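
The new experimental_partialOutputStream getter surfaces exactly those deduplicated partials from the wrapped stream, and throws NoOutputSpecifiedError when the result was created without experimental_output. A hedged end-to-end sketch mirroring the generateText example earlier (streamText returns synchronously in this version, so the call itself is not awaited):

import { streamText, Output } from "ai";
import { z } from "zod";

// Hypothetical stand-in for a configured provider model.
declare const model: Parameters<typeof streamText>[0]["model"];

async function main() {
  const result = streamText({
    model,
    prompt: "Name a city and its population.",
    experimental_output: Output.object({
      schema: z.object({ city: z.string(), population: z.number() }),
    }),
  });
  // Each iteration yields a best-effort, schema-unvalidated partial object:
  for await (const partial of result.experimental_partialOutputStream) {
    console.log(partial);
  }
}

main();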
@@ -5201,9 +5336,9 @@ var DefaultStreamTextResult = class {
     );
   }
   toTextStreamResponse(init) {
-    var
+    var _a14;
     return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
-      status: (
+      status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
        contentType: "text/plain; charset=utf-8"
      })
@@ -5280,7 +5415,7 @@ var experimental_wrapLanguageModel = ({
 };

 // core/registry/custom-provider.ts
-var
+var import_provider17 = require("@ai-sdk/provider");
 function experimental_customProvider({
   languageModels,
   textEmbeddingModels,
@@ -5294,7 +5429,7 @@ function experimental_customProvider({
     if (fallbackProvider) {
       return fallbackProvider.languageModel(modelId);
     }
-    throw new
+    throw new import_provider17.NoSuchModelError({ modelId, modelType: "languageModel" });
   },
   textEmbeddingModel(modelId) {
     if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
@@ -5303,18 +5438,18 @@ function experimental_customProvider({
     if (fallbackProvider) {
       return fallbackProvider.textEmbeddingModel(modelId);
     }
-    throw new
+    throw new import_provider17.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
   }
   };
 }

 // core/registry/no-such-provider-error.ts
-var
-var
-var
-var
-var
-var NoSuchProviderError = class extends
+var import_provider18 = require("@ai-sdk/provider");
+var name13 = "AI_NoSuchProviderError";
+var marker13 = `vercel.ai.error.${name13}`;
+var symbol13 = Symbol.for(marker13);
+var _a13;
+var NoSuchProviderError = class extends import_provider18.NoSuchModelError {
   constructor({
     modelId,
     modelType,
@@ -5322,19 +5457,19 @@ var NoSuchProviderError = class extends import_provider17.NoSuchModelError {
     availableProviders,
     message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
   }) {
-    super({ errorName:
-    this[
+    super({ errorName: name13, modelId, modelType, message });
+    this[_a13] = true;
     this.providerId = providerId;
     this.availableProviders = availableProviders;
   }
   static isInstance(error) {
-    return
+    return import_provider18.AISDKError.hasMarker(error, marker13);
   }
 };
-
+_a13 = symbol13;

 // core/registry/provider-registry.ts
-var
+var import_provider19 = require("@ai-sdk/provider");
 function experimental_createProviderRegistry(providers) {
   const registry = new DefaultProviderRegistry();
   for (const [id, provider] of Object.entries(providers)) {
@@ -5364,7 +5499,7 @@ var DefaultProviderRegistry = class {
   splitId(id, modelType) {
     const index = id.indexOf(":");
     if (index === -1) {
-      throw new
+      throw new import_provider19.NoSuchModelError({
        modelId: id,
        modelType,
        message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId:modelId")`
@@ -5373,21 +5508,21 @@ var DefaultProviderRegistry = class {
     return [id.slice(0, index), id.slice(index + 1)];
   }
   languageModel(id) {
-    var
+    var _a14, _b;
     const [providerId, modelId] = this.splitId(id, "languageModel");
-    const model = (_b = (
+    const model = (_b = (_a14 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a14, modelId);
     if (model == null) {
-      throw new
+      throw new import_provider19.NoSuchModelError({ modelId: id, modelType: "languageModel" });
     }
     return model;
   }
   textEmbeddingModel(id) {
-    var
+    var _a14;
     const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
     const provider = this.getProvider(providerId);
-    const model = (
+    const model = (_a14 = provider.textEmbeddingModel) == null ? void 0 : _a14.call(provider, modelId);
     if (model == null) {
-      throw new
+      throw new import_provider19.NoSuchModelError({
        modelId: id,
        modelType: "textEmbeddingModel"
      });
@@ -5431,7 +5566,7 @@ var import_ui_utils10 = require("@ai-sdk/ui-utils");
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
-      var
+      var _a14;
      const textEncoder = new TextEncoder();
      const sendMessage = (message) => {
        controller.enqueue(
@@ -5453,7 +5588,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
       );
       };
       const forwardStream = async (stream2) => {
-        var
+        var _a15, _b;
        let result = void 0;
        for await (const value of stream2) {
          switch (value.event) {
@@ -5470,7 +5605,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
             break;
            }
            case "thread.message.delta": {
-              const content = (
+              const content = (_a15 = value.data.delta.content) == null ? void 0 : _a15[0];
              if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
                controller.enqueue(
                  textEncoder.encode(
@@ -5504,7 +5639,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
           forwardStream
         });
       } catch (error) {
-        sendError((
+        sendError((_a14 = error.message) != null ? _a14 : `${error}`);
       } finally {
         controller.close();
       }
@@ -5565,7 +5700,7 @@ function toDataStreamInternal(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
       transform: async (value, controller) => {
-        var
+        var _a14;
        if (typeof value === "string") {
          controller.enqueue(value);
          return;
@@ -5573,7 +5708,7 @@ function toDataStreamInternal(stream, callbacks) {
        if ("event" in value) {
          if (value.event === "on_chat_model_stream") {
            forwardAIMessageChunk(
-              (
+              (_a14 = value.data) == null ? void 0 : _a14.chunk,
              controller
            );
          }
@@ -5596,7 +5731,7 @@ function toDataStream(stream, callbacks) {
   );
 }
 function toDataStreamResponse(stream, options) {
-  var
+  var _a14;
   const dataStream = toDataStreamInternal(
     stream,
     options == null ? void 0 : options.callbacks
@@ -5605,7 +5740,7 @@ function toDataStreamResponse(stream, options) {
   const init = options == null ? void 0 : options.init;
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (
+    status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
@@ -5660,14 +5795,14 @@ function toDataStream2(stream, callbacks) {
   );
 }
 function toDataStreamResponse2(stream, options = {}) {
-  var
+  var _a14;
   const { init, data, callbacks } = options;
   const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
     new TextEncoderStream()
   );
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
-    status: (
+    status: (_a14 = init == null ? void 0 : init.status) != null ? _a14 : 200,
     statusText: init == null ? void 0 : init.statusText,
     headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
@@ -5778,6 +5913,7 @@ var StreamData = class {
   MessageConversionError,
   NoContentGeneratedError,
   NoObjectGeneratedError,
+  NoOutputSpecifiedError,
   NoSuchModelError,
   NoSuchProviderError,
   NoSuchToolError,