ai 4.0.5 → 4.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.d.mts +42 -8
- package/dist/index.d.ts +42 -8
- package/dist/index.js +272 -208
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +245 -182
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/rsc/dist/rsc-server.mjs +138 -129
- package/rsc/dist/rsc-server.mjs.map +1 -1
package/dist/index.mjs
CHANGED
@@ -18,6 +18,32 @@ import { generateId as generateId2 } from "@ai-sdk/provider-utils";
 // core/index.ts
 import { jsonSchema } from "@ai-sdk/ui-utils";
 
+// errors/invalid-argument-error.ts
+import { AISDKError } from "@ai-sdk/provider";
+var name = "AI_InvalidArgumentError";
+var marker = `vercel.ai.error.${name}`;
+var symbol = Symbol.for(marker);
+var _a;
+var InvalidArgumentError = class extends AISDKError {
+  constructor({
+    parameter,
+    value,
+    message
+  }) {
+    super({
+      name,
+      message: `Invalid argument for parameter ${parameter}: ${message}`
+    });
+    this[_a] = true;
+    this.parameter = parameter;
+    this.value = value;
+  }
+  static isInstance(error) {
+    return AISDKError.hasMarker(error, marker);
+  }
+};
+_a = symbol;
+
 // util/retry-with-exponential-backoff.ts
 import { APICallError } from "@ai-sdk/provider";
 import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
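The block added above is the bundled copy of `errors/invalid-argument-error.ts`, hoisted to the top of the bundle so the new `prepareRetries` helper (below) can use it. Note the `Symbol.for`-based marker shared by all `AI_*` errors: identity lives in a global symbol rather than the class reference, so `isInstance` keeps working even if two copies of the bundle coexist in one process. A minimal consumer-side sketch, assuming the class is re-exported from the package root like the other `AI_*` errors in the export list:

```js
import { generateText, InvalidArgumentError } from "ai";

try {
  // `model` is an assumed, already-configured language model.
  await generateText({ model, prompt: "hi", maxRetries: -1 });
} catch (error) {
  // Marker-based check, robust across duplicated bundles:
  if (InvalidArgumentError.isInstance(error)) {
    console.log(error.parameter); // "maxRetries"
    console.log(error.value);     // -1
  }
}
```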
@@ -28,28 +54,28 @@ async function delay(delayInMs) {
 }
 
 // util/retry-error.ts
-import { AISDKError } from "@ai-sdk/provider";
-var name = "AI_RetryError";
-var marker = `vercel.ai.error.${name}`;
-var symbol = Symbol.for(marker);
-var _a;
-var RetryError = class extends AISDKError {
+import { AISDKError as AISDKError2 } from "@ai-sdk/provider";
+var name2 = "AI_RetryError";
+var marker2 = `vercel.ai.error.${name2}`;
+var symbol2 = Symbol.for(marker2);
+var _a2;
+var RetryError = class extends AISDKError2 {
   constructor({
     message,
     reason,
     errors
   }) {
-    super({ name, message });
-    this[_a] = true;
+    super({ name: name2, message });
+    this[_a2] = true;
     this.reason = reason;
     this.errors = errors;
     this.lastError = errors[errors.length - 1];
   }
   static isInstance(error) {
-    return AISDKError.hasMarker(error, marker);
+    return AISDKError2.hasMarker(error, marker2);
   }
 };
-_a = symbol;
+_a2 = symbol2;
 
 // util/retry-with-exponential-backoff.ts
 var retryWithExponentialBackoff = ({
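`retryWithExponentialBackoff` throws this `RetryError` once retries are exhausted or a non-retryable failure occurs; `errors` holds every attempt's error and `lastError` the final one. A hedged handling sketch:

```js
import { generateText, RetryError } from "ai";

try {
  await generateText({ model, prompt: "hi" }); // `model` assumed configured
} catch (error) {
  if (RetryError.isInstance(error)) {
    console.log(error.reason);        // e.g. "maxRetriesExceeded"
    console.log(error.errors.length); // one entry per failed attempt
    console.log(error.lastError);     // same as error.errors.at(-1)
  }
}
```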
@@ -104,6 +130,33 @@ async function _retryWithExponentialBackoff(f, {
   }
 }
 
+// core/prompt/prepare-retries.ts
+function prepareRetries({
+  maxRetries
+}) {
+  if (maxRetries != null) {
+    if (!Number.isInteger(maxRetries)) {
+      throw new InvalidArgumentError({
+        parameter: "maxRetries",
+        value: maxRetries,
+        message: "maxRetries must be an integer"
+      });
+    }
+    if (maxRetries < 0) {
+      throw new InvalidArgumentError({
+        parameter: "maxRetries",
+        value: maxRetries,
+        message: "maxRetries must be >= 0"
+      });
+    }
+  }
+  const maxRetriesResult = maxRetries != null ? maxRetries : 2;
+  return {
+    maxRetries: maxRetriesResult,
+    retry: retryWithExponentialBackoff({ maxRetries: maxRetriesResult })
+  };
+}
+
 // core/telemetry/assemble-operation-name.ts
 function assembleOperationName({
   operationId,
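This new helper is the heart of the retry refactor in this release: the `maxRetries` validation that previously lived in `prepareCallSettings` (removed further down) now runs once, before any telemetry span opens, and each call site gets back both the normalized count and a preconfigured `retry` function. Its contract, sketched (the function is internal, not exported):

```js
// Default: omitted maxRetries resolves to 2.
const { maxRetries, retry } = prepareRetries({ maxRetries: void 0 });
// maxRetries === 2; retry === retryWithExponentialBackoff({ maxRetries: 2 })

prepareRetries({ maxRetries: 2.5 }); // throws AI_InvalidArgumentError: must be an integer
prepareRetries({ maxRetries: -1 });  // throws AI_InvalidArgumentError: must be >= 0
```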
@@ -301,11 +354,12 @@ function selectTelemetryAttributes({
 async function embed({
   model,
   value,
-  maxRetries,
+  maxRetries: maxRetriesArg,
   abortSignal,
   headers,
   experimental_telemetry: telemetry
 }) {
+  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
@@ -325,7 +379,6 @@ async function embed({
     }),
     tracer,
     fn: async (span) => {
-      const retry = retryWithExponentialBackoff({ maxRetries });
       const { embedding, usage, rawResponse } = await retry(
         () => (
           // nested spans to align with the embedMany telemetry data:
@@ -413,11 +466,12 @@ function splitArray(array, chunkSize) {
 async function embedMany({
   model,
   values,
-  maxRetries,
+  maxRetries: maxRetriesArg,
   abortSignal,
   headers,
   experimental_telemetry: telemetry
 }) {
+  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
@@ -440,7 +494,6 @@ async function embedMany({
     }),
     tracer,
     fn: async (span) => {
-      const retry = retryWithExponentialBackoff({ maxRetries });
       const maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;
       if (maxEmbeddingsPerCall == null) {
         const { embeddings: embeddings2, usage } = await retry(() => {
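Caller-visible behavior of `embed`/`embedMany` is unchanged apart from the earlier validation. A usage sketch (the provider import and model id are assumptions, not part of this diff):

```js
import { embed } from "ai";
import { openai } from "@ai-sdk/openai";

const { embedding } = await embed({
  model: openai.embedding("text-embedding-3-small"),
  value: "sunny day at the beach",
  maxRetries: 0, // now validated up front; must be an integer >= 0
});
```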
@@ -578,12 +631,12 @@ var DefaultEmbedManyResult = class {
 import { createIdGenerator, safeParseJSON } from "@ai-sdk/provider-utils";
 
 // util/download-error.ts
-import { AISDKError as AISDKError2 } from "@ai-sdk/provider";
-var name2 = "AI_DownloadError";
-var marker2 = `vercel.ai.error.${name2}`;
-var symbol2 = Symbol.for(marker2);
-var _a2;
-var DownloadError = class extends AISDKError2 {
+import { AISDKError as AISDKError3 } from "@ai-sdk/provider";
+var name3 = "AI_DownloadError";
+var marker3 = `vercel.ai.error.${name3}`;
+var symbol3 = Symbol.for(marker3);
+var _a3;
+var DownloadError = class extends AISDKError3 {
   constructor({
     url,
     statusCode,
@@ -591,17 +644,17 @@ var DownloadError = class extends AISDKError2 {
     cause,
     message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
   }) {
-    super({ name: name2, message, cause });
-    this[_a2] = true;
+    super({ name: name3, message, cause });
+    this[_a3] = true;
     this.url = url;
     this.statusCode = statusCode;
    this.statusText = statusText;
   }
   static isInstance(error) {
-    return AISDKError2.hasMarker(error, marker2);
+    return AISDKError3.hasMarker(error, marker3);
   }
 };
-_a2 = symbol2;
+_a3 = symbol3;
 
 // util/download.ts
 async function download({
@@ -654,26 +707,26 @@ import {
 } from "@ai-sdk/provider-utils";
 
 // core/prompt/invalid-data-content-error.ts
-import { AISDKError as AISDKError3 } from "@ai-sdk/provider";
-var name3 = "AI_InvalidDataContentError";
-var marker3 = `vercel.ai.error.${name3}`;
-var symbol3 = Symbol.for(marker3);
-var _a3;
-var InvalidDataContentError = class extends AISDKError3 {
+import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
+var name4 = "AI_InvalidDataContentError";
+var marker4 = `vercel.ai.error.${name4}`;
+var symbol4 = Symbol.for(marker4);
+var _a4;
+var InvalidDataContentError = class extends AISDKError4 {
   constructor({
     content,
     cause,
     message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
   }) {
-    super({ name: name3, message, cause });
-    this[_a3] = true;
+    super({ name: name4, message, cause });
+    this[_a4] = true;
     this.content = content;
   }
   static isInstance(error) {
-    return AISDKError3.hasMarker(error, marker3);
+    return AISDKError4.hasMarker(error, marker4);
   }
 };
-_a3 = symbol3;
+_a4 = symbol4;
 
 // core/prompt/data-content.ts
 import { z } from "zod";
@@ -728,25 +781,25 @@ function convertUint8ArrayToText(uint8Array) {
 }
 
 // core/prompt/invalid-message-role-error.ts
-import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
-var name4 = "AI_InvalidMessageRoleError";
-var marker4 = `vercel.ai.error.${name4}`;
-var symbol4 = Symbol.for(marker4);
-var _a4;
-var InvalidMessageRoleError = class extends AISDKError4 {
+import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
+var name5 = "AI_InvalidMessageRoleError";
+var marker5 = `vercel.ai.error.${name5}`;
+var symbol5 = Symbol.for(marker5);
+var _a5;
+var InvalidMessageRoleError = class extends AISDKError5 {
   constructor({
     role,
     message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
   }) {
-    super({ name: name4, message });
-    this[_a4] = true;
+    super({ name: name5, message });
+    this[_a5] = true;
     this.role = role;
   }
   static isInstance(error) {
-    return AISDKError4.hasMarker(error, marker4);
+    return AISDKError5.hasMarker(error, marker5);
   }
 };
-_a4 = symbol4;
+_a5 = symbol5;
 
 // core/prompt/split-data-url.ts
 function splitDataUrl(dataUrl) {
@@ -949,32 +1002,6 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
   }
 }
 
-// errors/invalid-argument-error.ts
-import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
-var name5 = "AI_InvalidArgumentError";
-var marker5 = `vercel.ai.error.${name5}`;
-var symbol5 = Symbol.for(marker5);
-var _a5;
-var InvalidArgumentError = class extends AISDKError5 {
-  constructor({
-    parameter,
-    value,
-    message
-  }) {
-    super({
-      name: name5,
-      message: `Invalid argument for parameter ${parameter}: ${message}`
-    });
-    this[_a5] = true;
-    this.parameter = parameter;
-    this.value = value;
-  }
-  static isInstance(error) {
-    return AISDKError5.hasMarker(error, marker5);
-  }
-};
-_a5 = symbol5;
-
 // core/prompt/prepare-call-settings.ts
 function prepareCallSettings({
   maxTokens,
@@ -984,8 +1011,7 @@ function prepareCallSettings({
   presencePenalty,
   frequencyPenalty,
   stopSequences,
-  seed,
-  maxRetries
+  seed
 }) {
   if (maxTokens != null) {
     if (!Number.isInteger(maxTokens)) {
@@ -1057,22 +1083,6 @@ function prepareCallSettings({
       });
     }
   }
-  if (maxRetries != null) {
-    if (!Number.isInteger(maxRetries)) {
-      throw new InvalidArgumentError({
-        parameter: "maxRetries",
-        value: maxRetries,
-        message: "maxRetries must be an integer"
-      });
-    }
-    if (maxRetries < 0) {
-      throw new InvalidArgumentError({
-        parameter: "maxRetries",
-        value: maxRetries,
-        message: "maxRetries must be >= 0"
-      });
-    }
-  }
   return {
     maxTokens,
     temperature: temperature != null ? temperature : 0,
@@ -1081,8 +1091,7 @@ function prepareCallSettings({
     presencePenalty,
     frequencyPenalty,
     stopSequences: stopSequences != null && stopSequences.length > 0 ? stopSequences : void 0,
-    seed,
-    maxRetries: maxRetries != null ? maxRetries : 2
+    seed
   };
 }
 
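With this block gone, `prepareCallSettings` no longer knows about retries at all; the same `AI_InvalidArgumentError` still reaches the caller, just raised from `prepareRetries` before any telemetry span is recorded. A quick check (hypothetical call):

```js
await generateText({ model, prompt: "hi", maxRetries: 2.5 }).catch((error) => {
  console.log(error.name);    // "AI_InvalidArgumentError"
  console.log(error.message); // "Invalid argument for parameter maxRetries: maxRetries must be an integer"
});
```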
@@ -1935,7 +1944,7 @@ async function generateObject({
   system,
   prompt,
   messages,
-  maxRetries,
+  maxRetries: maxRetriesArg,
   abortSignal,
   headers,
   experimental_telemetry: telemetry,
@@ -1954,6 +1963,7 @@ async function generateObject({
     schemaDescription,
     enumValues
   });
+  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const outputStrategy = getOutputStrategy({
     output,
     schema: inputSchema,
@@ -1993,7 +2003,6 @@ async function generateObject({
     tracer,
     fn: async (span) => {
       var _a11, _b;
-      const retry = retryWithExponentialBackoff({ maxRetries });
       if (mode === "auto" || mode == null) {
         mode = model.defaultObjectGenerationMode;
       }
@@ -2333,59 +2342,6 @@ var DelayedPromise = class {
   }
 };
 
-// core/util/now.ts
-function now() {
-  var _a11, _b;
-  return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
-}
-
-// core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(headers, {
-  contentType,
-  dataStreamVersion
-}) {
-  const outgoingHeaders = {};
-  if (headers != null) {
-    for (const [key, value] of Object.entries(headers)) {
-      outgoingHeaders[key] = value;
-    }
-  }
-  if (outgoingHeaders["Content-Type"] == null) {
-    outgoingHeaders["Content-Type"] = contentType;
-  }
-  if (dataStreamVersion !== void 0) {
-    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
-  }
-  return outgoingHeaders;
-}
-
-// core/util/write-to-server-response.ts
-function writeToServerResponse({
-  response,
-  status,
-  statusText,
-  headers,
-  stream
-}) {
-  response.writeHead(status != null ? status : 200, statusText, headers);
-  const reader = stream.getReader();
-  const read = async () => {
-    try {
-      while (true) {
-        const { done, value } = await reader.read();
-        if (done)
-          break;
-        response.write(value);
-      }
-    } catch (error) {
-      throw error;
-    } finally {
-      response.end();
-    }
-  };
-  read();
-}
-
 // util/create-resolvable-promise.ts
 function createResolvablePromise() {
   let resolve;
@@ -2468,6 +2424,59 @@ function createStitchableStream() {
   };
 }
 
+// core/util/now.ts
+function now() {
+  var _a11, _b;
+  return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
+}
+
+// core/util/prepare-outgoing-http-headers.ts
+function prepareOutgoingHttpHeaders(headers, {
+  contentType,
+  dataStreamVersion
+}) {
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
+    }
+  }
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
+  }
+  if (dataStreamVersion !== void 0) {
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
+  }
+  return outgoingHeaders;
+}
+
+// core/util/write-to-server-response.ts
+function writeToServerResponse({
+  response,
+  status,
+  statusText,
+  headers,
+  stream
+}) {
+  response.writeHead(status != null ? status : 200, statusText, headers);
+  const reader = stream.getReader();
+  const read = async () => {
+    try {
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done)
+          break;
+        response.write(value);
+      }
+    } catch (error) {
+      throw error;
+    } finally {
+      response.end();
+    }
+  };
+  read();
+}
+
 // core/generate-object/stream-object.ts
 var originalGenerateId2 = createIdGenerator2({ prefix: "aiobj", size: 24 });
 function streamObject({
@@ -2531,7 +2540,7 @@ var DefaultStreamObjectResult = class {
     headers,
     telemetry,
     settings,
-    maxRetries,
+    maxRetries: maxRetriesArg,
     abortSignal,
     outputStrategy,
     system,
@@ -2553,6 +2562,9 @@ var DefaultStreamObjectResult = class {
     this.requestPromise = new DelayedPromise();
     this.responsePromise = new DelayedPromise();
     this.stitchableStream = createStitchableStream();
+    const { maxRetries, retry } = prepareRetries({
+      maxRetries: maxRetriesArg
+    });
     const baseTelemetryAttributes = getBaseTelemetryAttributes({
       model,
       telemetry,
@@ -2560,7 +2572,6 @@ var DefaultStreamObjectResult = class {
       settings: { ...settings, maxRetries }
     });
     const tracer = getTracer(telemetry);
-    const retry = retryWithExponentialBackoff({ maxRetries });
     const self = this;
     recordSpan({
       name: "ai.streamObject",
@@ -2736,7 +2747,7 @@ var DefaultStreamObjectResult = class {
         let usage;
         let finishReason;
         let providerMetadata;
-        let object;
+        let object2;
         let error;
         let accumulatedText = "";
         let textDelta = "";
@@ -2820,8 +2831,8 @@ var DefaultStreamObjectResult = class {
             });
             const validationResult = outputStrategy.validateFinalResult(latestObjectJson);
             if (validationResult.success) {
-              object = validationResult.value;
-              self.objectPromise.resolve(object);
+              object2 = validationResult.value;
+              self.objectPromise.resolve(object2);
             } else {
               error = validationResult.error;
              self.objectPromise.reject(error);
@@ -2848,7 +2859,7 @@ var DefaultStreamObjectResult = class {
             attributes: {
               "ai.response.finishReason": finishReason,
               "ai.response.object": {
-                output: () => JSON.stringify(object)
+                output: () => JSON.stringify(object2)
               },
               "ai.response.id": response.id,
               "ai.response.model": response.modelId,
@@ -2872,14 +2883,14 @@ var DefaultStreamObjectResult = class {
             "ai.usage.promptTokens": finalUsage.promptTokens,
             "ai.usage.completionTokens": finalUsage.completionTokens,
             "ai.response.object": {
-              output: () => JSON.stringify(object)
+              output: () => JSON.stringify(object2)
             }
           }
         })
       );
       await (onFinish == null ? void 0 : onFinish({
         usage: finalUsage,
-        object,
+        object: object2,
         error,
         response: {
           ...response,
@@ -3076,8 +3087,8 @@ _a9 = symbol9;
 import { asSchema as asSchema2 } from "@ai-sdk/ui-utils";
 
 // core/util/is-non-empty-object.ts
-function isNonEmptyObject(object) {
-  return object != null && Object.keys(object).length > 0;
+function isNonEmptyObject(object2) {
+  return object2 != null && Object.keys(object2).length > 0;
 }
 
 // core/prompt/prepare-tools-and-tool-choice.ts
@@ -3126,15 +3137,15 @@ function prepareToolsAndToolChoice({
 
 // core/util/split-on-last-whitespace.ts
 var lastWhitespaceRegexp = /^([\s\S]*?)(\s+)(\S*)$/;
-function splitOnLastWhitespace(text) {
-  const match = text.match(lastWhitespaceRegexp);
+function splitOnLastWhitespace(text2) {
+  const match = text2.match(lastWhitespaceRegexp);
   return match ? { prefix: match[1], whitespace: match[2], suffix: match[3] } : void 0;
 }
 
 // core/util/remove-text-after-last-whitespace.ts
-function removeTextAfterLastWhitespace(text) {
-  const match = splitOnLastWhitespace(text);
-  return match ? match.prefix + match.whitespace : text;
+function removeTextAfterLastWhitespace(text2) {
+  const match = splitOnLastWhitespace(text2);
+  return match ? match.prefix + match.whitespace : text2;
 }
 
 // core/generate-text/parse-tool-call.ts
@@ -3174,7 +3185,7 @@ function parseToolCall({
 
 // core/generate-text/to-response-messages.ts
 function toResponseMessages({
-  text = "",
+  text: text2 = "",
   tools,
   toolCalls,
   toolResults
@@ -3182,7 +3193,7 @@ function toResponseMessages({
   const responseMessages = [];
   responseMessages.push({
     role: "assistant",
-    content: [{ type: "text", text }, ...toolCalls]
+    content: [{ type: "text", text: text2 }, ...toolCalls]
   });
   if (toolResults.length > 0) {
     responseMessages.push({
@@ -3218,10 +3229,11 @@ async function generateText({
   system,
   prompt,
   messages,
-  maxRetries,
+  maxRetries: maxRetriesArg,
   abortSignal,
   headers,
   maxSteps = 1,
+  experimental_output: output,
   experimental_continueSteps: continueSteps = false,
   experimental_telemetry: telemetry,
   experimental_providerMetadata: providerMetadata,
@@ -3233,6 +3245,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
+  var _a11;
   if (maxSteps < 1) {
     throw new InvalidArgumentError({
       parameter: "maxSteps",
@@ -3240,6 +3253,7 @@ async function generateText({
       message: "maxSteps must be at least 1"
     });
   }
+  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
     model,
     telemetry,
@@ -3247,7 +3261,11 @@ async function generateText({
     settings: { ...settings, maxRetries }
   });
   const initialPrompt = standardizePrompt({
-    prompt: { system, prompt, messages },
+    prompt: {
+      system: (_a11 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a11 : system,
+      prompt,
+      messages
+    },
     tools
   });
   const tracer = getTracer(telemetry);
@@ -3270,8 +3288,7 @@ async function generateText({
     }),
     tracer,
     fn: async (span) => {
-      var _a11, _b, _c, _d, _e, _f;
-      const retry = retryWithExponentialBackoff({ maxRetries });
+      var _a12, _b, _c, _d, _e, _f;
      const mode = {
        type: "regular",
        ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
@@ -3282,7 +3299,7 @@ async function generateText({
      let currentToolResults = [];
      let stepCount = 0;
      const responseMessages = [];
-      let text = "";
+      let text2 = "";
      const steps = [];
      const usage = {
        completionTokens: 0,
@@ -3323,8 +3340,8 @@ async function generateText({
            "ai.prompt.tools": {
              // convert the language model level tools:
              input: () => {
-                var _a12;
-                return (_a12 = mode.tools) == null ? void 0 : _a12.map((tool2) => JSON.stringify(tool2));
+                var _a13;
+                return (_a13 = mode.tools) == null ? void 0 : _a13.map((tool2) => JSON.stringify(tool2));
              }
            },
            "ai.prompt.toolChoice": {
@@ -3344,18 +3361,19 @@ async function generateText({
          }),
          tracer,
          fn: async (span2) => {
-            var _a12, _b2, _c2, _d2, _e2, _f2;
+            var _a13, _b2, _c2, _d2, _e2, _f2;
            const result = await model.doGenerate({
              mode,
              ...callSettings,
              inputFormat: promptFormat,
+              responseFormat: output == null ? void 0 : output.responseFormat({ model }),
              prompt: promptMessages,
              providerMetadata,
              abortSignal,
              headers
            });
            const responseData = {
-              id: (_b2 = (_a12 = result.response) == null ? void 0 : _a12.id) != null ? _b2 : generateId3(),
+              id: (_b2 = (_a13 = result.response) == null ? void 0 : _a13.id) != null ? _b2 : generateId3(),
              timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
              modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
            };
@@ -3388,7 +3406,7 @@ async function generateText({
          }
        })
      );
-      currentToolCalls = ((_a11 = currentModelResponse.toolCalls) != null ? _a11 : []).map(
+      currentToolCalls = ((_a12 = currentModelResponse.toolCalls) != null ? _a12 : []).map(
        (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
      );
      currentToolResults = tools == null ? [] : await executeTools({
@@ -3420,9 +3438,9 @@ async function generateText({
      }
      const originalText = (_b = currentModelResponse.text) != null ? _b : "";
      const stepTextLeadingWhitespaceTrimmed = stepType === "continue" && // only for continue steps
-      text.trimEnd() !== text ? originalText.trimStart() : originalText;
+      text2.trimEnd() !== text2 ? originalText.trimStart() : originalText;
      const stepText = nextStepType === "continue" ? removeTextAfterLastWhitespace(stepTextLeadingWhitespaceTrimmed) : stepTextLeadingWhitespaceTrimmed;
-      text = nextStepType === "continue" || stepType === "continue" ? text + stepText : stepText;
+      text2 = nextStepType === "continue" || stepType === "continue" ? text2 + stepText : stepText;
      if (stepType === "continue") {
        const lastMessage = responseMessages[responseMessages.length - 1];
        if (typeof lastMessage.content === "string") {
@@ -3436,7 +3454,7 @@ async function generateText({
      } else {
        responseMessages.push(
          ...toResponseMessages({
-            text,
+            text: text2,
            tools: tools != null ? tools : {},
            toolCalls: currentToolCalls,
            toolResults: currentToolResults
@@ -3483,7 +3501,8 @@ async function generateText({
        })
      );
      return new DefaultGenerateTextResult({
-        text,
+        text: text2,
+        output: output == null ? void 0 : output.parseOutput({ text: text2 }),
        toolCalls: currentToolCalls,
        toolResults: currentToolResults,
        finishReason: currentModelResponse.finishReason,
@@ -3579,6 +3598,7 @@ var DefaultGenerateTextResult = class {
    this.steps = options.steps;
    this.experimental_providerMetadata = options.providerMetadata;
    this.logprobs = options.logprobs;
+    this.experimental_output = options.output;
  }
 };
 
@@ -3930,7 +3950,7 @@ var DefaultStreamTextResult = class {
     telemetry,
     headers,
     settings,
-    maxRetries,
+    maxRetries: maxRetriesArg,
     abortSignal,
     system,
     prompt,
@@ -3967,6 +3987,9 @@ var DefaultStreamTextResult = class {
         message: "maxSteps must be at least 1"
       });
     }
+    const { maxRetries, retry } = prepareRetries({
+      maxRetries: maxRetriesArg
+    });
     const tracer = getTracer(telemetry);
     const baseTelemetryAttributes = getBaseTelemetryAttributes({
       model,
@@ -3996,7 +4019,6 @@ var DefaultStreamTextResult = class {
       tracer,
       endWhenDone: false,
       fn: async (rootSpan) => {
-        const retry = retryWithExponentialBackoff({ maxRetries });
         const stepResults = [];
         async function streamStep({
           currentStep,
@@ -4668,6 +4690,46 @@ var DefaultStreamTextResult = class {
   }
 };
 
+// core/generate-text/output.ts
+var output_exports = {};
+__export(output_exports, {
+  object: () => object,
+  text: () => text
+});
+import { parseJSON } from "@ai-sdk/provider-utils";
+import { asSchema as asSchema4 } from "@ai-sdk/ui-utils";
+var text = () => ({
+  type: "text",
+  responseFormat: () => ({ type: "text" }),
+  injectIntoSystemPrompt({ system }) {
+    return system;
+  },
+  parseOutput({ text: text2 }) {
+    return text2;
+  }
+});
+var object = ({
+  schema: inputSchema
+}) => {
+  const schema = asSchema4(inputSchema);
+  return {
+    type: "object",
+    responseFormat: ({ model }) => ({
+      type: "json",
+      schema: model.supportsStructuredOutputs ? schema.jsonSchema : void 0
+    }),
+    injectIntoSystemPrompt({ system, model }) {
+      return model.supportsStructuredOutputs ? system : injectJsonInstruction({
+        prompt: system,
+        schema: schema.jsonSchema
+      });
+    },
+    parseOutput({ text: text2 }) {
+      return parseJSON({ text: text2, schema });
+    }
+  };
+};
+
 // core/middleware/wrap-language-model.ts
 var experimental_wrapLanguageModel = ({
   model,
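This new `core/generate-text/output.ts` module also explains the long tail of `text` → `text2` and `object` → `object2` renames above: the bundler renames the pre-existing locals to make room for the new top-level `text` and `object` helpers. For consumers, the feature surfaces as the `Output` namespace export (see the export hunk below) plus the `experimental_output` option and result field on `generateText`. A usage sketch (model setup assumed; the result field is taken from `DefaultGenerateTextResult` above):

```js
import { generateText, Output } from "ai";
import { z } from "zod";

const result = await generateText({
  model, // assumed, already-configured language model
  prompt: "Extract the city: 'I moved to Berlin last spring.'",
  experimental_output: Output.object({
    schema: z.object({ city: z.string() }),
  }),
});

console.log(result.experimental_output); // e.g. { city: "Berlin" }
```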
@@ -5147,13 +5209,13 @@ function toDataStreamResponse2(stream, options = {}) {
 }
 function trimStartOfStream() {
   let isStreamStart = true;
-  return (text) => {
+  return (text2) => {
     if (isStreamStart) {
-      text = text.trimStart();
-      if (text)
+      text2 = text2.trimStart();
+      if (text2)
         isStreamStart = false;
     }
-    return text;
+    return text2;
   };
 }
 export {
@@ -5178,6 +5240,7 @@ export {
   NoSuchModelError,
   NoSuchProviderError,
   NoSuchToolError,
+  output_exports as Output,
   RetryError,
   StreamData,
   TypeValidationError2 as TypeValidationError,