ai 3.0.19 → 3.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/anthropic/dist/index.d.mts +1 -319
- package/anthropic/dist/index.d.ts +1 -319
- package/anthropic/dist/index.js +44 -216
- package/anthropic/dist/index.js.map +1 -1
- package/anthropic/dist/index.mjs +32 -200
- package/anthropic/dist/index.mjs.map +1 -1
- package/dist/index.d.mts +60 -333
- package/dist/index.d.ts +60 -333
- package/dist/index.js +152 -441
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +127 -412
- package/dist/index.mjs.map +1 -1
- package/google/dist/index.d.mts +1 -319
- package/google/dist/index.d.ts +1 -319
- package/google/dist/index.js +75 -229
- package/google/dist/index.js.map +1 -1
- package/google/dist/index.mjs +63 -213
- package/google/dist/index.mjs.map +1 -1
- package/mistral/dist/index.d.mts +1 -319
- package/mistral/dist/index.d.ts +1 -319
- package/mistral/dist/index.js +32 -205
- package/mistral/dist/index.js.map +1 -1
- package/mistral/dist/index.mjs +20 -189
- package/mistral/dist/index.mjs.map +1 -1
- package/openai/dist/index.d.mts +2 -320
- package/openai/dist/index.d.ts +2 -320
- package/openai/dist/index.js +69 -280
- package/openai/dist/index.js.map +1 -1
- package/openai/dist/index.mjs +57 -260
- package/openai/dist/index.mjs.map +1 -1
- package/package.json +4 -10
- package/react/dist/index.d.mts +8 -4
- package/react/dist/index.d.ts +12 -6
- package/react/dist/index.js +23 -105
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +22 -105
- package/react/dist/index.mjs.map +1 -1
- package/react/dist/index.server.d.mts +4 -2
- package/react/dist/index.server.d.ts +4 -2
- package/react/dist/index.server.js.map +1 -1
- package/react/dist/index.server.mjs.map +1 -1
- package/rsc/dist/rsc-server.mjs +6 -16
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/solid/dist/index.d.mts +1 -1
- package/solid/dist/index.d.ts +1 -1
- package/solid/dist/index.js +19 -104
- package/solid/dist/index.js.map +1 -1
- package/solid/dist/index.mjs +19 -104
- package/solid/dist/index.mjs.map +1 -1
- package/svelte/dist/index.d.mts +1 -1
- package/svelte/dist/index.d.ts +1 -1
- package/svelte/dist/index.js +19 -104
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +19 -104
- package/svelte/dist/index.mjs.map +1 -1
- package/vue/dist/index.d.mts +1 -1
- package/vue/dist/index.d.ts +1 -1
- package/vue/dist/index.js +19 -104
- package/vue/dist/index.js.map +1 -1
- package/vue/dist/index.mjs +19 -104
- package/vue/dist/index.mjs.map +1 -1
- package/spec/dist/index.d.mts +0 -752
- package/spec/dist/index.d.ts +0 -752
- package/spec/dist/index.js +0 -873
- package/spec/dist/index.js.map +0 -1
- package/spec/dist/index.mjs +0 -807
- package/spec/dist/index.mjs.map +0 -1
package/dist/index.mjs
CHANGED
@@ -1,115 +1,7 @@
-// spec/errors/api-call-error.ts
-var APICallError = class extends Error {
-  constructor({
-    message,
-    url,
-    requestBodyValues,
-    statusCode,
-    responseBody,
-    cause,
-    isRetryable = statusCode != null && (statusCode === 408 || // request timeout
-    statusCode === 409 || // conflict
-    statusCode === 429 || // too many requests
-    statusCode >= 500),
-    // server error
-    data
-  }) {
-    super(message);
-    this.name = "AI_APICallError";
-    this.url = url;
-    this.requestBodyValues = requestBodyValues;
-    this.statusCode = statusCode;
-    this.responseBody = responseBody;
-    this.cause = cause;
-    this.isRetryable = isRetryable;
-    this.data = data;
-  }
-  static isAPICallError(error) {
-    return error instanceof Error && error.name === "AI_APICallError" && typeof error.url === "string" && typeof error.requestBodyValues === "object" && (error.statusCode == null || typeof error.statusCode === "number") && (error.responseBody == null || typeof error.responseBody === "string") && (error.cause == null || typeof error.cause === "object") && typeof error.isRetryable === "boolean" && (error.data == null || typeof error.data === "object");
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      url: this.url,
-      requestBodyValues: this.requestBodyValues,
-      statusCode: this.statusCode,
-      responseBody: this.responseBody,
-      cause: this.cause,
-      isRetryable: this.isRetryable,
-      data: this.data
-    };
-  }
-};
-
-// spec/errors/invalid-argument-error.ts
-var InvalidArgumentError = class extends Error {
-  constructor({
-    parameter,
-    value,
-    message
-  }) {
-    super(`Invalid argument for parameter ${parameter}: ${message}`);
-    this.name = "AI_InvalidArgumentError";
-    this.parameter = parameter;
-    this.value = value;
-  }
-  static isInvalidArgumentError(error) {
-    return error instanceof Error && error.name === "AI_InvalidArgumentError" && typeof error.parameter === "string" && typeof error.value === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      parameter: this.parameter,
-      value: this.value
-    };
-  }
-};
-
-// spec/errors/invalid-data-content-error.ts
-var InvalidDataContentError = class extends Error {
-  constructor({
-    content,
-    message = `Invalid data content. Expected a string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
-  }) {
-    super(message);
-    this.name = "AI_InvalidDataContentError";
-    this.content = content;
-  }
-  static isInvalidDataContentError(error) {
-    return error instanceof Error && error.name === "AI_InvalidDataContentError" && error.content != null;
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      content: this.content
-    };
-  }
-};
-
-// spec/errors/invalid-prompt-error.ts
-var InvalidPromptError = class extends Error {
-  constructor({ prompt: prompt2, message }) {
-    super(`Invalid prompt: ${message}`);
-    this.name = "AI_InvalidPromptError";
-    this.prompt = prompt2;
-  }
-  static isInvalidPromptError(error) {
-    return error instanceof Error && error.name === "AI_InvalidPromptError" && prompt != null;
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      prompt: this.prompt
-    };
-  }
-};
+// core/generate-object/generate-object.ts
+import {
+  NoTextGeneratedError
+} from "@ai-sdk/provider";
 
 // spec/util/get-error-message.ts
 function getErrorMessage(error) {
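
The hunk above drops the error classes that 3.0.19 bundled inline (APICallError, InvalidArgumentError, InvalidDataContentError, InvalidPromptError, and the ones removed further down) and imports them from @ai-sdk/provider instead. A minimal consumer-side sketch, assuming the provider package exposes the same static type guards the removed classes defined (isAPICallError, isRetryError); the wrapper function is illustrative and not part of the package:

    // Sketch only: error classes now come from @ai-sdk/provider.
    import { APICallError, RetryError } from "@ai-sdk/provider";

    async function callWithErrorHandling(fn) {
      try {
        return await fn();
      } catch (error) {
        if (APICallError.isAPICallError(error)) {
          // statusCode and isRetryable mirror the removed class body shown above.
          console.error("provider call failed:", error.statusCode, "retryable:", error.isRetryable);
        } else if (RetryError.isRetryError(error)) {
          console.error("retries exhausted after", error.errors.length, "attempts");
        }
        throw error;
      }
    }
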
@@ -126,59 +18,11 @@ function getErrorMessage(error) {
 }
 
 // spec/util/parse-json.ts
+import { JSONParseError, TypeValidationError as TypeValidationError2 } from "@ai-sdk/provider";
 import SecureJSON from "secure-json-parse";
 
-// spec/errors/json-parse-error.ts
-var JSONParseError = class extends Error {
-  constructor({ text, cause }) {
-    super(
-      `JSON parsing failed: Text: ${text}.
-Error message: ${getErrorMessage(cause)}`
-    );
-    this.name = "AI_JSONParseError";
-    this.cause = cause;
-    this.text = text;
-  }
-  static isJSONParseError(error) {
-    return error instanceof Error && error.name === "AI_JSONParseError" && typeof error.text === "string" && typeof error.cause === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      cause: this.cause,
-      stack: this.stack,
-      valueText: this.text
-    };
-  }
-};
-
-// spec/errors/type-validation-error.ts
-var TypeValidationError = class extends Error {
-  constructor({ value, cause }) {
-    super(
-      `Type validation failed: Value: ${JSON.stringify(value)}.
-Error message: ${getErrorMessage(cause)}`
-    );
-    this.name = "AI_TypeValidationError";
-    this.cause = cause;
-    this.value = value;
-  }
-  static isTypeValidationError(error) {
-    return error instanceof Error && error.name === "AI_TypeValidationError" && typeof error.value === "string" && typeof error.cause === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      cause: this.cause,
-      stack: this.stack,
-      value: this.value
-    };
-  }
-};
-
 // spec/util/validate-types.ts
+import { TypeValidationError } from "@ai-sdk/provider";
 function safeValidateTypes({
   value,
   schema
@@ -242,109 +86,6 @@ function convertUint8ArrayToBase64(array) {
   return globalThis.btoa(latin1string);
 }
 
-// spec/errors/invalid-tool-arguments-error.ts
-var InvalidToolArgumentsError = class extends Error {
-  constructor({
-    toolArgs,
-    toolName,
-    cause,
-    message = `Invalid arguments for tool ${toolName}: ${getErrorMessage(
-      cause
-    )}`
-  }) {
-    super(message);
-    this.name = "AI_InvalidToolArgumentsError";
-    this.toolArgs = toolArgs;
-    this.toolName = toolName;
-    this.cause = cause;
-  }
-  static isInvalidToolArgumentsError(error) {
-    return error instanceof Error && error.name === "AI_InvalidToolArgumentsError" && typeof error.toolName === "string" && typeof error.toolArgs === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      cause: this.cause,
-      stack: this.stack,
-      toolName: this.toolName,
-      toolArgs: this.toolArgs
-    };
-  }
-};
-
-// spec/errors/no-object-generated-error.ts
-var NoTextGeneratedError = class extends Error {
-  constructor() {
-    super(`No text generated.`);
-    this.name = "AI_NoTextGeneratedError";
-  }
-  static isNoTextGeneratedError(error) {
-    return error instanceof Error && error.name === "AI_NoTextGeneratedError";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      cause: this.cause,
-      message: this.message,
-      stack: this.stack
-    };
-  }
-};
-
-// spec/errors/no-such-tool-error.ts
-var NoSuchToolError = class extends Error {
-  constructor({
-    toolName,
-    availableTools = void 0,
-    message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
-  }) {
-    super(message);
-    this.name = "AI_NoSuchToolError";
-    this.toolName = toolName;
-    this.availableTools = availableTools;
-  }
-  static isNoSuchToolError(error) {
-    return error instanceof Error && error.name === "AI_NoSuchToolError" && "toolName" in error && error.toolName != void 0 && typeof error.name === "string";
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      stack: this.stack,
-      toolName: this.toolName,
-      availableTools: this.availableTools
-    };
-  }
-};
-
-// spec/errors/retry-error.ts
-var RetryError = class extends Error {
-  constructor({
-    message,
-    reason,
-    errors
-  }) {
-    super(message);
-    this.name = "AI_RetryError";
-    this.reason = reason;
-    this.errors = errors;
-    this.lastError = errors[errors.length - 1];
-  }
-  static isRetryError(error) {
-    return error instanceof Error && error.name === "AI_RetryError" && typeof error.reason === "string" && Array.isArray(error.errors);
-  }
-  toJSON() {
-    return {
-      name: this.name,
-      message: this.message,
-      reason: this.reason,
-      lastError: this.lastError,
-      errors: this.errors
-    };
-  }
-};
-
 // core/generate-text/token-usage.ts
 function calculateTokenUsage(usage) {
   return {
@@ -371,6 +112,7 @@ function detectImageMimeType(image) {
 }
 
 // core/prompt/data-content.ts
+import { InvalidDataContentError } from "@ai-sdk/provider";
 function convertDataContentToBase64String(content) {
   if (typeof content === "string") {
     return content;
@@ -394,22 +136,22 @@ function convertDataContentToUint8Array(content) {
 }
 
 // core/prompt/convert-to-language-model-prompt.ts
-function convertToLanguageModelPrompt(prompt2) {
+function convertToLanguageModelPrompt(prompt) {
   const languageModelMessages = [];
-  if (prompt2.system != null) {
-    languageModelMessages.push({ role: "system", content: prompt2.system });
+  if (prompt.system != null) {
+    languageModelMessages.push({ role: "system", content: prompt.system });
   }
-  switch (prompt2.type) {
+  switch (prompt.type) {
     case "prompt": {
       languageModelMessages.push({
         role: "user",
-        content: [{ type: "text", text: prompt2.prompt }]
+        content: [{ type: "text", text: prompt.prompt }]
       });
       break;
     }
     case "messages": {
       languageModelMessages.push(
-        ...prompt2.messages.map((message) => {
+        ...prompt.messages.map((message) => {
          switch (message.role) {
            case "user": {
              if (typeof message.content === "string") {
@@ -467,7 +209,7 @@ function convertToLanguageModelPrompt(prompt2) {
       break;
     }
     default: {
-      const _exhaustiveCheck = prompt2;
+      const _exhaustiveCheck = prompt;
       throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
     }
   }
@@ -475,34 +217,36 @@ function convertToLanguageModelPrompt(prompt2) {
 }
 
 // core/prompt/get-validated-prompt.ts
-function getValidatedPrompt(prompt2) {
-  if (prompt2.prompt == null && prompt2.messages == null) {
+import { InvalidPromptError } from "@ai-sdk/provider";
+function getValidatedPrompt(prompt) {
+  if (prompt.prompt == null && prompt.messages == null) {
     throw new InvalidPromptError({
-      prompt: prompt2,
+      prompt,
       message: "prompt or messages must be defined"
     });
   }
-  if (prompt2.prompt != null && prompt2.messages != null) {
+  if (prompt.prompt != null && prompt.messages != null) {
     throw new InvalidPromptError({
-      prompt: prompt2,
+      prompt,
      message: "prompt and messages cannot be defined at the same time"
    });
  }
-  return prompt2.prompt != null ? {
+  return prompt.prompt != null ? {
    type: "prompt",
-    prompt: prompt2.prompt,
+    prompt: prompt.prompt,
    messages: void 0,
-    system: prompt2.system
+    system: prompt.system
  } : {
    type: "messages",
    prompt: void 0,
-    messages: prompt2.messages,
+    messages: prompt.messages,
    // only possible case bc of checks above
-    system: prompt2.system
+    system: prompt.system
  };
 }
 
 // core/prompt/prepare-call-settings.ts
+import { InvalidArgumentError } from "@ai-sdk/provider";
 function prepareCallSettings({
   maxTokens,
   temperature,
@@ -634,6 +378,9 @@ function convertZodToJSONSchema(zodSchema) {
   return zodToJsonSchema(zodSchema);
 }
 
+// core/util/retry-with-exponential-backoff.ts
+import { APICallError, RetryError } from "@ai-sdk/provider";
+
 // core/util/delay.ts
 async function delay(delayInMs) {
   return new Promise((resolve) => setTimeout(resolve, delayInMs));
@@ -717,7 +464,7 @@ async function experimental_generateObject({
   schema,
   mode,
   system,
-  prompt: prompt2,
+  prompt,
   messages,
   maxRetries,
   abortSignal,
@@ -737,7 +484,7 @@ async function experimental_generateObject({
     case "json": {
       const validatedPrompt = getValidatedPrompt({
         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
-        prompt: prompt2,
+        prompt,
         messages
       });
       const generateResult = await retry(() => {
@@ -761,7 +508,7 @@ async function experimental_generateObject({
     case "grammar": {
       const validatedPrompt = getValidatedPrompt({
         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
-        prompt: prompt2,
+        prompt,
         messages
       });
       const generateResult = await retry(
@@ -785,7 +532,7 @@ async function experimental_generateObject({
     case "tool": {
       const validatedPrompt = getValidatedPrompt({
         system,
-        prompt: prompt2,
+        prompt,
         messages
       });
       const generateResult = await retry(
@@ -1238,7 +985,7 @@ async function experimental_streamObject({
   schema,
   mode,
   system,
-  prompt: prompt2,
+  prompt,
   messages,
   maxRetries,
   abortSignal,
@@ -1255,7 +1002,7 @@ async function experimental_streamObject({
     case "json": {
       const validatedPrompt = getValidatedPrompt({
         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
-        prompt: prompt2,
+        prompt,
         messages
       });
       callOptions = {
@@ -1282,7 +1029,7 @@ async function experimental_streamObject({
     case "grammar": {
       const validatedPrompt = getValidatedPrompt({
         system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
-        prompt: prompt2,
+        prompt,
         messages
       });
       callOptions = {
@@ -1309,7 +1056,7 @@ async function experimental_streamObject({
     case "tool": {
       const validatedPrompt = getValidatedPrompt({
         system,
-        prompt: prompt2,
+        prompt,
         messages
       });
       callOptions = {
@@ -1387,6 +1134,10 @@ var StreamObjectResult = class {
 };
 
 // core/generate-text/tool-call.ts
+import {
+  InvalidToolArgumentsError,
+  NoSuchToolError
+} from "@ai-sdk/provider";
 function parseToolCall({
   toolCall,
   tools
@@ -1426,7 +1177,7 @@ async function experimental_generateText({
   model,
   tools,
   system,
-  prompt: prompt2,
+  prompt,
   messages,
   maxRetries,
   abortSignal,
@@ -1434,7 +1185,7 @@ async function experimental_generateText({
 }) {
   var _a, _b;
   const retry = retryWithExponentialBackoff({ maxRetries });
-  const validatedPrompt = getValidatedPrompt({ system, prompt: prompt2, messages });
+  const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
   const modelResponse = await retry(() => {
     return model.doGenerate({
       mode: {
@@ -1503,6 +1254,9 @@ var GenerateTextResult = class {
   }
 };
 
+// core/generate-text/run-tools-transformation.ts
+import { NoSuchToolError as NoSuchToolError2 } from "@ai-sdk/provider";
+
 // shared/generate-id.ts
 import { customAlphabet } from "nanoid/non-secure";
 var generateId = customAlphabet(
@@ -1537,7 +1291,7 @@ function runToolsTransformation({
         if (tools == null) {
           toolResultsStreamController.enqueue({
             type: "error",
-            error: new NoSuchToolError({ toolName: chunk.toolName })
+            error: new NoSuchToolError2({ toolName: chunk.toolName })
           });
           break;
         }
@@ -1545,7 +1299,7 @@ function runToolsTransformation({
         if (tool2 == null) {
           toolResultsStreamController.enqueue({
             type: "error",
-            error: new NoSuchToolError({
+            error: new NoSuchToolError2({
               toolName: chunk.toolName,
               availableTools: Object.keys(tools)
             })
@@ -1651,14 +1405,14 @@ async function experimental_streamText({
   model,
   tools,
   system,
-  prompt: prompt2,
+  prompt,
   messages,
   maxRetries,
   abortSignal,
   ...settings
 }) {
   const retry = retryWithExponentialBackoff({ maxRetries });
-  const validatedPrompt = getValidatedPrompt({ system, prompt: prompt2, messages });
+  const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
   const { stream, warnings } = await retry(
     () => model.doStream({
       mode: {
@@ -1739,8 +1493,31 @@ var StreamTextResult = class {
   @returns an `AIStream` object.
    */
  toAIStream(callbacks) {
-    return readableFromAsyncIterable(this.textStream).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    return readableFromAsyncIterable(this.textStream).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
+  }
+  /**
+  Creates a simple text stream response.
+  Each text delta is encoded as UTF-8 and sent as a separate chunk.
+  Non-text-delta events are ignored.
+   */
+  toTextStreamResponse(init) {
+    const encoder = new TextEncoder();
+    return new Response(
+      this.textStream.pipeThrough(
+        new TransformStream({
+          transform(chunk, controller) {
+            controller.enqueue(encoder.encode(chunk));
+          }
+        })
+      ),
+      {
+        ...init,
+        status: 200,
+        headers: {
+          "Content-Type": "text/plain; charset=utf-8",
+          ...init == null ? void 0 : init.headers
+        }
+      }
     );
   }
 };
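
The added lines give StreamTextResult a toTextStreamResponse(init) method that streams each text delta as a UTF-8 chunk in a plain text/plain Response. A usage sketch against this version's exports; the handler factory and the model argument are illustrative assumptions, not taken from the diff:

    import { experimental_streamText } from "ai";

    // `model` is assumed to be a configured provider model instance; it is not part of this diff.
    export function createTextStreamHandler(model) {
      return async function POST(req) {
        const { prompt } = await req.json();
        const result = await experimental_streamText({ model, prompt });
        // Mirrors the added implementation: init is spread into the Response,
        // the status is 200 and Content-Type is text/plain.
        return result.toTextStreamResponse({ headers: { "Cache-Control": "no-store" } });
      };
    }
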
@@ -1946,7 +1723,6 @@ function createChunkDecoder(complex) {
   };
 }
 var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
-var COMPLEX_HEADER = "X-Experimental-Stream-Data";
 
 // streams/ai-stream.ts
 import {
@@ -2073,7 +1849,7 @@ function readableFromAsyncIterable(iterable) {
 }
 
 // streams/stream-data.ts
-var experimental_StreamData = class {
+var StreamData = class {
   constructor() {
     this.encoder = new TextEncoder();
     this.controller = null;
@@ -2159,14 +1935,7 @@ var experimental_StreamData = class {
     this.messageAnnotations.push(value);
   }
 };
-function createStreamDataTransformer(experimental_streamData) {
-  if (!experimental_streamData) {
-    return new TransformStream({
-      transform: async (chunk, controller) => {
-        controller.enqueue(chunk);
-      }
-    });
-  }
+function createStreamDataTransformer() {
   const encoder = new TextEncoder();
   const decoder = new TextDecoder();
   return new TransformStream({
@@ -2176,6 +1945,8 @@ function createStreamDataTransformer(experimental_streamData) {
     }
   });
 }
+var experimental_StreamData = class extends StreamData {
+};
 
 // streams/anthropic-stream.ts
 function parseAnthropicStream() {
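
The hunk above completes the rename of experimental_StreamData to StreamData (the old name is kept as a subclass alias), and createStreamDataTransformer() no longer takes the experimental_streamData flag: the stream-data protocol is always applied. A hedged sketch of the renamed API; the append payload and the onFinal wiring are illustrative, assuming StreamData keeps the append/close behavior of the old experimental class:

    import { OpenAIStream, StreamData, StreamingTextResponse } from "ai";

    // Sketch: attaching side-channel data to a streamed completion with the renamed class.
    export function streamWithData(openaiResponse) {
      const data = new StreamData();
      data.append({ source: "example" }); // illustrative payload, not from the diff

      const stream = OpenAIStream(openaiResponse, {
        onFinal() {
          // close() flushes the data channel once the text stream is done.
          data.close();
        }
      });

      return new StreamingTextResponse(stream, {}, data);
    }
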
@@ -2215,16 +1986,16 @@ async function* streamable(stream) {
 }
 function AnthropicStream(res, cb) {
   if (Symbol.asyncIterator in res) {
-    return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+    return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
   } else {
     return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
-      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+      createStreamDataTransformer()
     );
   }
 }
 
 // streams/assistant-response.ts
-function experimental_AssistantResponse({ threadId, messageId }, process2) {
+function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
     async start(controller) {
       var _a;
@@ -2315,6 +2086,7 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
     }
   });
 }
+var experimental_AssistantResponse = AssistantResponse;
 
 // streams/aws-bedrock-stream.ts
 async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
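
The added line keeps experimental_AssistantResponse as a plain alias of the renamed AssistantResponse, so both names refer to the same function. A tiny sketch of what that implies for existing imports (assuming the experimental_ name remains exported, which the alias exists to support):

    import { AssistantResponse, experimental_AssistantResponse } from "ai";

    // Both exports point at the same function after this change
    // (`var experimental_AssistantResponse = AssistantResponse;` above),
    // so code importing the experimental_ name keeps working.
    console.log(Object.is(AssistantResponse, experimental_AssistantResponse)); // true
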
@@ -2342,16 +2114,7 @@ function AWSBedrockAnthropicStream(response, callbacks) {
   return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
 }
 function AWSBedrockCohereStream(response, callbacks) {
-  return AWSBedrockStream(
-    response,
-    callbacks,
-    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
-    // so we take the full generation:
-    (chunk) => {
-      var _a, _b;
-      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
-    }
-  );
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk == null ? void 0 : chunk.text);
 }
 function AWSBedrockLlama2Stream(response, callbacks) {
   return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
@@ -2359,9 +2122,7 @@ function AWSBedrockLlama2Stream(response, callbacks) {
 function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
   return readableFromAsyncIterable(
     asDeltaIterable(response, extractTextDeltaFromChunk)
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 
 // streams/cohere-stream.ts
@@ -2416,13 +2177,9 @@ async function* streamable2(stream) {
 }
 function CohereStream(reader, callbacks) {
   if (Symbol.asyncIterator in reader) {
-    return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    );
+    return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
   } else {
-    return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    );
+    return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
   }
 }
 
@@ -2441,7 +2198,7 @@ async function* streamable3(response) {
   }
 }
 function GoogleGenerativeAIStream(response, cb) {
-  return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData));
+  return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
 }
 
 // streams/huggingface-stream.ts
@@ -2469,9 +2226,7 @@ function createParser3(res) {
   });
 }
 function HuggingFaceStream(res, callbacks) {
-  return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
+  return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 
 // streams/inkeep-stream.ts
@@ -2508,7 +2263,7 @@ function InkeepStream(res, callbacks) {
     }
   };
   return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+    createStreamDataTransformer()
   );
 }
 
@@ -2533,9 +2288,7 @@ function LangChainStream(callbacks) {
     }
   };
   return {
-    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-      createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-    ),
+    stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer()),
     writer,
     handlers: {
       handleLLMNewToken: async (token) => {
@@ -2586,9 +2339,7 @@ async function* streamable4(stream) {
 }
 function MistralStream(response, callbacks) {
   const stream = readableFromAsyncIterable(streamable4(response));
-  return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
-    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
-  );
+  return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
 }
 
 // streams/openai-stream.ts
@@ -2732,9 +2483,7 @@ function OpenAIStream(res, callbacks) {
     const functionCallTransformer = createFunctionCallTransformer(cb);
     return stream.pipeThrough(functionCallTransformer);
   } else {
-    return stream.pipeThrough(
-      createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
-    );
+    return stream.pipeThrough(createStreamDataTransformer());
   }
 }
 function createFunctionCallTransformer(callbacks) {
@@ -2744,7 +2493,6 @@ function createFunctionCallTransformer(callbacks) {
   let aggregatedFinalCompletionResponse = "";
   let isFunctionStreamingIn = false;
   let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
-  const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
   const decode = createChunkDecoder();
   return new TransformStream({
     async transform(chunk, controller) {
@@ -2759,7 +2507,7 @@ function createFunctionCallTransformer(callbacks) {
       }
       if (!isFunctionStreamingIn) {
         controller.enqueue(
-          isComplexMode ? textEncoder.encode(formatStreamPart("text", message)) : chunk
+          textEncoder.encode(formatStreamPart("text", message))
         );
         return;
       } else {
@@ -2870,17 +2618,17 @@ function createFunctionCallTransformer(callbacks) {
       if (!functionResponse) {
         controller.enqueue(
           textEncoder.encode(
-            isComplexMode ? formatStreamPart(
+            formatStreamPart(
               payload.function_call ? "function_call" : "tool_calls",
               // parse to prevent double-encoding:
               JSON.parse(aggregatedResponse)
-            ) : aggregatedResponse
+            )
           )
         );
         return;
       } else if (typeof functionResponse === "string") {
         controller.enqueue(
-          isComplexMode ? textEncoder.encode(formatStreamPart("text", functionResponse)) : functionResponse
+          textEncoder.encode(formatStreamPart("text", functionResponse))
         );
         aggregatedFinalCompletionResponse = functionResponse;
         return;
@@ -2930,7 +2678,7 @@ async function ReplicateStream(res, cb, options) {
     }
   });
   return AIStream(eventStream, void 0, cb).pipeThrough(
-    createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
+    createStreamDataTransformer()
   );
 }
 
@@ -3091,74 +2839,41 @@ async function parseComplexResponse({
 // streams/streaming-react-response.ts
 var experimental_StreamingReactResponse = class {
   constructor(res, options) {
-    var _a;
+    var _a, _b;
     let resolveFunc = () => {
     };
     let next = new Promise((resolve) => {
       resolveFunc = resolve;
     });
-    if ((options == null ? void 0 : options.data) != null) {
-      const processedStream = res.pipeThrough(
-        (_a = options == null ? void 0 : options.data) == null ? void 0 : _a.stream
-      );
-      let lastPayload = void 0;
-      parseComplexResponse({
-        reader: processedStream.getReader(),
-        update: (merged, data) => {
-          var _a2, _b, _c;
-          const content = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
-          const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content, data })) || content;
-          const payload = { ui, content };
-          const resolvePrevious = resolveFunc;
-          const nextRow = new Promise((resolve) => {
-            resolveFunc = resolve;
-          });
-          resolvePrevious({
-            next: nextRow,
-            ...payload
+    const processedStream = (options == null ? void 0 : options.data) != null ? res.pipeThrough((_a = options == null ? void 0 : options.data) == null ? void 0 : _a.stream) : res;
+    let lastPayload = void 0;
+    parseComplexResponse({
+      reader: processedStream.getReader(),
+      update: (merged, data) => {
+        var _a2, _b2, _c;
+        const content = (_b2 = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b2 : "";
+        const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content, data })) || content;
+        const payload = { ui, content };
+        const resolvePrevious = resolveFunc;
+        const nextRow = new Promise((resolve) => {
+          resolveFunc = resolve;
+        });
+        resolvePrevious({
+          next: nextRow,
+          ...payload
+        });
+        lastPayload = payload;
+      },
+      generateId: (_b = options == null ? void 0 : options.generateId) != null ? _b : generateId,
+      onFinish: () => {
+        if (lastPayload !== void 0) {
+          resolveFunc({
+            next: null,
+            ...lastPayload
           });
-          lastPayload = payload;
-        },
-        generateId: (_a = options.generateId) != null ? _a : generateId,
-        onFinish: () => {
-          if (lastPayload !== void 0) {
-            resolveFunc({
-              next: null,
-              ...lastPayload
-            });
-          }
         }
-      });
-      return next;
-    }
-    let content = "";
-    const decode = createChunkDecoder();
-    const reader = res.getReader();
-    async function readChunk() {
-      var _a2;
-      const { done, value } = await reader.read();
-      if (!done) {
-        content += decode(value);
       }
-      const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
-      const payload = {
-        ui,
-        content
-      };
-      const resolvePrevious = resolveFunc;
-      const nextRow = done ? null : new Promise((resolve) => {
-        resolveFunc = resolve;
-      });
-      resolvePrevious({
-        next: nextRow,
-        ...payload
-      });
-      if (done) {
-        return;
-      }
-      await readChunk();
-    }
-    readChunk();
+    });
     return next;
   }
 };
@@ -3175,7 +2890,6 @@ var StreamingTextResponse = class extends Response {
       status: 200,
       headers: {
         "Content-Type": "text/plain; charset=utf-8",
-        [COMPLEX_HEADER]: data ? "true" : "false",
         ...init == null ? void 0 : init.headers
       }
     });
@@ -3207,7 +2921,7 @@ export {
   AWSBedrockLlama2Stream,
   AWSBedrockStream,
   AnthropicStream,
-  COMPLEX_HEADER,
+  AssistantResponse,
   CohereStream,
   GenerateObjectResult,
   GenerateTextResult,
@@ -3218,6 +2932,7 @@ export {
   MistralStream,
   OpenAIStream,
   ReplicateStream,
+  StreamData,
   StreamObjectResult,
   StreamTextResult,
   StreamingTextResponse,