ai 3.1.0-canary.3 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.d.mts +982 -24
- package/dist/index.d.ts +982 -24
- package/dist/index.js +1748 -175
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1723 -174
- package/dist/index.mjs.map +1 -1
- package/package.json +14 -31
- package/prompts/dist/index.d.mts +13 -1
- package/prompts/dist/index.d.ts +13 -1
- package/prompts/dist/index.js +13 -0
- package/prompts/dist/index.js.map +1 -1
- package/prompts/dist/index.mjs +12 -0
- package/prompts/dist/index.mjs.map +1 -1
- package/react/dist/index.d.mts +27 -6
- package/react/dist/index.d.ts +31 -8
- package/react/dist/index.js +155 -141
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +154 -141
- package/react/dist/index.mjs.map +1 -1
- package/react/dist/index.server.d.mts +4 -2
- package/react/dist/index.server.d.ts +4 -2
- package/react/dist/index.server.js.map +1 -1
- package/react/dist/index.server.mjs.map +1 -1
- package/rsc/dist/index.d.ts +385 -20
- package/rsc/dist/rsc-client.d.mts +1 -1
- package/rsc/dist/rsc-client.mjs +2 -0
- package/rsc/dist/rsc-client.mjs.map +1 -1
- package/rsc/dist/rsc-server.d.mts +367 -20
- package/rsc/dist/rsc-server.mjs +676 -35
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/rsc/dist/rsc-shared.d.mts +24 -9
- package/rsc/dist/rsc-shared.mjs +98 -4
- package/rsc/dist/rsc-shared.mjs.map +1 -1
- package/solid/dist/index.d.mts +7 -3
- package/solid/dist/index.d.ts +7 -3
- package/solid/dist/index.js +106 -107
- package/solid/dist/index.js.map +1 -1
- package/solid/dist/index.mjs +106 -107
- package/solid/dist/index.mjs.map +1 -1
- package/svelte/dist/index.d.mts +7 -3
- package/svelte/dist/index.d.ts +7 -3
- package/svelte/dist/index.js +109 -109
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +109 -109
- package/svelte/dist/index.mjs.map +1 -1
- package/vue/dist/index.d.mts +7 -3
- package/vue/dist/index.d.ts +7 -3
- package/vue/dist/index.js +106 -107
- package/vue/dist/index.js.map +1 -1
- package/vue/dist/index.mjs +106 -107
- package/vue/dist/index.mjs.map +1 -1
- package/ai-model-specification/dist/index.d.mts +0 -606
- package/ai-model-specification/dist/index.d.ts +0 -606
- package/ai-model-specification/dist/index.js +0 -617
- package/ai-model-specification/dist/index.js.map +0 -1
- package/ai-model-specification/dist/index.mjs +0 -560
- package/ai-model-specification/dist/index.mjs.map +0 -1
- package/core/dist/index.d.mts +0 -590
- package/core/dist/index.d.ts +0 -590
- package/core/dist/index.js +0 -1528
- package/core/dist/index.js.map +0 -1
- package/core/dist/index.mjs +0 -1481
- package/core/dist/index.mjs.map +0 -1
- package/provider/dist/index.d.mts +0 -429
- package/provider/dist/index.d.ts +0 -429
- package/provider/dist/index.js +0 -1194
- package/provider/dist/index.js.map +0 -1
- package/provider/dist/index.mjs +0 -1158
- package/provider/dist/index.mjs.map +0 -1
package/dist/index.js
CHANGED
@@ -1,7 +1,9 @@
|
|
1
1
|
"use strict";
|
2
|
+
var __create = Object.create;
|
2
3
|
var __defProp = Object.defineProperty;
|
3
4
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
4
5
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
5
7
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
6
8
|
var __export = (target, all) => {
|
7
9
|
for (var name in all)
|
@@ -15,27 +17,59 @@ var __copyProps = (to, from, except, desc) => {
|
|
15
17
|
}
|
16
18
|
return to;
|
17
19
|
};
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
26
|
+
mod
|
27
|
+
));
|
18
28
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
19
29
|
|
20
30
|
// streams/index.ts
|
21
31
|
var streams_exports = {};
|
22
32
|
__export(streams_exports, {
|
23
33
|
AIStream: () => AIStream,
|
34
|
+
APICallError: () => import_provider8.APICallError,
|
35
|
+
AWSBedrockAnthropicMessagesStream: () => AWSBedrockAnthropicMessagesStream,
|
24
36
|
AWSBedrockAnthropicStream: () => AWSBedrockAnthropicStream,
|
25
37
|
AWSBedrockCohereStream: () => AWSBedrockCohereStream,
|
26
38
|
AWSBedrockLlama2Stream: () => AWSBedrockLlama2Stream,
|
27
39
|
AWSBedrockStream: () => AWSBedrockStream,
|
28
40
|
AnthropicStream: () => AnthropicStream,
|
29
|
-
|
41
|
+
AssistantResponse: () => AssistantResponse,
|
30
42
|
CohereStream: () => CohereStream,
|
43
|
+
EmptyResponseBodyError: () => import_provider8.EmptyResponseBodyError,
|
44
|
+
GenerateObjectResult: () => GenerateObjectResult,
|
45
|
+
GenerateTextResult: () => GenerateTextResult,
|
31
46
|
GoogleGenerativeAIStream: () => GoogleGenerativeAIStream,
|
32
47
|
HuggingFaceStream: () => HuggingFaceStream,
|
33
48
|
InkeepStream: () => InkeepStream,
|
49
|
+
InvalidArgumentError: () => import_provider8.InvalidArgumentError,
|
50
|
+
InvalidDataContentError: () => import_provider8.InvalidDataContentError,
|
51
|
+
InvalidPromptError: () => import_provider8.InvalidPromptError,
|
52
|
+
InvalidResponseDataError: () => import_provider8.InvalidResponseDataError,
|
53
|
+
InvalidToolArgumentsError: () => import_provider8.InvalidToolArgumentsError,
|
54
|
+
JSONParseError: () => import_provider8.JSONParseError,
|
34
55
|
LangChainStream: () => LangChainStream,
|
56
|
+
LoadAPIKeyError: () => import_provider8.LoadAPIKeyError,
|
35
57
|
MistralStream: () => MistralStream,
|
58
|
+
NoObjectGeneratedError: () => import_provider8.NoObjectGeneratedError,
|
59
|
+
NoSuchToolError: () => import_provider8.NoSuchToolError,
|
36
60
|
OpenAIStream: () => OpenAIStream,
|
37
61
|
ReplicateStream: () => ReplicateStream,
|
62
|
+
RetryError: () => import_provider8.RetryError,
|
63
|
+
StreamData: () => StreamData,
|
64
|
+
StreamObjectResult: () => StreamObjectResult,
|
65
|
+
StreamTextResult: () => StreamTextResult,
|
38
66
|
StreamingTextResponse: () => StreamingTextResponse,
|
67
|
+
ToolCallParseError: () => import_provider8.ToolCallParseError,
|
68
|
+
TypeValidationError: () => import_provider8.TypeValidationError,
|
69
|
+
UnsupportedFunctionalityError: () => import_provider8.UnsupportedFunctionalityError,
|
70
|
+
UnsupportedJSONSchemaError: () => import_provider8.UnsupportedJSONSchemaError,
|
71
|
+
convertDataContentToBase64String: () => convertDataContentToBase64String,
|
72
|
+
convertDataContentToUint8Array: () => convertDataContentToUint8Array,
|
39
73
|
createCallbacksTransformer: () => createCallbacksTransformer,
|
40
74
|
createChunkDecoder: () => createChunkDecoder,
|
41
75
|
createEventStreamTransformer: () => createEventStreamTransformer,
|
@@ -43,16 +77,1579 @@ __export(streams_exports, {
|
|
43
77
|
experimental_AssistantResponse: () => experimental_AssistantResponse,
|
44
78
|
experimental_StreamData: () => experimental_StreamData,
|
45
79
|
experimental_StreamingReactResponse: () => experimental_StreamingReactResponse,
|
80
|
+
experimental_generateObject: () => experimental_generateObject,
|
81
|
+
experimental_generateText: () => experimental_generateText,
|
82
|
+
experimental_streamObject: () => experimental_streamObject,
|
83
|
+
experimental_streamText: () => experimental_streamText,
|
84
|
+
formatStreamPart: () => formatStreamPart,
|
85
|
+
generateId: () => generateId,
|
86
|
+
generateObject: () => generateObject,
|
87
|
+
generateText: () => generateText,
|
46
88
|
isStreamStringEqualToType: () => isStreamStringEqualToType,
|
47
|
-
nanoid: () =>
|
89
|
+
nanoid: () => generateId,
|
90
|
+
parseStreamPart: () => parseStreamPart,
|
91
|
+
readDataStream: () => readDataStream,
|
48
92
|
readableFromAsyncIterable: () => readableFromAsyncIterable,
|
93
|
+
streamObject: () => streamObject,
|
94
|
+
streamText: () => streamText,
|
49
95
|
streamToResponse: () => streamToResponse,
|
96
|
+
tool: () => tool,
|
50
97
|
trimStartOfStreamHelper: () => trimStartOfStreamHelper
|
51
98
|
});
|
52
99
|
module.exports = __toCommonJS(streams_exports);
|
53
100
|
|
54
|
-
//
|
101
|
+
// core/generate-object/generate-object.ts
|
102
|
+
var import_provider5 = require("@ai-sdk/provider");
|
103
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
104
|
+
|
105
|
+
// core/generate-text/token-usage.ts
|
106
|
+
function calculateTokenUsage(usage) {
|
107
|
+
return {
|
108
|
+
promptTokens: usage.promptTokens,
|
109
|
+
completionTokens: usage.completionTokens,
|
110
|
+
totalTokens: usage.promptTokens + usage.completionTokens
|
111
|
+
};
|
112
|
+
}
|
113
|
+
|
114
|
+
// core/util/detect-image-mimetype.ts
|
115
|
+
var mimeTypeSignatures = [
|
116
|
+
{ mimeType: "image/gif", bytes: [71, 73, 70] },
|
117
|
+
{ mimeType: "image/png", bytes: [137, 80, 78, 71] },
|
118
|
+
{ mimeType: "image/jpeg", bytes: [255, 216] },
|
119
|
+
{ mimeType: "image/webp", bytes: [82, 73, 70, 70] }
|
120
|
+
];
|
121
|
+
function detectImageMimeType(image) {
|
122
|
+
for (const { bytes, mimeType } of mimeTypeSignatures) {
|
123
|
+
if (image.length >= bytes.length && bytes.every((byte, index) => image[index] === byte)) {
|
124
|
+
return mimeType;
|
125
|
+
}
|
126
|
+
}
|
127
|
+
return void 0;
|
128
|
+
}
|
129
|
+
|
130
|
+
// core/prompt/data-content.ts
|
131
|
+
var import_provider = require("@ai-sdk/provider");
|
132
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
133
|
+
function convertDataContentToBase64String(content) {
|
134
|
+
if (typeof content === "string") {
|
135
|
+
return content;
|
136
|
+
}
|
137
|
+
if (content instanceof ArrayBuffer) {
|
138
|
+
return (0, import_provider_utils.convertUint8ArrayToBase64)(new Uint8Array(content));
|
139
|
+
}
|
140
|
+
return (0, import_provider_utils.convertUint8ArrayToBase64)(content);
|
141
|
+
}
|
142
|
+
function convertDataContentToUint8Array(content) {
|
143
|
+
if (content instanceof Uint8Array) {
|
144
|
+
return content;
|
145
|
+
}
|
146
|
+
if (typeof content === "string") {
|
147
|
+
return (0, import_provider_utils.convertBase64ToUint8Array)(content);
|
148
|
+
}
|
149
|
+
if (content instanceof ArrayBuffer) {
|
150
|
+
return new Uint8Array(content);
|
151
|
+
}
|
152
|
+
throw new import_provider.InvalidDataContentError({ content });
|
153
|
+
}
|
154
|
+
|
155
|
+
// core/prompt/convert-to-language-model-prompt.ts
|
156
|
+
function convertToLanguageModelPrompt(prompt) {
|
157
|
+
const languageModelMessages = [];
|
158
|
+
if (prompt.system != null) {
|
159
|
+
languageModelMessages.push({ role: "system", content: prompt.system });
|
160
|
+
}
|
161
|
+
switch (prompt.type) {
|
162
|
+
case "prompt": {
|
163
|
+
languageModelMessages.push({
|
164
|
+
role: "user",
|
165
|
+
content: [{ type: "text", text: prompt.prompt }]
|
166
|
+
});
|
167
|
+
break;
|
168
|
+
}
|
169
|
+
case "messages": {
|
170
|
+
languageModelMessages.push(
|
171
|
+
...prompt.messages.map((message) => {
|
172
|
+
switch (message.role) {
|
173
|
+
case "user": {
|
174
|
+
if (typeof message.content === "string") {
|
175
|
+
return {
|
176
|
+
role: "user",
|
177
|
+
content: [{ type: "text", text: message.content }]
|
178
|
+
};
|
179
|
+
}
|
180
|
+
return {
|
181
|
+
role: "user",
|
182
|
+
content: message.content.map(
|
183
|
+
(part) => {
|
184
|
+
var _a;
|
185
|
+
switch (part.type) {
|
186
|
+
case "text": {
|
187
|
+
return part;
|
188
|
+
}
|
189
|
+
case "image": {
|
190
|
+
if (part.image instanceof URL) {
|
191
|
+
return {
|
192
|
+
type: "image",
|
193
|
+
image: part.image,
|
194
|
+
mimeType: part.mimeType
|
195
|
+
};
|
196
|
+
}
|
197
|
+
const imageUint8 = convertDataContentToUint8Array(
|
198
|
+
part.image
|
199
|
+
);
|
200
|
+
return {
|
201
|
+
type: "image",
|
202
|
+
image: imageUint8,
|
203
|
+
mimeType: (_a = part.mimeType) != null ? _a : detectImageMimeType(imageUint8)
|
204
|
+
};
|
205
|
+
}
|
206
|
+
}
|
207
|
+
}
|
208
|
+
)
|
209
|
+
};
|
210
|
+
}
|
211
|
+
case "assistant": {
|
212
|
+
if (typeof message.content === "string") {
|
213
|
+
return {
|
214
|
+
role: "assistant",
|
215
|
+
content: [{ type: "text", text: message.content }]
|
216
|
+
};
|
217
|
+
}
|
218
|
+
return { role: "assistant", content: message.content };
|
219
|
+
}
|
220
|
+
case "tool": {
|
221
|
+
return message;
|
222
|
+
}
|
223
|
+
}
|
224
|
+
})
|
225
|
+
);
|
226
|
+
break;
|
227
|
+
}
|
228
|
+
default: {
|
229
|
+
const _exhaustiveCheck = prompt;
|
230
|
+
throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
|
231
|
+
}
|
232
|
+
}
|
233
|
+
return languageModelMessages;
|
234
|
+
}
|
235
|
+
|
236
|
+
// core/prompt/get-validated-prompt.ts
|
237
|
+
var import_provider2 = require("@ai-sdk/provider");
|
238
|
+
function getValidatedPrompt(prompt) {
|
239
|
+
if (prompt.prompt == null && prompt.messages == null) {
|
240
|
+
throw new import_provider2.InvalidPromptError({
|
241
|
+
prompt,
|
242
|
+
message: "prompt or messages must be defined"
|
243
|
+
});
|
244
|
+
}
|
245
|
+
if (prompt.prompt != null && prompt.messages != null) {
|
246
|
+
throw new import_provider2.InvalidPromptError({
|
247
|
+
prompt,
|
248
|
+
message: "prompt and messages cannot be defined at the same time"
|
249
|
+
});
|
250
|
+
}
|
251
|
+
return prompt.prompt != null ? {
|
252
|
+
type: "prompt",
|
253
|
+
prompt: prompt.prompt,
|
254
|
+
messages: void 0,
|
255
|
+
system: prompt.system
|
256
|
+
} : {
|
257
|
+
type: "messages",
|
258
|
+
prompt: void 0,
|
259
|
+
messages: prompt.messages,
|
260
|
+
// only possible case bc of checks above
|
261
|
+
system: prompt.system
|
262
|
+
};
|
263
|
+
}
|
264
|
+
|
265
|
+
// core/prompt/prepare-call-settings.ts
|
266
|
+
var import_provider3 = require("@ai-sdk/provider");
|
267
|
+
function prepareCallSettings({
|
268
|
+
maxTokens,
|
269
|
+
temperature,
|
270
|
+
topP,
|
271
|
+
presencePenalty,
|
272
|
+
frequencyPenalty,
|
273
|
+
seed,
|
274
|
+
maxRetries
|
275
|
+
}) {
|
276
|
+
if (maxTokens != null) {
|
277
|
+
if (!Number.isInteger(maxTokens)) {
|
278
|
+
throw new import_provider3.InvalidArgumentError({
|
279
|
+
parameter: "maxTokens",
|
280
|
+
value: maxTokens,
|
281
|
+
message: "maxTokens must be an integer"
|
282
|
+
});
|
283
|
+
}
|
284
|
+
if (maxTokens < 1) {
|
285
|
+
throw new import_provider3.InvalidArgumentError({
|
286
|
+
parameter: "maxTokens",
|
287
|
+
value: maxTokens,
|
288
|
+
message: "maxTokens must be >= 1"
|
289
|
+
});
|
290
|
+
}
|
291
|
+
}
|
292
|
+
if (temperature != null) {
|
293
|
+
if (typeof temperature !== "number") {
|
294
|
+
throw new import_provider3.InvalidArgumentError({
|
295
|
+
parameter: "temperature",
|
296
|
+
value: temperature,
|
297
|
+
message: "temperature must be a number"
|
298
|
+
});
|
299
|
+
}
|
300
|
+
}
|
301
|
+
if (topP != null) {
|
302
|
+
if (typeof topP !== "number") {
|
303
|
+
throw new import_provider3.InvalidArgumentError({
|
304
|
+
parameter: "topP",
|
305
|
+
value: topP,
|
306
|
+
message: "topP must be a number"
|
307
|
+
});
|
308
|
+
}
|
309
|
+
}
|
310
|
+
if (presencePenalty != null) {
|
311
|
+
if (typeof presencePenalty !== "number") {
|
312
|
+
throw new import_provider3.InvalidArgumentError({
|
313
|
+
parameter: "presencePenalty",
|
314
|
+
value: presencePenalty,
|
315
|
+
message: "presencePenalty must be a number"
|
316
|
+
});
|
317
|
+
}
|
318
|
+
}
|
319
|
+
if (frequencyPenalty != null) {
|
320
|
+
if (typeof frequencyPenalty !== "number") {
|
321
|
+
throw new import_provider3.InvalidArgumentError({
|
322
|
+
parameter: "frequencyPenalty",
|
323
|
+
value: frequencyPenalty,
|
324
|
+
message: "frequencyPenalty must be a number"
|
325
|
+
});
|
326
|
+
}
|
327
|
+
}
|
328
|
+
if (seed != null) {
|
329
|
+
if (!Number.isInteger(seed)) {
|
330
|
+
throw new import_provider3.InvalidArgumentError({
|
331
|
+
parameter: "seed",
|
332
|
+
value: seed,
|
333
|
+
message: "seed must be an integer"
|
334
|
+
});
|
335
|
+
}
|
336
|
+
}
|
337
|
+
if (maxRetries != null) {
|
338
|
+
if (!Number.isInteger(maxRetries)) {
|
339
|
+
throw new import_provider3.InvalidArgumentError({
|
340
|
+
parameter: "maxRetries",
|
341
|
+
value: maxRetries,
|
342
|
+
message: "maxRetries must be an integer"
|
343
|
+
});
|
344
|
+
}
|
345
|
+
if (maxRetries < 0) {
|
346
|
+
throw new import_provider3.InvalidArgumentError({
|
347
|
+
parameter: "maxRetries",
|
348
|
+
value: maxRetries,
|
349
|
+
message: "maxRetries must be >= 0"
|
350
|
+
});
|
351
|
+
}
|
352
|
+
}
|
353
|
+
return {
|
354
|
+
maxTokens,
|
355
|
+
temperature: temperature != null ? temperature : 0,
|
356
|
+
topP,
|
357
|
+
presencePenalty,
|
358
|
+
frequencyPenalty,
|
359
|
+
seed,
|
360
|
+
maxRetries: maxRetries != null ? maxRetries : 2
|
361
|
+
};
|
362
|
+
}
|
363
|
+
|
364
|
+
// core/util/convert-zod-to-json-schema.ts
|
365
|
+
var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"));
|
366
|
+
function convertZodToJSONSchema(zodSchema) {
|
367
|
+
return (0, import_zod_to_json_schema.default)(zodSchema);
|
368
|
+
}
|
369
|
+
|
370
|
+
// core/util/retry-with-exponential-backoff.ts
|
371
|
+
var import_provider4 = require("@ai-sdk/provider");
|
372
|
+
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
373
|
+
|
374
|
+
// core/util/delay.ts
|
375
|
+
async function delay(delayInMs) {
|
376
|
+
return new Promise((resolve) => setTimeout(resolve, delayInMs));
|
377
|
+
}
|
378
|
+
|
379
|
+
// core/util/retry-with-exponential-backoff.ts
|
380
|
+
var retryWithExponentialBackoff = ({
|
381
|
+
maxRetries = 2,
|
382
|
+
initialDelayInMs = 2e3,
|
383
|
+
backoffFactor = 2
|
384
|
+
} = {}) => async (f) => _retryWithExponentialBackoff(f, {
|
385
|
+
maxRetries,
|
386
|
+
delayInMs: initialDelayInMs,
|
387
|
+
backoffFactor
|
388
|
+
});
|
389
|
+
async function _retryWithExponentialBackoff(f, {
|
390
|
+
maxRetries,
|
391
|
+
delayInMs,
|
392
|
+
backoffFactor
|
393
|
+
}, errors = []) {
|
394
|
+
try {
|
395
|
+
return await f();
|
396
|
+
} catch (error) {
|
397
|
+
if ((0, import_provider_utils2.isAbortError)(error)) {
|
398
|
+
throw error;
|
399
|
+
}
|
400
|
+
if (maxRetries === 0) {
|
401
|
+
throw error;
|
402
|
+
}
|
403
|
+
const errorMessage = (0, import_provider_utils2.getErrorMessage)(error);
|
404
|
+
const newErrors = [...errors, error];
|
405
|
+
const tryNumber = newErrors.length;
|
406
|
+
if (tryNumber > maxRetries) {
|
407
|
+
throw new import_provider4.RetryError({
|
408
|
+
message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
|
409
|
+
reason: "maxRetriesExceeded",
|
410
|
+
errors: newErrors
|
411
|
+
});
|
412
|
+
}
|
413
|
+
if (error instanceof Error && import_provider4.APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
|
414
|
+
await delay(delayInMs);
|
415
|
+
return _retryWithExponentialBackoff(
|
416
|
+
f,
|
417
|
+
{ maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
|
418
|
+
newErrors
|
419
|
+
);
|
420
|
+
}
|
421
|
+
if (tryNumber === 1) {
|
422
|
+
throw error;
|
423
|
+
}
|
424
|
+
throw new import_provider4.RetryError({
|
425
|
+
message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
|
426
|
+
reason: "errorNotRetryable",
|
427
|
+
errors: newErrors
|
428
|
+
});
|
429
|
+
}
|
430
|
+
}
|
431
|
+
|
432
|
+
// core/generate-object/inject-json-schema-into-system.ts
|
433
|
+
var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
|
434
|
+
var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
|
435
|
+
function injectJsonSchemaIntoSystem({
|
436
|
+
system,
|
437
|
+
schema,
|
438
|
+
schemaPrefix = DEFAULT_SCHEMA_PREFIX,
|
439
|
+
schemaSuffix = DEFAULT_SCHEMA_SUFFIX
|
440
|
+
}) {
|
441
|
+
return [
|
442
|
+
system,
|
443
|
+
system != null ? "" : null,
|
444
|
+
// add a newline if system is not null
|
445
|
+
schemaPrefix,
|
446
|
+
JSON.stringify(schema),
|
447
|
+
schemaSuffix
|
448
|
+
].filter((line) => line != null).join("\n");
|
449
|
+
}
|
450
|
+
|
451
|
+
// core/generate-object/generate-object.ts
|
452
|
+
async function generateObject({
|
453
|
+
model,
|
454
|
+
schema,
|
455
|
+
mode,
|
456
|
+
system,
|
457
|
+
prompt,
|
458
|
+
messages,
|
459
|
+
maxRetries,
|
460
|
+
abortSignal,
|
461
|
+
...settings
|
462
|
+
}) {
|
463
|
+
var _a, _b;
|
464
|
+
const retry = retryWithExponentialBackoff({ maxRetries });
|
465
|
+
const jsonSchema = convertZodToJSONSchema(schema);
|
466
|
+
if (mode === "auto" || mode == null) {
|
467
|
+
mode = model.defaultObjectGenerationMode;
|
468
|
+
}
|
469
|
+
let result;
|
470
|
+
let finishReason;
|
471
|
+
let usage;
|
472
|
+
let warnings;
|
473
|
+
let rawResponse;
|
474
|
+
let logprobs;
|
475
|
+
switch (mode) {
|
476
|
+
case "json": {
|
477
|
+
const validatedPrompt = getValidatedPrompt({
|
478
|
+
system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
|
479
|
+
prompt,
|
480
|
+
messages
|
481
|
+
});
|
482
|
+
const generateResult = await retry(() => {
|
483
|
+
return model.doGenerate({
|
484
|
+
mode: { type: "object-json" },
|
485
|
+
...prepareCallSettings(settings),
|
486
|
+
inputFormat: validatedPrompt.type,
|
487
|
+
prompt: convertToLanguageModelPrompt(validatedPrompt),
|
488
|
+
abortSignal
|
489
|
+
});
|
490
|
+
});
|
491
|
+
if (generateResult.text === void 0) {
|
492
|
+
throw new import_provider5.NoObjectGeneratedError();
|
493
|
+
}
|
494
|
+
result = generateResult.text;
|
495
|
+
finishReason = generateResult.finishReason;
|
496
|
+
usage = generateResult.usage;
|
497
|
+
warnings = generateResult.warnings;
|
498
|
+
rawResponse = generateResult.rawResponse;
|
499
|
+
logprobs = generateResult.logprobs;
|
500
|
+
break;
|
501
|
+
}
|
502
|
+
case "grammar": {
|
503
|
+
const validatedPrompt = getValidatedPrompt({
|
504
|
+
system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
|
505
|
+
prompt,
|
506
|
+
messages
|
507
|
+
});
|
508
|
+
const generateResult = await retry(
|
509
|
+
() => model.doGenerate({
|
510
|
+
mode: { type: "object-grammar", schema: jsonSchema },
|
511
|
+
...settings,
|
512
|
+
inputFormat: validatedPrompt.type,
|
513
|
+
prompt: convertToLanguageModelPrompt(validatedPrompt),
|
514
|
+
abortSignal
|
515
|
+
})
|
516
|
+
);
|
517
|
+
if (generateResult.text === void 0) {
|
518
|
+
throw new import_provider5.NoObjectGeneratedError();
|
519
|
+
}
|
520
|
+
result = generateResult.text;
|
521
|
+
finishReason = generateResult.finishReason;
|
522
|
+
usage = generateResult.usage;
|
523
|
+
warnings = generateResult.warnings;
|
524
|
+
rawResponse = generateResult.rawResponse;
|
525
|
+
logprobs = generateResult.logprobs;
|
526
|
+
break;
|
527
|
+
}
|
528
|
+
case "tool": {
|
529
|
+
const validatedPrompt = getValidatedPrompt({
|
530
|
+
system,
|
531
|
+
prompt,
|
532
|
+
messages
|
533
|
+
});
|
534
|
+
const generateResult = await retry(
|
535
|
+
() => model.doGenerate({
|
536
|
+
mode: {
|
537
|
+
type: "object-tool",
|
538
|
+
tool: {
|
539
|
+
type: "function",
|
540
|
+
name: "json",
|
541
|
+
description: "Respond with a JSON object.",
|
542
|
+
parameters: jsonSchema
|
543
|
+
}
|
544
|
+
},
|
545
|
+
...settings,
|
546
|
+
inputFormat: validatedPrompt.type,
|
547
|
+
prompt: convertToLanguageModelPrompt(validatedPrompt),
|
548
|
+
abortSignal
|
549
|
+
})
|
550
|
+
);
|
551
|
+
const functionArgs = (_b = (_a = generateResult.toolCalls) == null ? void 0 : _a[0]) == null ? void 0 : _b.args;
|
552
|
+
if (functionArgs === void 0) {
|
553
|
+
throw new import_provider5.NoObjectGeneratedError();
|
554
|
+
}
|
555
|
+
result = functionArgs;
|
556
|
+
finishReason = generateResult.finishReason;
|
557
|
+
usage = generateResult.usage;
|
558
|
+
warnings = generateResult.warnings;
|
559
|
+
rawResponse = generateResult.rawResponse;
|
560
|
+
logprobs = generateResult.logprobs;
|
561
|
+
break;
|
562
|
+
}
|
563
|
+
case void 0: {
|
564
|
+
throw new Error("Model does not have a default object generation mode.");
|
565
|
+
}
|
566
|
+
default: {
|
567
|
+
const _exhaustiveCheck = mode;
|
568
|
+
throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
|
569
|
+
}
|
570
|
+
}
|
571
|
+
const parseResult = (0, import_provider_utils3.safeParseJSON)({ text: result, schema });
|
572
|
+
if (!parseResult.success) {
|
573
|
+
throw parseResult.error;
|
574
|
+
}
|
575
|
+
return new GenerateObjectResult({
|
576
|
+
object: parseResult.value,
|
577
|
+
finishReason,
|
578
|
+
usage: calculateTokenUsage(usage),
|
579
|
+
warnings,
|
580
|
+
rawResponse,
|
581
|
+
logprobs
|
582
|
+
});
|
583
|
+
}
|
584
|
+
var GenerateObjectResult = class {
|
585
|
+
constructor(options) {
|
586
|
+
this.object = options.object;
|
587
|
+
this.finishReason = options.finishReason;
|
588
|
+
this.usage = options.usage;
|
589
|
+
this.warnings = options.warnings;
|
590
|
+
this.rawResponse = options.rawResponse;
|
591
|
+
this.logprobs = options.logprobs;
|
592
|
+
}
|
593
|
+
};
|
594
|
+
var experimental_generateObject = generateObject;
|
595
|
+
|
596
|
+
// core/util/async-iterable-stream.ts
|
597
|
+
function createAsyncIterableStream(source, transformer) {
|
598
|
+
const transformedStream = source.pipeThrough(
|
599
|
+
new TransformStream(transformer)
|
600
|
+
);
|
601
|
+
transformedStream[Symbol.asyncIterator] = () => {
|
602
|
+
const reader = transformedStream.getReader();
|
603
|
+
return {
|
604
|
+
async next() {
|
605
|
+
const { done, value } = await reader.read();
|
606
|
+
return done ? { done: true, value: void 0 } : { done: false, value };
|
607
|
+
}
|
608
|
+
};
|
609
|
+
};
|
610
|
+
return transformedStream;
|
611
|
+
}
|
612
|
+
|
613
|
+
// core/util/is-deep-equal-data.ts
|
614
|
+
function isDeepEqualData(obj1, obj2) {
|
615
|
+
if (obj1 === obj2)
|
616
|
+
return true;
|
617
|
+
if (obj1 == null || obj2 == null)
|
618
|
+
return false;
|
619
|
+
if (typeof obj1 !== "object" && typeof obj2 !== "object")
|
620
|
+
return obj1 === obj2;
|
621
|
+
if (obj1.constructor !== obj2.constructor)
|
622
|
+
return false;
|
623
|
+
if (obj1 instanceof Date && obj2 instanceof Date) {
|
624
|
+
return obj1.getTime() === obj2.getTime();
|
625
|
+
}
|
626
|
+
if (Array.isArray(obj1)) {
|
627
|
+
if (obj1.length !== obj2.length)
|
628
|
+
return false;
|
629
|
+
for (let i = 0; i < obj1.length; i++) {
|
630
|
+
if (!isDeepEqualData(obj1[i], obj2[i]))
|
631
|
+
return false;
|
632
|
+
}
|
633
|
+
return true;
|
634
|
+
}
|
635
|
+
const keys1 = Object.keys(obj1);
|
636
|
+
const keys2 = Object.keys(obj2);
|
637
|
+
if (keys1.length !== keys2.length)
|
638
|
+
return false;
|
639
|
+
for (const key of keys1) {
|
640
|
+
if (!keys2.includes(key))
|
641
|
+
return false;
|
642
|
+
if (!isDeepEqualData(obj1[key], obj2[key]))
|
643
|
+
return false;
|
644
|
+
}
|
645
|
+
return true;
|
646
|
+
}
|
647
|
+
|
648
|
+
// core/util/parse-partial-json.ts
|
649
|
+
var import_secure_json_parse = __toESM(require("secure-json-parse"));
|
650
|
+
|
651
|
+
// core/util/fix-json.ts
|
652
|
+
function fixJson(input) {
|
653
|
+
const stack = ["ROOT"];
|
654
|
+
let lastValidIndex = -1;
|
655
|
+
let literalStart = null;
|
656
|
+
function processValueStart(char, i, swapState) {
|
657
|
+
{
|
658
|
+
switch (char) {
|
659
|
+
case '"': {
|
660
|
+
lastValidIndex = i;
|
661
|
+
stack.pop();
|
662
|
+
stack.push(swapState);
|
663
|
+
stack.push("INSIDE_STRING");
|
664
|
+
break;
|
665
|
+
}
|
666
|
+
case "f":
|
667
|
+
case "t":
|
668
|
+
case "n": {
|
669
|
+
lastValidIndex = i;
|
670
|
+
literalStart = i;
|
671
|
+
stack.pop();
|
672
|
+
stack.push(swapState);
|
673
|
+
stack.push("INSIDE_LITERAL");
|
674
|
+
break;
|
675
|
+
}
|
676
|
+
case "-": {
|
677
|
+
stack.pop();
|
678
|
+
stack.push(swapState);
|
679
|
+
stack.push("INSIDE_NUMBER");
|
680
|
+
break;
|
681
|
+
}
|
682
|
+
case "0":
|
683
|
+
case "1":
|
684
|
+
case "2":
|
685
|
+
case "3":
|
686
|
+
case "4":
|
687
|
+
case "5":
|
688
|
+
case "6":
|
689
|
+
case "7":
|
690
|
+
case "8":
|
691
|
+
case "9": {
|
692
|
+
lastValidIndex = i;
|
693
|
+
stack.pop();
|
694
|
+
stack.push(swapState);
|
695
|
+
stack.push("INSIDE_NUMBER");
|
696
|
+
break;
|
697
|
+
}
|
698
|
+
case "{": {
|
699
|
+
lastValidIndex = i;
|
700
|
+
stack.pop();
|
701
|
+
stack.push(swapState);
|
702
|
+
stack.push("INSIDE_OBJECT_START");
|
703
|
+
break;
|
704
|
+
}
|
705
|
+
case "[": {
|
706
|
+
lastValidIndex = i;
|
707
|
+
stack.pop();
|
708
|
+
stack.push(swapState);
|
709
|
+
stack.push("INSIDE_ARRAY_START");
|
710
|
+
break;
|
711
|
+
}
|
712
|
+
}
|
713
|
+
}
|
714
|
+
}
|
715
|
+
function processAfterObjectValue(char, i) {
|
716
|
+
switch (char) {
|
717
|
+
case ",": {
|
718
|
+
stack.pop();
|
719
|
+
stack.push("INSIDE_OBJECT_AFTER_COMMA");
|
720
|
+
break;
|
721
|
+
}
|
722
|
+
case "}": {
|
723
|
+
lastValidIndex = i;
|
724
|
+
stack.pop();
|
725
|
+
break;
|
726
|
+
}
|
727
|
+
}
|
728
|
+
}
|
729
|
+
function processAfterArrayValue(char, i) {
|
730
|
+
switch (char) {
|
731
|
+
case ",": {
|
732
|
+
stack.pop();
|
733
|
+
stack.push("INSIDE_ARRAY_AFTER_COMMA");
|
734
|
+
break;
|
735
|
+
}
|
736
|
+
case "]": {
|
737
|
+
lastValidIndex = i;
|
738
|
+
stack.pop();
|
739
|
+
break;
|
740
|
+
}
|
741
|
+
}
|
742
|
+
}
|
743
|
+
for (let i = 0; i < input.length; i++) {
|
744
|
+
const char = input[i];
|
745
|
+
const currentState = stack[stack.length - 1];
|
746
|
+
switch (currentState) {
|
747
|
+
case "ROOT":
|
748
|
+
processValueStart(char, i, "FINISH");
|
749
|
+
break;
|
750
|
+
case "INSIDE_OBJECT_START": {
|
751
|
+
switch (char) {
|
752
|
+
case '"': {
|
753
|
+
stack.pop();
|
754
|
+
stack.push("INSIDE_OBJECT_KEY");
|
755
|
+
break;
|
756
|
+
}
|
757
|
+
case "}": {
|
758
|
+
stack.pop();
|
759
|
+
break;
|
760
|
+
}
|
761
|
+
}
|
762
|
+
break;
|
763
|
+
}
|
764
|
+
case "INSIDE_OBJECT_AFTER_COMMA": {
|
765
|
+
switch (char) {
|
766
|
+
case '"': {
|
767
|
+
stack.pop();
|
768
|
+
stack.push("INSIDE_OBJECT_KEY");
|
769
|
+
break;
|
770
|
+
}
|
771
|
+
}
|
772
|
+
break;
|
773
|
+
}
|
774
|
+
case "INSIDE_OBJECT_KEY": {
|
775
|
+
switch (char) {
|
776
|
+
case '"': {
|
777
|
+
stack.pop();
|
778
|
+
stack.push("INSIDE_OBJECT_AFTER_KEY");
|
779
|
+
break;
|
780
|
+
}
|
781
|
+
}
|
782
|
+
break;
|
783
|
+
}
|
784
|
+
case "INSIDE_OBJECT_AFTER_KEY": {
|
785
|
+
switch (char) {
|
786
|
+
case ":": {
|
787
|
+
stack.pop();
|
788
|
+
stack.push("INSIDE_OBJECT_BEFORE_VALUE");
|
789
|
+
break;
|
790
|
+
}
|
791
|
+
}
|
792
|
+
break;
|
793
|
+
}
|
794
|
+
case "INSIDE_OBJECT_BEFORE_VALUE": {
|
795
|
+
processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
|
796
|
+
break;
|
797
|
+
}
|
798
|
+
case "INSIDE_OBJECT_AFTER_VALUE": {
|
799
|
+
processAfterObjectValue(char, i);
|
800
|
+
break;
|
801
|
+
}
|
802
|
+
case "INSIDE_STRING": {
|
803
|
+
switch (char) {
|
804
|
+
case '"': {
|
805
|
+
stack.pop();
|
806
|
+
lastValidIndex = i;
|
807
|
+
break;
|
808
|
+
}
|
809
|
+
case "\\": {
|
810
|
+
stack.push("INSIDE_STRING_ESCAPE");
|
811
|
+
break;
|
812
|
+
}
|
813
|
+
default: {
|
814
|
+
lastValidIndex = i;
|
815
|
+
}
|
816
|
+
}
|
817
|
+
break;
|
818
|
+
}
|
819
|
+
case "INSIDE_ARRAY_START": {
|
820
|
+
switch (char) {
|
821
|
+
case "]": {
|
822
|
+
lastValidIndex = i;
|
823
|
+
stack.pop();
|
824
|
+
break;
|
825
|
+
}
|
826
|
+
default: {
|
827
|
+
lastValidIndex = i;
|
828
|
+
processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
|
829
|
+
break;
|
830
|
+
}
|
831
|
+
}
|
832
|
+
break;
|
833
|
+
}
|
834
|
+
case "INSIDE_ARRAY_AFTER_VALUE": {
|
835
|
+
switch (char) {
|
836
|
+
case ",": {
|
837
|
+
stack.pop();
|
838
|
+
stack.push("INSIDE_ARRAY_AFTER_COMMA");
|
839
|
+
break;
|
840
|
+
}
|
841
|
+
case "]": {
|
842
|
+
lastValidIndex = i;
|
843
|
+
stack.pop();
|
844
|
+
break;
|
845
|
+
}
|
846
|
+
default: {
|
847
|
+
lastValidIndex = i;
|
848
|
+
break;
|
849
|
+
}
|
850
|
+
}
|
851
|
+
break;
|
852
|
+
}
|
853
|
+
case "INSIDE_ARRAY_AFTER_COMMA": {
|
854
|
+
processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
|
855
|
+
break;
|
856
|
+
}
|
857
|
+
case "INSIDE_STRING_ESCAPE": {
|
858
|
+
stack.pop();
|
859
|
+
lastValidIndex = i;
|
860
|
+
break;
|
861
|
+
}
|
862
|
+
case "INSIDE_NUMBER": {
|
863
|
+
switch (char) {
|
864
|
+
case "0":
|
865
|
+
case "1":
|
866
|
+
case "2":
|
867
|
+
case "3":
|
868
|
+
case "4":
|
869
|
+
case "5":
|
870
|
+
case "6":
|
871
|
+
case "7":
|
872
|
+
case "8":
|
873
|
+
case "9": {
|
874
|
+
lastValidIndex = i;
|
875
|
+
break;
|
876
|
+
}
|
877
|
+
case "e":
|
878
|
+
case "E":
|
879
|
+
case "-":
|
880
|
+
case ".": {
|
881
|
+
break;
|
882
|
+
}
|
883
|
+
case ",": {
|
884
|
+
stack.pop();
|
885
|
+
if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
|
886
|
+
processAfterArrayValue(char, i);
|
887
|
+
}
|
888
|
+
if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
|
889
|
+
processAfterObjectValue(char, i);
|
890
|
+
}
|
891
|
+
break;
|
892
|
+
}
|
893
|
+
case "}": {
|
894
|
+
stack.pop();
|
895
|
+
if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
|
896
|
+
processAfterObjectValue(char, i);
|
897
|
+
}
|
898
|
+
break;
|
899
|
+
}
|
900
|
+
case "]": {
|
901
|
+
stack.pop();
|
902
|
+
if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
|
903
|
+
processAfterArrayValue(char, i);
|
904
|
+
}
|
905
|
+
break;
|
906
|
+
}
|
907
|
+
default: {
|
908
|
+
stack.pop();
|
909
|
+
break;
|
910
|
+
}
|
911
|
+
}
|
912
|
+
break;
|
913
|
+
}
|
914
|
+
case "INSIDE_LITERAL": {
|
915
|
+
const partialLiteral = input.substring(literalStart, i + 1);
|
916
|
+
if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
|
917
|
+
stack.pop();
|
918
|
+
if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
|
919
|
+
processAfterObjectValue(char, i);
|
920
|
+
} else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
|
921
|
+
processAfterArrayValue(char, i);
|
922
|
+
}
|
923
|
+
} else {
|
924
|
+
lastValidIndex = i;
|
925
|
+
}
|
926
|
+
break;
|
927
|
+
}
|
928
|
+
}
|
929
|
+
}
|
930
|
+
let result = input.slice(0, lastValidIndex + 1);
|
931
|
+
for (let i = stack.length - 1; i >= 0; i--) {
|
932
|
+
const state = stack[i];
|
933
|
+
switch (state) {
|
934
|
+
case "INSIDE_STRING": {
|
935
|
+
result += '"';
|
936
|
+
break;
|
937
|
+
}
|
938
|
+
case "INSIDE_OBJECT_KEY":
|
939
|
+
case "INSIDE_OBJECT_AFTER_KEY":
|
940
|
+
case "INSIDE_OBJECT_AFTER_COMMA":
|
941
|
+
case "INSIDE_OBJECT_START":
|
942
|
+
case "INSIDE_OBJECT_BEFORE_VALUE":
|
943
|
+
case "INSIDE_OBJECT_AFTER_VALUE": {
|
944
|
+
result += "}";
|
945
|
+
break;
|
946
|
+
}
|
947
|
+
case "INSIDE_ARRAY_START":
|
948
|
+
case "INSIDE_ARRAY_AFTER_COMMA":
|
949
|
+
case "INSIDE_ARRAY_AFTER_VALUE": {
|
950
|
+
result += "]";
|
951
|
+
break;
|
952
|
+
}
|
953
|
+
case "INSIDE_LITERAL": {
|
954
|
+
const partialLiteral = input.substring(literalStart, input.length);
|
955
|
+
if ("true".startsWith(partialLiteral)) {
|
956
|
+
result += "true".slice(partialLiteral.length);
|
957
|
+
} else if ("false".startsWith(partialLiteral)) {
|
958
|
+
result += "false".slice(partialLiteral.length);
|
959
|
+
} else if ("null".startsWith(partialLiteral)) {
|
960
|
+
result += "null".slice(partialLiteral.length);
|
961
|
+
}
|
962
|
+
}
|
963
|
+
}
|
964
|
+
}
|
965
|
+
return result;
|
966
|
+
}
|
967
|
+
|
968
|
+
// core/util/parse-partial-json.ts
|
969
|
+
// Best-effort parse of a (possibly incomplete) JSON string.
// Returns undefined when the input is nullish or cannot be parsed even after
// running it through fixJson, which repairs truncated JSON fragments.
function parsePartialJson(jsonText) {
  if (jsonText == null) {
    return undefined;
  }
  try {
    // First attempt: the text may already be complete, valid JSON.
    return import_secure_json_parse.default.parse(jsonText);
  } catch (ignored) {
    // Second attempt: repair the fragment, then parse the fixed text.
    try {
      return import_secure_json_parse.default.parse(fixJson(jsonText));
    } catch (ignored2) {
      // Unparseable even after repair — fall through to undefined.
    }
  }
  return undefined;
}
|
984
|
+
|
985
|
+
// core/generate-object/stream-object.ts
|
986
|
+
// Streams a typed object from a language model using one of three object
// generation strategies ("json", "grammar", "tool"). Resolves an "auto"/null
// mode from the model's default, builds provider call options plus a stream
// transformer that reduces raw model chunks to text deltas and finish/error
// events, then returns a StreamObjectResult wrapping the piped stream.
// Throws when the model has no default mode or the mode is unsupported.
async function streamObject({
  model,
  schema,
  mode,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const jsonSchema = convertZodToJSONSchema(schema);
  // "auto" (or unset) defers the strategy choice to the model implementation.
  if (mode === "auto" || mode == null) {
    mode = model.defaultObjectGenerationMode;
  }
  let callOptions;
  let transformer;
  switch (mode) {
    case "json": {
      // JSON mode: the schema is injected into the system prompt.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      callOptions = {
        mode: { type: "object-json" },
        ...prepareCallSettings(settings),
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      // Forward only text deltas (as plain strings) plus finish/error chunks.
      transformer = {
        transform: (chunk, controller) => {
          switch (chunk.type) {
            case "text-delta":
              controller.enqueue(chunk.textDelta);
              break;
            case "finish":
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case "grammar": {
      // Grammar mode: schema goes both into the system prompt and the
      // provider-level grammar constraint.
      const validatedPrompt = getValidatedPrompt({
        system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
        prompt,
        messages
      });
      // NOTE(review): unlike the "json" branch this spreads raw `settings`
      // instead of `prepareCallSettings(settings)` — confirm the asymmetry
      // is intentional.
      callOptions = {
        mode: { type: "object-grammar", schema: jsonSchema },
        ...settings,
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = {
        transform: (chunk, controller) => {
          switch (chunk.type) {
            case "text-delta":
              controller.enqueue(chunk.textDelta);
              break;
            case "finish":
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case "tool": {
      // Tool mode: the object is produced as the arguments of a forced
      // function ("json") tool call; deltas arrive as argsTextDelta.
      const validatedPrompt = getValidatedPrompt({
        system,
        prompt,
        messages
      });
      callOptions = {
        mode: {
          type: "object-tool",
          tool: {
            type: "function",
            name: "json",
            description: "Respond with a JSON object.",
            parameters: jsonSchema
          }
        },
        ...settings,
        inputFormat: validatedPrompt.type,
        prompt: convertToLanguageModelPrompt(validatedPrompt),
        abortSignal
      };
      transformer = {
        transform(chunk, controller) {
          switch (chunk.type) {
            case "tool-call-delta":
              controller.enqueue(chunk.argsTextDelta);
              break;
            case "finish":
            case "error":
              controller.enqueue(chunk);
              break;
          }
        }
      };
      break;
    }
    case void 0: {
      // Mode resolved from the model default above and the model had none.
      throw new Error("Model does not have a default object generation mode.");
    }
    default: {
      // Exhaustiveness guard (compiled from TypeScript's `never` check).
      const _exhaustiveCheck = mode;
      throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
    }
  }
  const result = await retry(() => model.doStream(callOptions));
  return new StreamObjectResult({
    stream: result.stream.pipeThrough(new TransformStream(transformer)),
    warnings: result.warnings,
    rawResponse: result.rawResponse
  });
}
|
1112
|
+
// Result wrapper for streamObject. Holds the transformed model stream and
// exposes it either as a stream of partial objects or as a full event stream.
// NOTE(review): each getter pipes `this.originalStream`, which can only be
// consumed once — presumably callers use exactly one of the two getters.
var StreamObjectResult = class {
  constructor({
    stream,
    warnings,
    rawResponse
  }) {
    this.originalStream = stream;
    this.warnings = warnings;
    this.rawResponse = rawResponse;
  }
  // Stream of progressively more complete parsed objects. A new object is
  // emitted only when the accumulated text parses to something deeply
  // different from the previous emission; error chunks are rethrown.
  get partialObjectStream() {
    let accumulatedText = "";
    let latestObject = void 0;
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        if (typeof chunk === "string") {
          // String chunks are raw JSON text deltas.
          accumulatedText += chunk;
          const currentObject = parsePartialJson(
            accumulatedText
          );
          // Deduplicate: only emit when the parse result actually changed.
          if (!isDeepEqualData(latestObject, currentObject)) {
            latestObject = currentObject;
            controller.enqueue(currentObject);
          }
        } else if (chunk.type === "error") {
          throw chunk.error;
        }
      }
    });
  }
  // Stream of all events: partial objects (tagged { type: "object" }),
  // finish events (with normalized token usage), and any other chunks
  // passed through unchanged.
  get fullStream() {
    let accumulatedText = "";
    let latestObject = void 0;
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        if (typeof chunk === "string") {
          accumulatedText += chunk;
          const currentObject = parsePartialJson(
            accumulatedText
          );
          if (!isDeepEqualData(latestObject, currentObject)) {
            latestObject = currentObject;
            controller.enqueue({ type: "object", object: currentObject });
          }
        } else {
          switch (chunk.type) {
            case "finish":
              // Normalize raw provider usage into the public token-usage shape.
              controller.enqueue({
                ...chunk,
                usage: calculateTokenUsage(chunk.usage)
              });
              break;
            default:
              controller.enqueue(chunk);
              break;
          }
        }
      }
    });
  }
};
|
1173
|
+
var experimental_streamObject = streamObject;
|
1174
|
+
|
1175
|
+
// core/generate-text/tool-call.ts
|
1176
|
+
var import_provider6 = require("@ai-sdk/provider");
|
1177
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
1178
|
+
// Validates a raw model tool call against the registered tools.
// Throws NoSuchToolError when the tool set is missing or the named tool is
// unknown, and InvalidToolArgumentsError when the JSON arguments do not
// match the tool's parameter schema. Returns the typed tool-call object.
function parseToolCall({
  toolCall,
  tools
}) {
  const toolName = toolCall.toolName;
  // No tools registered at all: every tool call is unknown.
  if (tools == null) {
    throw new import_provider6.NoSuchToolError({ toolName: toolCall.toolName });
  }
  const toolDefinition = tools[toolName];
  if (toolDefinition == null) {
    throw new import_provider6.NoSuchToolError({
      toolName: toolCall.toolName,
      availableTools: Object.keys(tools)
    });
  }
  // Parse + schema-validate the JSON argument payload in one step.
  const parseResult = (0, import_provider_utils4.safeParseJSON)({
    text: toolCall.args,
    schema: toolDefinition.parameters
  });
  if (parseResult.success === false) {
    throw new import_provider6.InvalidToolArgumentsError({
      toolName,
      toolArgs: toolCall.args,
      cause: parseResult.error
    });
  }
  return {
    type: "tool-call",
    toolCallId: toolCall.toolCallId,
    toolName,
    args: parseResult.value
  };
}
|
1211
|
+
|
1212
|
+
// core/generate-text/generate-text.ts
|
1213
|
+
// Generates (non-streaming) text and tool calls from a language model.
// Validates the prompt, calls model.doGenerate with retry/backoff, parses
// and validates any returned tool calls, executes the executable ones, and
// wraps everything in a GenerateTextResult.
async function generateText({
  model,
  tools,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  var _a, _b;
  const retry = retryWithExponentialBackoff({ maxRetries });
  const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
  const modelResponse = await retry(() => {
    return model.doGenerate({
      mode: {
        type: "regular",
        // Expose each registered tool as a provider "function" definition.
        tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
          type: "function",
          name,
          description: tool2.description,
          parameters: convertZodToJSONSchema(tool2.parameters)
        }))
      },
      ...prepareCallSettings(settings),
      inputFormat: validatedPrompt.type,
      prompt: convertToLanguageModelPrompt(validatedPrompt),
      abortSignal
    });
  });
  // Validate every tool call the model emitted (throws on unknown tool or
  // schema-invalid arguments).
  const toolCalls = [];
  for (const modelToolCall of (_a = modelResponse.toolCalls) != null ? _a : []) {
    toolCalls.push(parseToolCall({ toolCall: modelToolCall, tools }));
  }
  // Run the tools that have an execute function; skip execution entirely
  // when no tools are registered.
  const toolResults = tools == null ? [] : await executeTools({ toolCalls, tools });
  return new GenerateTextResult({
    // Always return a string so that the caller doesn't have to check for undefined.
    // If they need to check if the model did not return any text,
    // they can check the length of the string:
    text: (_b = modelResponse.text) != null ? _b : "",
    toolCalls,
    toolResults,
    finishReason: modelResponse.finishReason,
    usage: calculateTokenUsage(modelResponse.usage),
    warnings: modelResponse.warnings,
    rawResponse: modelResponse.rawResponse,
    logprobs: modelResponse.logprobs
  });
}
|
1262
|
+
// Executes all tool calls whose tool defines an `execute` function, in
// parallel. Tool calls without an executable tool are skipped. Returns the
// results (id, name, args, result) in tool-call order.
async function executeTools({
  toolCalls,
  tools
}) {
  const settled = await Promise.all(
    toolCalls.map(async (toolCall) => {
      const toolDefinition = tools[toolCall.toolName];
      // Tools without an execute function produce no result entry.
      if (toolDefinition?.execute == null) {
        return void 0;
      }
      const result = await toolDefinition.execute(toolCall.args);
      return {
        toolCallId: toolCall.toolCallId,
        toolName: toolCall.toolName,
        args: toolCall.args,
        result
      };
    })
  );
  // Drop the placeholders left by non-executable tools.
  return settled.filter((entry) => entry != null);
}
|
1285
|
+
// Plain result container for generateText. Copies exactly the documented
// result fields from the options bag onto the instance; unknown keys in
// `options` are deliberately not carried over.
var GenerateTextResult = class {
  constructor(options) {
    Object.assign(this, {
      text: options.text,
      toolCalls: options.toolCalls,
      toolResults: options.toolResults,
      finishReason: options.finishReason,
      usage: options.usage,
      warnings: options.warnings,
      rawResponse: options.rawResponse,
      logprobs: options.logprobs
    });
  }
};
|
1297
|
+
var experimental_generateText = generateText;
|
1298
|
+
|
1299
|
+
// core/generate-text/run-tools-transformation.ts
|
1300
|
+
var import_provider7 = require("@ai-sdk/provider");
|
1301
|
+
|
1302
|
+
// shared/generate-id.ts
|
55
1303
|
var import_non_secure = require("nanoid/non-secure");
|
1304
|
+
// 7-character alphanumeric ID generator built on nanoid's non-secure
// customAlphabet — fast but NOT cryptographically secure; used for internal
// bookkeeping ids (e.g. tracking outstanding tool executions).
var generateId = (0, import_non_secure.customAlphabet)(
  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
  7
);
|
1308
|
+
|
1309
|
+
// core/generate-text/run-tools-transformation.ts
|
1310
|
+
// Merges the model's generator stream with asynchronously produced tool
// results into a single ReadableStream of events. Tool calls found in the
// generator stream are validated, forwarded, and (when executable) run in
// the background; their results/errors are injected via a second stream.
// The merged stream closes only after the generator stream has flushed AND
// every outstanding tool execution has settled.
function runToolsTransformation({
  tools,
  generatorStream
}) {
  // Set after the generator stream flushes; gates closing the results stream.
  let canClose = false;
  // Ids of tool executions that have started but not yet settled.
  const outstandingToolCalls = /* @__PURE__ */ new Set();
  // Side channel for tool results/errors, filled from async callbacks.
  let toolResultsStreamController = null;
  const toolResultsStream = new ReadableStream({
    start(controller) {
      toolResultsStreamController = controller;
    }
  });
  const forwardStream = new TransformStream({
    transform(chunk, controller) {
      const chunkType = chunk.type;
      switch (chunkType) {
        case "text-delta":
        case "error": {
          // Pass through unchanged.
          controller.enqueue(chunk);
          break;
        }
        case "tool-call": {
          const toolName = chunk.toolName;
          // Unknown-tool conditions are reported as error events on the
          // results stream rather than thrown.
          if (tools == null) {
            toolResultsStreamController.enqueue({
              type: "error",
              error: new import_provider7.NoSuchToolError({ toolName: chunk.toolName })
            });
            break;
          }
          const tool2 = tools[toolName];
          if (tool2 == null) {
            toolResultsStreamController.enqueue({
              type: "error",
              error: new import_provider7.NoSuchToolError({
                toolName: chunk.toolName,
                availableTools: Object.keys(tools)
              })
            });
            break;
          }
          try {
            // Validates the tool call + arguments (may throw).
            const toolCall = parseToolCall({
              toolCall: chunk,
              tools
            });
            controller.enqueue(toolCall);
            if (tool2.execute != null) {
              // Fire-and-track: the execution settles later and pushes its
              // result/error onto the results stream.
              const toolExecutionId = generateId();
              outstandingToolCalls.add(toolExecutionId);
              tool2.execute(toolCall.args).then(
                (result) => {
                  toolResultsStreamController.enqueue({
                    ...toolCall,
                    type: "tool-result",
                    result
                  });
                  outstandingToolCalls.delete(toolExecutionId);
                  // Close once the generator has flushed and we were the
                  // last outstanding execution.
                  if (canClose && outstandingToolCalls.size === 0) {
                    toolResultsStreamController.close();
                  }
                },
                (error) => {
                  toolResultsStreamController.enqueue({
                    type: "error",
                    error
                  });
                  outstandingToolCalls.delete(toolExecutionId);
                  if (canClose && outstandingToolCalls.size === 0) {
                    toolResultsStreamController.close();
                  }
                }
              );
            }
          } catch (error) {
            // parseToolCall failures become error events, not exceptions.
            toolResultsStreamController.enqueue({
              type: "error",
              error
            });
          }
          break;
        }
        case "finish": {
          // Normalize raw provider usage into the public token-usage shape.
          controller.enqueue({
            type: "finish",
            finishReason: chunk.finishReason,
            logprobs: chunk.logprobs,
            usage: calculateTokenUsage(chunk.usage)
          });
          break;
        }
        case "tool-call-delta": {
          // Intentionally dropped: only complete tool calls are forwarded.
          break;
        }
        default: {
          // Exhaustiveness guard (compiled from TypeScript's `never` check).
          const _exhaustiveCheck = chunkType;
          throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
        }
      }
    },
    flush() {
      canClose = true;
      // If nothing is still executing, the results stream can close now;
      // otherwise the last settling execution closes it (see above).
      if (outstandingToolCalls.size === 0) {
        toolResultsStreamController.close();
      }
    }
  });
  // Merge: forwarded generator chunks and tool results are interleaved into
  // one output stream; overall close is driven by the results stream, which
  // by construction closes last.
  return new ReadableStream({
    async start(controller) {
      return Promise.all([
        generatorStream.pipeThrough(forwardStream).pipeTo(
          new WritableStream({
            write(chunk) {
              controller.enqueue(chunk);
            },
            close() {
            }
          })
        ),
        toolResultsStream.pipeTo(
          new WritableStream({
            write(chunk) {
              controller.enqueue(chunk);
            },
            close() {
              controller.close();
            }
          })
        )
      ]);
    }
  });
}
|
1443
|
+
|
1444
|
+
// core/generate-text/stream-text.ts
|
1445
|
+
// Streams text and tool events from a language model. Validates the prompt,
// starts model.doStream with retry/backoff, pipes the raw stream through
// runToolsTransformation (which validates/executes tool calls), and wraps
// the merged stream in a StreamTextResult.
async function streamText({
  model,
  tools,
  system,
  prompt,
  messages,
  maxRetries,
  abortSignal,
  ...settings
}) {
  const retry = retryWithExponentialBackoff({ maxRetries });
  const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
  const { stream, warnings, rawResponse } = await retry(
    () => model.doStream({
      mode: {
        type: "regular",
        // Expose each registered tool as a provider "function" definition.
        tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
          type: "function",
          name,
          description: tool2.description,
          parameters: convertZodToJSONSchema(tool2.parameters)
        }))
      },
      ...prepareCallSettings(settings),
      inputFormat: validatedPrompt.type,
      prompt: convertToLanguageModelPrompt(validatedPrompt),
      abortSignal
    })
  );
  return new StreamTextResult({
    stream: runToolsTransformation({
      tools,
      generatorStream: stream
    }),
    warnings,
    rawResponse
  });
}
|
1483
|
+
// Result wrapper for streamText. Exposes the merged event stream in several
// shapes (text deltas, full events, AIStream, HTTP responses) and can pipe
// it into Node.js-style responses.
// NOTE(review): every accessor/method consumes `this.originalStream` or a
// stream derived from it, which can only be read once — presumably callers
// use exactly one consumption path per result.
var StreamTextResult = class {
  constructor({
    stream,
    warnings,
    rawResponse
  }) {
    this.originalStream = stream;
    this.warnings = warnings;
    this.rawResponse = rawResponse;
  }
  /**
  A text stream that returns only the generated text deltas. You can use it
  as either an AsyncIterable or a ReadableStream. When an error occurs, the
  stream will throw the error.
   */
  get textStream() {
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        if (chunk.type === "text-delta") {
          // Empty deltas are dropped.
          if (chunk.textDelta.length > 0) {
            controller.enqueue(chunk.textDelta);
          }
        } else if (chunk.type === "error") {
          throw chunk.error;
        }
      }
    });
  }
  /**
  A stream with all events, including text deltas, tool calls, tool results, and
  errors.
  You can use it as either an AsyncIterable or a ReadableStream. When an error occurs, the
  stream will throw the error.
   */
  get fullStream() {
    return createAsyncIterableStream(this.originalStream, {
      transform(chunk, controller) {
        if (chunk.type === "text-delta") {
          // Empty text deltas are dropped; all other chunks pass through.
          if (chunk.textDelta.length > 0) {
            controller.enqueue(chunk);
          }
        } else {
          controller.enqueue(chunk);
        }
      }
    });
  }
  /**
  Converts the result to an `AIStream` object that is compatible with `StreamingTextResponse`.
  It can be used with the `useChat` and `useCompletion` hooks.
  
  @param callbacks 
  Stream callbacks that will be called when the stream emits events.
  
  @returns an `AIStream` object.
   */
  toAIStream(callbacks) {
    return this.textStream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
  }
  /**
  Writes stream data output to a Node.js response-like object.
  It sets a `Content-Type` header to `text/plain; charset=utf-8` and 
  writes each stream data part as a separate chunk.
  
  @param response A Node.js response-like object (ServerResponse).
  @param init Optional headers and status code.
   */
  pipeAIStreamToResponse(response, init) {
    var _a;
    response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
      "Content-Type": "text/plain; charset=utf-8",
      ...init == null ? void 0 : init.headers
    });
    const reader = this.textStream.pipeThrough(createCallbacksTransformer(void 0)).pipeThrough(createStreamDataTransformer()).getReader();
    // Drain the stream into the response; fire-and-forget on purpose.
    const read = async () => {
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done)
            break;
          response.write(value);
        }
      } catch (error) {
        // NOTE(review): rethrowing from a floating async function — the error
        // surfaces as an unhandled rejection; `finally` still ends the response.
        throw error;
      } finally {
        response.end();
      }
    };
    read();
  }
  /**
  Writes text delta output to a Node.js response-like object.
  It sets a `Content-Type` header to `text/plain; charset=utf-8` and 
  writes each text delta as a separate chunk.
  
  @param response A Node.js response-like object (ServerResponse).
  @param init Optional headers and status code.
   */
  pipeTextStreamToResponse(response, init) {
    var _a;
    response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
      "Content-Type": "text/plain; charset=utf-8",
      ...init == null ? void 0 : init.headers
    });
    const reader = this.textStream.getReader();
    // Drain text deltas into the response as UTF-8 bytes; fire-and-forget.
    const read = async () => {
      const encoder = new TextEncoder();
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done)
            break;
          response.write(encoder.encode(value));
        }
      } catch (error) {
        // NOTE(review): same unhandled-rejection caveat as pipeAIStreamToResponse.
        throw error;
      } finally {
        response.end();
      }
    };
    read();
  }
  /**
  Converts the result to a streamed response object with a stream data part stream.
  It can be used with the `useChat` and `useCompletion` hooks.
  
  @param init Optional headers.
  
  @return A response object.
   */
  toAIStreamResponse(init) {
    return new StreamingTextResponse(this.toAIStream(), init);
  }
  /**
  Creates a simple text stream response.
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
  Non-text-delta events are ignored.
  
  @param init Optional headers and status code.
   */
  toTextStreamResponse(init) {
    var _a;
    const encoder = new TextEncoder();
    return new Response(
      this.textStream.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            controller.enqueue(encoder.encode(chunk));
          }
        })
      ),
      {
        status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
        headers: {
          "Content-Type": "text/plain; charset=utf-8",
          ...init == null ? void 0 : init.headers
        }
      }
    );
  }
};
|
1644
|
+
var experimental_streamText = streamText;
|
1645
|
+
|
1646
|
+
// core/tool/tool.ts
|
1647
|
+
// Identity helper: returns its argument unchanged. It exists so that (in the
// TypeScript source this was compiled from) tool definitions get proper type
// inference; at runtime it is a no-op.
function tool(tool2) {
  return tool2;
}
|
1650
|
+
|
1651
|
+
// core/types/errors.ts
|
1652
|
+
var import_provider8 = require("@ai-sdk/provider");
|
56
1653
|
|
57
1654
|
// shared/stream-parts.ts
|
58
1655
|
var textStreamPart = {
|
@@ -154,9 +1751,9 @@ var toolCallStreamPart = {
|
|
154
1751
|
code: "7",
|
155
1752
|
name: "tool_calls",
|
156
1753
|
parse: (value) => {
|
157
|
-
if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some(
|
158
|
-
tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string"
|
159
|
-
|
1754
|
+
if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some(
|
1755
|
+
(tc) => tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string"
|
1756
|
+
)) {
|
160
1757
|
throw new Error(
|
161
1758
|
'"tool_calls" parts expect an object with a ToolCallPayload.'
|
162
1759
|
);
|
@@ -234,11 +1831,50 @@ function formatStreamPart(type, value) {
|
|
234
1831
|
`;
|
235
1832
|
}
|
236
1833
|
|
1834
|
+
// shared/read-data-stream.ts
|
1835
|
+
var NEWLINE = "\n".charCodeAt(0);
|
1836
|
+
// Joins all pending byte chunks into a single Uint8Array of `totalLength`
// bytes and empties the source array in place (the caller reuses it for the
// next batch of chunks).
function concatChunks(chunks, totalLength) {
  const merged = new Uint8Array(totalLength);
  let writePosition = 0;
  for (const chunk of chunks) {
    merged.set(chunk, writePosition);
    writePosition += chunk.length;
  }
  // Reset the caller's buffer list (intentional in-place mutation).
  chunks.length = 0;
  return merged;
}
|
1846
|
+
// Async generator that reads a byte stream of newline-delimited stream parts
// and yields each parsed part. Bytes are buffered until a chunk ends in a
// newline, then decoded and split; supports cooperative abort via the
// optional `isAborted` callback (checked after each batch is yielded).
async function* readDataStream(reader, {
  isAborted
} = {}) {
  const decoder = new TextDecoder();
  const chunks = [];
  let totalLength = 0;
  while (true) {
    const { value } = await reader.read();
    if (value) {
      chunks.push(value);
      totalLength += value.length;
      // Keep buffering until the batch ends on a line boundary.
      if (value[value.length - 1] !== NEWLINE) {
        continue;
      }
    }
    // Stream ended (no value) with nothing buffered: done.
    if (chunks.length === 0) {
      break;
    }
    const concatenatedChunks = concatChunks(chunks, totalLength);
    totalLength = 0;
    // { stream: true } keeps multi-byte UTF-8 sequences split across reads intact.
    const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
    for (const streamPart of streamParts2) {
      yield streamPart;
    }
    // Cooperative abort: cancel the underlying reader and stop iterating.
    if (isAborted == null ? void 0 : isAborted()) {
      reader.cancel();
      break;
    }
  }
}
|
1876
|
+
|
237
1877
|
// shared/utils.ts
|
238
|
-
var nanoid = (0, import_non_secure.customAlphabet)(
|
239
|
-
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
|
240
|
-
7
|
241
|
-
);
|
242
1878
|
function createChunkDecoder(complex) {
|
243
1879
|
const decoder = new TextDecoder();
|
244
1880
|
if (!complex) {
|
@@ -254,7 +1890,6 @@ function createChunkDecoder(complex) {
|
|
254
1890
|
};
|
255
1891
|
}
|
256
1892
|
var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
|
257
|
-
var COMPLEX_HEADER = "X-Experimental-Stream-Data";
|
258
1893
|
|
259
1894
|
// streams/ai-stream.ts
|
260
1895
|
var import_eventsource_parser = require("eventsource-parser");
|
@@ -379,7 +2014,7 @@ function readableFromAsyncIterable(iterable) {
|
|
379
2014
|
}
|
380
2015
|
|
381
2016
|
// streams/stream-data.ts
|
382
|
-
var
|
2017
|
+
var StreamData = class {
|
383
2018
|
constructor() {
|
384
2019
|
this.encoder = new TextEncoder();
|
385
2020
|
this.controller = null;
|
@@ -465,14 +2100,7 @@ var experimental_StreamData = class {
|
|
465
2100
|
this.messageAnnotations.push(value);
|
466
2101
|
}
|
467
2102
|
};
|
468
|
-
function createStreamDataTransformer(
|
469
|
-
if (!experimental_streamData) {
|
470
|
-
return new TransformStream({
|
471
|
-
transform: async (chunk, controller) => {
|
472
|
-
controller.enqueue(chunk);
|
473
|
-
}
|
474
|
-
});
|
475
|
-
}
|
2103
|
+
function createStreamDataTransformer() {
|
476
2104
|
const encoder = new TextEncoder();
|
477
2105
|
const decoder = new TextDecoder();
|
478
2106
|
return new TransformStream({
|
@@ -482,6 +2110,8 @@ function createStreamDataTransformer(experimental_streamData) {
|
|
482
2110
|
}
|
483
2111
|
});
|
484
2112
|
}
|
2113
|
+
var experimental_StreamData = class extends StreamData {
|
2114
|
+
};
|
485
2115
|
|
486
2116
|
// streams/anthropic-stream.ts
|
487
2117
|
function parseAnthropicStream() {
|
@@ -521,16 +2151,16 @@ async function* streamable(stream) {
|
|
521
2151
|
}
|
522
2152
|
function AnthropicStream(res, cb) {
|
523
2153
|
if (Symbol.asyncIterator in res) {
|
524
|
-
return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(
|
2154
|
+
return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
|
525
2155
|
} else {
|
526
2156
|
return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
|
527
|
-
createStreamDataTransformer(
|
2157
|
+
createStreamDataTransformer()
|
528
2158
|
);
|
529
2159
|
}
|
530
2160
|
}
|
531
2161
|
|
532
2162
|
// streams/assistant-response.ts
|
533
|
-
function
|
2163
|
+
function AssistantResponse({ threadId, messageId }, process2) {
|
534
2164
|
const stream = new ReadableStream({
|
535
2165
|
async start(controller) {
|
536
2166
|
var _a;
|
@@ -621,6 +2251,7 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
|
|
621
2251
|
}
|
622
2252
|
});
|
623
2253
|
}
|
2254
|
+
var experimental_AssistantResponse = AssistantResponse;
|
624
2255
|
|
625
2256
|
// streams/aws-bedrock-stream.ts
|
626
2257
|
async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
|
@@ -638,20 +2269,17 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
|
|
638
2269
|
}
|
639
2270
|
}
|
640
2271
|
}
|
2272
|
+
function AWSBedrockAnthropicMessagesStream(response, callbacks) {
|
2273
|
+
return AWSBedrockStream(response, callbacks, (chunk) => {
|
2274
|
+
var _a;
|
2275
|
+
return (_a = chunk.delta) == null ? void 0 : _a.text;
|
2276
|
+
});
|
2277
|
+
}
|
641
2278
|
function AWSBedrockAnthropicStream(response, callbacks) {
|
642
2279
|
return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
|
643
2280
|
}
|
644
2281
|
function AWSBedrockCohereStream(response, callbacks) {
|
645
|
-
return AWSBedrockStream(
|
646
|
-
response,
|
647
|
-
callbacks,
|
648
|
-
// As of 2023-11-17, Bedrock does not support streaming for Cohere,
|
649
|
-
// so we take the full generation:
|
650
|
-
(chunk) => {
|
651
|
-
var _a, _b;
|
652
|
-
return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
|
653
|
-
}
|
654
|
-
);
|
2282
|
+
return AWSBedrockStream(response, callbacks, (chunk) => chunk == null ? void 0 : chunk.text);
|
655
2283
|
}
|
656
2284
|
function AWSBedrockLlama2Stream(response, callbacks) {
|
657
2285
|
return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
|
@@ -659,9 +2287,7 @@ function AWSBedrockLlama2Stream(response, callbacks) {
|
|
659
2287
|
function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
|
660
2288
|
return readableFromAsyncIterable(
|
661
2289
|
asDeltaIterable(response, extractTextDeltaFromChunk)
|
662
|
-
).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
663
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
664
|
-
);
|
2290
|
+
).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
|
665
2291
|
}
|
666
2292
|
|
667
2293
|
// streams/cohere-stream.ts
|
@@ -716,13 +2342,9 @@ async function* streamable2(stream) {
|
|
716
2342
|
}
|
717
2343
|
function CohereStream(reader, callbacks) {
|
718
2344
|
if (Symbol.asyncIterator in reader) {
|
719
|
-
return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
720
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
721
|
-
);
|
2345
|
+
return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
|
722
2346
|
} else {
|
723
|
-
return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
724
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
725
|
-
);
|
2347
|
+
return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
|
726
2348
|
}
|
727
2349
|
}
|
728
2350
|
|
@@ -741,7 +2363,7 @@ async function* streamable3(response) {
|
|
741
2363
|
}
|
742
2364
|
}
|
743
2365
|
function GoogleGenerativeAIStream(response, cb) {
|
744
|
-
return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer(
|
2366
|
+
return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
|
745
2367
|
}
|
746
2368
|
|
747
2369
|
// streams/huggingface-stream.ts
|
@@ -769,9 +2391,7 @@ function createParser3(res) {
|
|
769
2391
|
});
|
770
2392
|
}
|
771
2393
|
function HuggingFaceStream(res, callbacks) {
|
772
|
-
return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
773
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
774
|
-
);
|
2394
|
+
return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
|
775
2395
|
}
|
776
2396
|
|
777
2397
|
// streams/inkeep-stream.ts
|
@@ -808,7 +2428,7 @@ function InkeepStream(res, callbacks) {
|
|
808
2428
|
}
|
809
2429
|
};
|
810
2430
|
return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
|
811
|
-
createStreamDataTransformer(
|
2431
|
+
createStreamDataTransformer()
|
812
2432
|
);
|
813
2433
|
}
|
814
2434
|
|
@@ -833,9 +2453,7 @@ function LangChainStream(callbacks) {
|
|
833
2453
|
}
|
834
2454
|
};
|
835
2455
|
return {
|
836
|
-
stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
837
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
838
|
-
),
|
2456
|
+
stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer()),
|
839
2457
|
writer,
|
840
2458
|
handlers: {
|
841
2459
|
handleLLMNewToken: async (token) => {
|
@@ -886,9 +2504,7 @@ async function* streamable4(stream) {
|
|
886
2504
|
}
|
887
2505
|
function MistralStream(response, callbacks) {
|
888
2506
|
const stream = readableFromAsyncIterable(streamable4(response));
|
889
|
-
return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
|
890
|
-
createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
|
891
|
-
);
|
2507
|
+
return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
|
892
2508
|
}
|
893
2509
|
|
894
2510
|
// streams/openai-stream.ts
|
@@ -1032,9 +2648,7 @@ function OpenAIStream(res, callbacks) {
|
|
1032
2648
|
const functionCallTransformer = createFunctionCallTransformer(cb);
|
1033
2649
|
return stream.pipeThrough(functionCallTransformer);
|
1034
2650
|
} else {
|
1035
|
-
return stream.pipeThrough(
|
1036
|
-
createStreamDataTransformer(cb == null ? void 0 : cb.experimental_streamData)
|
1037
|
-
);
|
2651
|
+
return stream.pipeThrough(createStreamDataTransformer());
|
1038
2652
|
}
|
1039
2653
|
}
|
1040
2654
|
function createFunctionCallTransformer(callbacks) {
|
@@ -1044,7 +2658,6 @@ function createFunctionCallTransformer(callbacks) {
|
|
1044
2658
|
let aggregatedFinalCompletionResponse = "";
|
1045
2659
|
let isFunctionStreamingIn = false;
|
1046
2660
|
let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
|
1047
|
-
const isComplexMode = callbacks == null ? void 0 : callbacks.experimental_streamData;
|
1048
2661
|
const decode = createChunkDecoder();
|
1049
2662
|
return new TransformStream({
|
1050
2663
|
async transform(chunk, controller) {
|
@@ -1059,7 +2672,7 @@ function createFunctionCallTransformer(callbacks) {
|
|
1059
2672
|
}
|
1060
2673
|
if (!isFunctionStreamingIn) {
|
1061
2674
|
controller.enqueue(
|
1062
|
-
|
2675
|
+
textEncoder.encode(formatStreamPart("text", message))
|
1063
2676
|
);
|
1064
2677
|
return;
|
1065
2678
|
} else {
|
@@ -1111,13 +2724,13 @@ function createFunctionCallTransformer(callbacks) {
|
|
1111
2724
|
const toolCalls = {
|
1112
2725
|
tools: []
|
1113
2726
|
};
|
1114
|
-
for (const
|
2727
|
+
for (const tool2 of payload.tool_calls) {
|
1115
2728
|
toolCalls.tools.push({
|
1116
|
-
id:
|
2729
|
+
id: tool2.id,
|
1117
2730
|
type: "function",
|
1118
2731
|
func: {
|
1119
|
-
name:
|
1120
|
-
arguments: JSON.parse(
|
2732
|
+
name: tool2.function.name,
|
2733
|
+
arguments: JSON.parse(tool2.function.arguments)
|
1121
2734
|
}
|
1122
2735
|
});
|
1123
2736
|
}
|
@@ -1170,17 +2783,17 @@ function createFunctionCallTransformer(callbacks) {
|
|
1170
2783
|
if (!functionResponse) {
|
1171
2784
|
controller.enqueue(
|
1172
2785
|
textEncoder.encode(
|
1173
|
-
|
2786
|
+
formatStreamPart(
|
1174
2787
|
payload.function_call ? "function_call" : "tool_calls",
|
1175
2788
|
// parse to prevent double-encoding:
|
1176
2789
|
JSON.parse(aggregatedResponse)
|
1177
|
-
)
|
2790
|
+
)
|
1178
2791
|
)
|
1179
2792
|
);
|
1180
2793
|
return;
|
1181
2794
|
} else if (typeof functionResponse === "string") {
|
1182
2795
|
controller.enqueue(
|
1183
|
-
|
2796
|
+
textEncoder.encode(formatStreamPart("text", functionResponse))
|
1184
2797
|
);
|
1185
2798
|
aggregatedFinalCompletionResponse = functionResponse;
|
1186
2799
|
return;
|
@@ -1230,53 +2843,10 @@ async function ReplicateStream(res, cb, options) {
|
|
1230
2843
|
}
|
1231
2844
|
});
|
1232
2845
|
return AIStream(eventStream, void 0, cb).pipeThrough(
|
1233
|
-
createStreamDataTransformer(
|
2846
|
+
createStreamDataTransformer()
|
1234
2847
|
);
|
1235
2848
|
}
|
1236
2849
|
|
1237
|
-
// shared/read-data-stream.ts
|
1238
|
-
var NEWLINE = "\n".charCodeAt(0);
|
1239
|
-
function concatChunks(chunks, totalLength) {
|
1240
|
-
const concatenatedChunks = new Uint8Array(totalLength);
|
1241
|
-
let offset = 0;
|
1242
|
-
for (const chunk of chunks) {
|
1243
|
-
concatenatedChunks.set(chunk, offset);
|
1244
|
-
offset += chunk.length;
|
1245
|
-
}
|
1246
|
-
chunks.length = 0;
|
1247
|
-
return concatenatedChunks;
|
1248
|
-
}
|
1249
|
-
async function* readDataStream(reader, {
|
1250
|
-
isAborted
|
1251
|
-
} = {}) {
|
1252
|
-
const decoder = new TextDecoder();
|
1253
|
-
const chunks = [];
|
1254
|
-
let totalLength = 0;
|
1255
|
-
while (true) {
|
1256
|
-
const { value } = await reader.read();
|
1257
|
-
if (value) {
|
1258
|
-
chunks.push(value);
|
1259
|
-
totalLength += value.length;
|
1260
|
-
if (value[value.length - 1] !== NEWLINE) {
|
1261
|
-
continue;
|
1262
|
-
}
|
1263
|
-
}
|
1264
|
-
if (chunks.length === 0) {
|
1265
|
-
break;
|
1266
|
-
}
|
1267
|
-
const concatenatedChunks = concatChunks(chunks, totalLength);
|
1268
|
-
totalLength = 0;
|
1269
|
-
const streamParts2 = decoder.decode(concatenatedChunks, { stream: true }).split("\n").filter((line) => line !== "").map(parseStreamPart);
|
1270
|
-
for (const streamPart of streamParts2) {
|
1271
|
-
yield streamPart;
|
1272
|
-
}
|
1273
|
-
if (isAborted == null ? void 0 : isAborted()) {
|
1274
|
-
reader.cancel();
|
1275
|
-
break;
|
1276
|
-
}
|
1277
|
-
}
|
1278
|
-
}
|
1279
|
-
|
1280
2850
|
// shared/parse-complex-response.ts
|
1281
2851
|
function assignAnnotationsToMessage(message, annotations) {
|
1282
2852
|
if (!message || !annotations || !annotations.length)
|
@@ -1288,7 +2858,7 @@ async function parseComplexResponse({
|
|
1288
2858
|
abortControllerRef,
|
1289
2859
|
update,
|
1290
2860
|
onFinish,
|
1291
|
-
generateId =
|
2861
|
+
generateId: generateId2 = generateId,
|
1292
2862
|
getCurrentDate = () => /* @__PURE__ */ new Date()
|
1293
2863
|
}) {
|
1294
2864
|
const createdAt = getCurrentDate();
|
@@ -1307,7 +2877,7 @@ async function parseComplexResponse({
|
|
1307
2877
|
};
|
1308
2878
|
} else {
|
1309
2879
|
prefixMap["text"] = {
|
1310
|
-
id:
|
2880
|
+
id: generateId2(),
|
1311
2881
|
role: "assistant",
|
1312
2882
|
content: value,
|
1313
2883
|
createdAt
|
@@ -1317,7 +2887,7 @@ async function parseComplexResponse({
|
|
1317
2887
|
let functionCallMessage = null;
|
1318
2888
|
if (type === "function_call") {
|
1319
2889
|
prefixMap["function_call"] = {
|
1320
|
-
id:
|
2890
|
+
id: generateId2(),
|
1321
2891
|
role: "assistant",
|
1322
2892
|
content: "",
|
1323
2893
|
function_call: value.function_call,
|
@@ -1329,7 +2899,7 @@ async function parseComplexResponse({
|
|
1329
2899
|
let toolCallMessage = null;
|
1330
2900
|
if (type === "tool_calls") {
|
1331
2901
|
prefixMap["tool_calls"] = {
|
1332
|
-
id:
|
2902
|
+
id: generateId2(),
|
1333
2903
|
role: "assistant",
|
1334
2904
|
content: "",
|
1335
2905
|
tool_calls: value.tool_calls,
|
@@ -1391,74 +2961,41 @@ async function parseComplexResponse({
|
|
1391
2961
|
// streams/streaming-react-response.ts
|
1392
2962
|
var experimental_StreamingReactResponse = class {
|
1393
2963
|
constructor(res, options) {
|
1394
|
-
var _a;
|
2964
|
+
var _a, _b;
|
1395
2965
|
let resolveFunc = () => {
|
1396
2966
|
};
|
1397
2967
|
let next = new Promise((resolve) => {
|
1398
2968
|
resolveFunc = resolve;
|
1399
2969
|
});
|
1400
|
-
|
1401
|
-
|
1402
|
-
|
1403
|
-
)
|
1404
|
-
|
1405
|
-
|
1406
|
-
|
1407
|
-
|
1408
|
-
|
1409
|
-
|
1410
|
-
|
1411
|
-
|
1412
|
-
|
1413
|
-
|
1414
|
-
|
1415
|
-
|
1416
|
-
|
1417
|
-
|
1418
|
-
|
2970
|
+
const processedStream = (options == null ? void 0 : options.data) != null ? res.pipeThrough((_a = options == null ? void 0 : options.data) == null ? void 0 : _a.stream) : res;
|
2971
|
+
let lastPayload = void 0;
|
2972
|
+
parseComplexResponse({
|
2973
|
+
reader: processedStream.getReader(),
|
2974
|
+
update: (merged, data) => {
|
2975
|
+
var _a2, _b2, _c;
|
2976
|
+
const content = (_b2 = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b2 : "";
|
2977
|
+
const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content, data })) || content;
|
2978
|
+
const payload = { ui, content };
|
2979
|
+
const resolvePrevious = resolveFunc;
|
2980
|
+
const nextRow = new Promise((resolve) => {
|
2981
|
+
resolveFunc = resolve;
|
2982
|
+
});
|
2983
|
+
resolvePrevious({
|
2984
|
+
next: nextRow,
|
2985
|
+
...payload
|
2986
|
+
});
|
2987
|
+
lastPayload = payload;
|
2988
|
+
},
|
2989
|
+
generateId: (_b = options == null ? void 0 : options.generateId) != null ? _b : generateId,
|
2990
|
+
onFinish: () => {
|
2991
|
+
if (lastPayload !== void 0) {
|
2992
|
+
resolveFunc({
|
2993
|
+
next: null,
|
2994
|
+
...lastPayload
|
1419
2995
|
});
|
1420
|
-
lastPayload = payload;
|
1421
|
-
},
|
1422
|
-
generateId: (_a = options.generateId) != null ? _a : nanoid,
|
1423
|
-
onFinish: () => {
|
1424
|
-
if (lastPayload !== void 0) {
|
1425
|
-
resolveFunc({
|
1426
|
-
next: null,
|
1427
|
-
...lastPayload
|
1428
|
-
});
|
1429
|
-
}
|
1430
2996
|
}
|
1431
|
-
});
|
1432
|
-
return next;
|
1433
|
-
}
|
1434
|
-
let content = "";
|
1435
|
-
const decode = createChunkDecoder();
|
1436
|
-
const reader = res.getReader();
|
1437
|
-
async function readChunk() {
|
1438
|
-
var _a2;
|
1439
|
-
const { done, value } = await reader.read();
|
1440
|
-
if (!done) {
|
1441
|
-
content += decode(value);
|
1442
2997
|
}
|
1443
|
-
|
1444
|
-
const payload = {
|
1445
|
-
ui,
|
1446
|
-
content
|
1447
|
-
};
|
1448
|
-
const resolvePrevious = resolveFunc;
|
1449
|
-
const nextRow = done ? null : new Promise((resolve) => {
|
1450
|
-
resolveFunc = resolve;
|
1451
|
-
});
|
1452
|
-
resolvePrevious({
|
1453
|
-
next: nextRow,
|
1454
|
-
...payload
|
1455
|
-
});
|
1456
|
-
if (done) {
|
1457
|
-
return;
|
1458
|
-
}
|
1459
|
-
await readChunk();
|
1460
|
-
}
|
1461
|
-
readChunk();
|
2998
|
+
});
|
1462
2999
|
return next;
|
1463
3000
|
}
|
1464
3001
|
};
|
@@ -1475,7 +3012,6 @@ var StreamingTextResponse = class extends Response {
|
|
1475
3012
|
status: 200,
|
1476
3013
|
headers: {
|
1477
3014
|
"Content-Type": "text/plain; charset=utf-8",
|
1478
|
-
[COMPLEX_HEADER]: data ? "true" : "false",
|
1479
3015
|
...init == null ? void 0 : init.headers
|
1480
3016
|
}
|
1481
3017
|
});
|
@@ -1502,21 +3038,45 @@ function streamToResponse(res, response, init) {
|
|
1502
3038
|
// Annotate the CommonJS export names for ESM import in node:
|
1503
3039
|
0 && (module.exports = {
|
1504
3040
|
AIStream,
|
3041
|
+
APICallError,
|
3042
|
+
AWSBedrockAnthropicMessagesStream,
|
1505
3043
|
AWSBedrockAnthropicStream,
|
1506
3044
|
AWSBedrockCohereStream,
|
1507
3045
|
AWSBedrockLlama2Stream,
|
1508
3046
|
AWSBedrockStream,
|
1509
3047
|
AnthropicStream,
|
1510
|
-
|
3048
|
+
AssistantResponse,
|
1511
3049
|
CohereStream,
|
3050
|
+
EmptyResponseBodyError,
|
3051
|
+
GenerateObjectResult,
|
3052
|
+
GenerateTextResult,
|
1512
3053
|
GoogleGenerativeAIStream,
|
1513
3054
|
HuggingFaceStream,
|
1514
3055
|
InkeepStream,
|
3056
|
+
InvalidArgumentError,
|
3057
|
+
InvalidDataContentError,
|
3058
|
+
InvalidPromptError,
|
3059
|
+
InvalidResponseDataError,
|
3060
|
+
InvalidToolArgumentsError,
|
3061
|
+
JSONParseError,
|
1515
3062
|
LangChainStream,
|
3063
|
+
LoadAPIKeyError,
|
1516
3064
|
MistralStream,
|
3065
|
+
NoObjectGeneratedError,
|
3066
|
+
NoSuchToolError,
|
1517
3067
|
OpenAIStream,
|
1518
3068
|
ReplicateStream,
|
3069
|
+
RetryError,
|
3070
|
+
StreamData,
|
3071
|
+
StreamObjectResult,
|
3072
|
+
StreamTextResult,
|
1519
3073
|
StreamingTextResponse,
|
3074
|
+
ToolCallParseError,
|
3075
|
+
TypeValidationError,
|
3076
|
+
UnsupportedFunctionalityError,
|
3077
|
+
UnsupportedJSONSchemaError,
|
3078
|
+
convertDataContentToBase64String,
|
3079
|
+
convertDataContentToUint8Array,
|
1520
3080
|
createCallbacksTransformer,
|
1521
3081
|
createChunkDecoder,
|
1522
3082
|
createEventStreamTransformer,
|
@@ -1524,10 +3084,23 @@ function streamToResponse(res, response, init) {
|
|
1524
3084
|
experimental_AssistantResponse,
|
1525
3085
|
experimental_StreamData,
|
1526
3086
|
experimental_StreamingReactResponse,
|
3087
|
+
experimental_generateObject,
|
3088
|
+
experimental_generateText,
|
3089
|
+
experimental_streamObject,
|
3090
|
+
experimental_streamText,
|
3091
|
+
formatStreamPart,
|
3092
|
+
generateId,
|
3093
|
+
generateObject,
|
3094
|
+
generateText,
|
1527
3095
|
isStreamStringEqualToType,
|
1528
3096
|
nanoid,
|
3097
|
+
parseStreamPart,
|
3098
|
+
readDataStream,
|
1529
3099
|
readableFromAsyncIterable,
|
3100
|
+
streamObject,
|
3101
|
+
streamText,
|
1530
3102
|
streamToResponse,
|
3103
|
+
tool,
|
1531
3104
|
trimStartOfStreamHelper
|
1532
3105
|
});
|
1533
3106
|
//# sourceMappingURL=index.js.map
|