ai 5.0.0-canary.20 → 5.0.0-canary.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +60 -0
- package/README.md +1 -1
- package/dist/index.d.mts +731 -795
- package/dist/index.d.ts +731 -795
- package/dist/index.js +1244 -1678
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1167 -1600
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +7 -13
- package/dist/internal/index.d.ts +7 -13
- package/dist/internal/index.js +126 -126
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +119 -119
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.js
CHANGED
@@ -50,8 +50,7 @@ __export(src_exports, {
|
|
50
50
|
TypeValidationError: () => import_provider16.TypeValidationError,
|
51
51
|
UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
|
52
52
|
appendClientMessage: () => appendClientMessage,
|
53
|
-
|
54
|
-
asSchema: () => import_provider_utils24.asSchema,
|
53
|
+
asSchema: () => import_provider_utils25.asSchema,
|
55
54
|
assistantModelMessageSchema: () => assistantModelMessageSchema,
|
56
55
|
callChatApi: () => callChatApi,
|
57
56
|
callCompletionApi: () => callCompletionApi,
|
@@ -64,11 +63,11 @@ __export(src_exports, {
|
|
64
63
|
coreToolMessageSchema: () => coreToolMessageSchema,
|
65
64
|
coreUserMessageSchema: () => coreUserMessageSchema,
|
66
65
|
cosineSimilarity: () => cosineSimilarity,
|
67
|
-
|
68
|
-
createDataStreamResponse: () => createDataStreamResponse,
|
69
|
-
createIdGenerator: () => import_provider_utils24.createIdGenerator,
|
66
|
+
createIdGenerator: () => import_provider_utils25.createIdGenerator,
|
70
67
|
createProviderRegistry: () => createProviderRegistry,
|
71
68
|
createTextStreamResponse: () => createTextStreamResponse,
|
69
|
+
createUIMessageStream: () => createUIMessageStream,
|
70
|
+
createUIMessageStreamResponse: () => createUIMessageStreamResponse,
|
72
71
|
customProvider: () => customProvider,
|
73
72
|
defaultSettingsMiddleware: () => defaultSettingsMiddleware,
|
74
73
|
embed: () => embed,
|
@@ -81,19 +80,18 @@ __export(src_exports, {
|
|
81
80
|
experimental_transcribe: () => transcribe,
|
82
81
|
extractMaxToolInvocationStep: () => extractMaxToolInvocationStep,
|
83
82
|
extractReasoningMiddleware: () => extractReasoningMiddleware,
|
84
|
-
generateId: () =>
|
83
|
+
generateId: () => import_provider_utils25.generateId,
|
85
84
|
generateObject: () => generateObject,
|
86
85
|
generateText: () => generateText,
|
87
86
|
getTextFromDataUrl: () => getTextFromDataUrl,
|
88
87
|
getToolInvocations: () => getToolInvocations,
|
89
88
|
isAssistantMessageWithCompletedToolCalls: () => isAssistantMessageWithCompletedToolCalls,
|
90
89
|
isDeepEqualData: () => isDeepEqualData,
|
91
|
-
jsonSchema: () =>
|
90
|
+
jsonSchema: () => import_provider_utils25.jsonSchema,
|
92
91
|
modelMessageSchema: () => modelMessageSchema,
|
93
92
|
parsePartialJson: () => parsePartialJson,
|
94
|
-
pipeDataStreamToResponse: () => pipeDataStreamToResponse,
|
95
93
|
pipeTextStreamToResponse: () => pipeTextStreamToResponse,
|
96
|
-
|
94
|
+
pipeUIMessageStreamToResponse: () => pipeUIMessageStreamToResponse,
|
97
95
|
processTextStream: () => processTextStream,
|
98
96
|
shouldResubmitMessages: () => shouldResubmitMessages,
|
99
97
|
simulateReadableStream: () => simulateReadableStream,
|
@@ -109,404 +107,7 @@ __export(src_exports, {
|
|
109
107
|
wrapLanguageModel: () => wrapLanguageModel
|
110
108
|
});
|
111
109
|
module.exports = __toCommonJS(src_exports);
|
112
|
-
var
|
113
|
-
|
114
|
-
// src/data-stream/create-data-stream.ts
|
115
|
-
function createDataStream({
|
116
|
-
execute,
|
117
|
-
onError = () => "An error occurred."
|
118
|
-
// mask error messages for safety by default
|
119
|
-
}) {
|
120
|
-
let controller;
|
121
|
-
const ongoingStreamPromises = [];
|
122
|
-
const stream = new ReadableStream({
|
123
|
-
start(controllerArg) {
|
124
|
-
controller = controllerArg;
|
125
|
-
}
|
126
|
-
});
|
127
|
-
function safeEnqueue(data) {
|
128
|
-
try {
|
129
|
-
controller.enqueue(data);
|
130
|
-
} catch (error) {
|
131
|
-
}
|
132
|
-
}
|
133
|
-
try {
|
134
|
-
const result = execute({
|
135
|
-
write(part) {
|
136
|
-
safeEnqueue(part);
|
137
|
-
},
|
138
|
-
merge(streamArg) {
|
139
|
-
ongoingStreamPromises.push(
|
140
|
-
(async () => {
|
141
|
-
const reader = streamArg.getReader();
|
142
|
-
while (true) {
|
143
|
-
const { done, value } = await reader.read();
|
144
|
-
if (done)
|
145
|
-
break;
|
146
|
-
safeEnqueue(value);
|
147
|
-
}
|
148
|
-
})().catch((error) => {
|
149
|
-
safeEnqueue({ type: "error", value: onError(error) });
|
150
|
-
})
|
151
|
-
);
|
152
|
-
},
|
153
|
-
onError
|
154
|
-
});
|
155
|
-
if (result) {
|
156
|
-
ongoingStreamPromises.push(
|
157
|
-
result.catch((error) => {
|
158
|
-
safeEnqueue({ type: "error", value: onError(error) });
|
159
|
-
})
|
160
|
-
);
|
161
|
-
}
|
162
|
-
} catch (error) {
|
163
|
-
safeEnqueue({ type: "error", value: onError(error) });
|
164
|
-
}
|
165
|
-
const waitForStreams = new Promise(async (resolve) => {
|
166
|
-
while (ongoingStreamPromises.length > 0) {
|
167
|
-
await ongoingStreamPromises.shift();
|
168
|
-
}
|
169
|
-
resolve();
|
170
|
-
});
|
171
|
-
waitForStreams.finally(() => {
|
172
|
-
try {
|
173
|
-
controller.close();
|
174
|
-
} catch (error) {
|
175
|
-
}
|
176
|
-
});
|
177
|
-
return stream;
|
178
|
-
}
|
179
|
-
|
180
|
-
// src/util/prepare-headers.ts
|
181
|
-
function prepareHeaders(headers, defaultHeaders) {
|
182
|
-
const responseHeaders = new Headers(headers != null ? headers : {});
|
183
|
-
for (const [key, value] of Object.entries(defaultHeaders)) {
|
184
|
-
if (!responseHeaders.has(key)) {
|
185
|
-
responseHeaders.set(key, value);
|
186
|
-
}
|
187
|
-
}
|
188
|
-
return responseHeaders;
|
189
|
-
}
|
190
|
-
|
191
|
-
// src/data-stream/data-stream-headers.ts
|
192
|
-
var dataStreamHeaders = {
|
193
|
-
"content-type": "text/event-stream",
|
194
|
-
"cache-control": "no-cache",
|
195
|
-
connection: "keep-alive",
|
196
|
-
"x-vercel-ai-data-stream": "v2",
|
197
|
-
"x-accel-buffering": "no"
|
198
|
-
// disable nginx buffering
|
199
|
-
};
|
200
|
-
|
201
|
-
// src/data-stream/json-to-sse-transform-stream.ts
|
202
|
-
var JsonToSseTransformStream = class extends TransformStream {
|
203
|
-
constructor() {
|
204
|
-
super({
|
205
|
-
transform(part, controller) {
|
206
|
-
controller.enqueue(`data: ${JSON.stringify(part)}
|
207
|
-
|
208
|
-
`);
|
209
|
-
},
|
210
|
-
flush(controller) {
|
211
|
-
controller.enqueue("data: [DONE]\n\n");
|
212
|
-
}
|
213
|
-
});
|
214
|
-
}
|
215
|
-
};
|
216
|
-
|
217
|
-
// src/data-stream/create-data-stream-response.ts
|
218
|
-
function createDataStreamResponse({
|
219
|
-
status,
|
220
|
-
statusText,
|
221
|
-
headers,
|
222
|
-
dataStream
|
223
|
-
}) {
|
224
|
-
return new Response(
|
225
|
-
dataStream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream()),
|
226
|
-
{
|
227
|
-
status,
|
228
|
-
statusText,
|
229
|
-
headers: prepareHeaders(headers, dataStreamHeaders)
|
230
|
-
}
|
231
|
-
);
|
232
|
-
}
|
233
|
-
|
234
|
-
// src/util/write-to-server-response.ts
|
235
|
-
function writeToServerResponse({
|
236
|
-
response,
|
237
|
-
status,
|
238
|
-
statusText,
|
239
|
-
headers,
|
240
|
-
stream
|
241
|
-
}) {
|
242
|
-
response.writeHead(status != null ? status : 200, statusText, headers);
|
243
|
-
const reader = stream.getReader();
|
244
|
-
const read = async () => {
|
245
|
-
try {
|
246
|
-
while (true) {
|
247
|
-
const { done, value } = await reader.read();
|
248
|
-
if (done)
|
249
|
-
break;
|
250
|
-
response.write(value);
|
251
|
-
}
|
252
|
-
} catch (error) {
|
253
|
-
throw error;
|
254
|
-
} finally {
|
255
|
-
response.end();
|
256
|
-
}
|
257
|
-
};
|
258
|
-
read();
|
259
|
-
}
|
260
|
-
|
261
|
-
// src/data-stream/pipe-data-stream-to-response.ts
|
262
|
-
function pipeDataStreamToResponse({
|
263
|
-
response,
|
264
|
-
status,
|
265
|
-
statusText,
|
266
|
-
headers,
|
267
|
-
dataStream
|
268
|
-
}) {
|
269
|
-
writeToServerResponse({
|
270
|
-
response,
|
271
|
-
status,
|
272
|
-
statusText,
|
273
|
-
headers: Object.fromEntries(
|
274
|
-
prepareHeaders(headers, dataStreamHeaders).entries()
|
275
|
-
),
|
276
|
-
stream: dataStream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream())
|
277
|
-
});
|
278
|
-
}
|
279
|
-
|
280
|
-
// src/data-stream/process-data-stream.ts
|
281
|
-
var import_provider_utils = require("@ai-sdk/provider-utils");
|
282
|
-
|
283
|
-
// src/util/async-iterable-stream.ts
|
284
|
-
function createAsyncIterableStream(source) {
|
285
|
-
const stream = source.pipeThrough(new TransformStream());
|
286
|
-
stream[Symbol.asyncIterator] = () => {
|
287
|
-
const reader = stream.getReader();
|
288
|
-
return {
|
289
|
-
async next() {
|
290
|
-
const { done, value } = await reader.read();
|
291
|
-
return done ? { done: true, value: void 0 } : { done: false, value };
|
292
|
-
}
|
293
|
-
};
|
294
|
-
};
|
295
|
-
return stream;
|
296
|
-
}
|
297
|
-
|
298
|
-
// src/data-stream/data-stream-parts.ts
|
299
|
-
var import_zod = require("zod");
|
300
|
-
var languageModelUsageSchema = import_zod.z.object({
|
301
|
-
inputTokens: import_zod.z.number().optional(),
|
302
|
-
outputTokens: import_zod.z.number().optional(),
|
303
|
-
totalTokens: import_zod.z.number().optional(),
|
304
|
-
reasoningTokens: import_zod.z.number().optional(),
|
305
|
-
cachedInputTokens: import_zod.z.number().optional()
|
306
|
-
});
|
307
|
-
var finishReasonSchema = import_zod.z.enum([
|
308
|
-
"stop",
|
309
|
-
"length",
|
310
|
-
"tool-calls",
|
311
|
-
"content-filter",
|
312
|
-
"other",
|
313
|
-
"error",
|
314
|
-
"unknown"
|
315
|
-
]);
|
316
|
-
var toolCallSchema = import_zod.z.object({
|
317
|
-
toolCallId: import_zod.z.string(),
|
318
|
-
toolName: import_zod.z.string(),
|
319
|
-
args: import_zod.z.unknown()
|
320
|
-
});
|
321
|
-
var toolResultValueSchema = import_zod.z.object({
|
322
|
-
toolCallId: import_zod.z.string(),
|
323
|
-
result: import_zod.z.unknown(),
|
324
|
-
providerMetadata: import_zod.z.any().optional()
|
325
|
-
});
|
326
|
-
var sourceSchema = import_zod.z.object({
|
327
|
-
type: import_zod.z.literal("source"),
|
328
|
-
sourceType: import_zod.z.literal("url"),
|
329
|
-
id: import_zod.z.string(),
|
330
|
-
url: import_zod.z.string(),
|
331
|
-
title: import_zod.z.string().optional(),
|
332
|
-
providerMetadata: import_zod.z.any().optional()
|
333
|
-
// Use z.any() for generic metadata
|
334
|
-
});
|
335
|
-
var dataStreamPartSchema = import_zod.z.discriminatedUnion("type", [
|
336
|
-
import_zod.z.object({
|
337
|
-
type: import_zod.z.literal("text"),
|
338
|
-
value: import_zod.z.string()
|
339
|
-
}),
|
340
|
-
import_zod.z.object({
|
341
|
-
type: import_zod.z.literal("data"),
|
342
|
-
value: import_zod.z.array(import_zod.z.any())
|
343
|
-
// TODO json validation
|
344
|
-
}),
|
345
|
-
import_zod.z.object({
|
346
|
-
type: import_zod.z.literal("error"),
|
347
|
-
value: import_zod.z.string()
|
348
|
-
}),
|
349
|
-
import_zod.z.object({
|
350
|
-
type: import_zod.z.literal("message-annotations"),
|
351
|
-
value: import_zod.z.array(import_zod.z.any())
|
352
|
-
// TODO json validation
|
353
|
-
}),
|
354
|
-
import_zod.z.object({
|
355
|
-
type: import_zod.z.literal("tool-call"),
|
356
|
-
value: toolCallSchema
|
357
|
-
}),
|
358
|
-
import_zod.z.object({
|
359
|
-
type: import_zod.z.literal("tool-result"),
|
360
|
-
value: toolResultValueSchema
|
361
|
-
}),
|
362
|
-
import_zod.z.object({
|
363
|
-
type: import_zod.z.literal("tool-call-streaming-start"),
|
364
|
-
value: import_zod.z.object({ toolCallId: import_zod.z.string(), toolName: import_zod.z.string() })
|
365
|
-
}),
|
366
|
-
import_zod.z.object({
|
367
|
-
type: import_zod.z.literal("tool-call-delta"),
|
368
|
-
value: import_zod.z.object({ toolCallId: import_zod.z.string(), argsTextDelta: import_zod.z.string() })
|
369
|
-
}),
|
370
|
-
import_zod.z.object({
|
371
|
-
type: import_zod.z.literal("finish-message"),
|
372
|
-
value: import_zod.z.object({
|
373
|
-
finishReason: finishReasonSchema,
|
374
|
-
// TODO v5 remove usage from finish event (only on step-finish)
|
375
|
-
usage: languageModelUsageSchema.optional()
|
376
|
-
})
|
377
|
-
}),
|
378
|
-
import_zod.z.object({
|
379
|
-
type: import_zod.z.literal("finish-step"),
|
380
|
-
value: import_zod.z.object({
|
381
|
-
isContinued: import_zod.z.boolean(),
|
382
|
-
finishReason: finishReasonSchema,
|
383
|
-
usage: languageModelUsageSchema.optional()
|
384
|
-
})
|
385
|
-
}),
|
386
|
-
import_zod.z.object({
|
387
|
-
type: import_zod.z.literal("start-step"),
|
388
|
-
value: import_zod.z.object({
|
389
|
-
messageId: import_zod.z.string()
|
390
|
-
})
|
391
|
-
}),
|
392
|
-
import_zod.z.object({
|
393
|
-
type: import_zod.z.literal("reasoning"),
|
394
|
-
value: import_zod.z.object({
|
395
|
-
text: import_zod.z.string(),
|
396
|
-
providerMetadata: import_zod.z.record(import_zod.z.any()).optional()
|
397
|
-
})
|
398
|
-
}),
|
399
|
-
import_zod.z.object({
|
400
|
-
type: import_zod.z.literal("source"),
|
401
|
-
value: sourceSchema
|
402
|
-
}),
|
403
|
-
import_zod.z.object({
|
404
|
-
type: import_zod.z.literal("file"),
|
405
|
-
value: import_zod.z.object({
|
406
|
-
url: import_zod.z.string(),
|
407
|
-
mediaType: import_zod.z.string()
|
408
|
-
})
|
409
|
-
}),
|
410
|
-
import_zod.z.object({
|
411
|
-
type: import_zod.z.literal("reasoning-part-finish"),
|
412
|
-
value: import_zod.z.null()
|
413
|
-
})
|
414
|
-
]);
|
415
|
-
|
416
|
-
// src/data-stream/process-data-stream.ts
|
417
|
-
async function processDataStream({
|
418
|
-
stream,
|
419
|
-
onTextPart,
|
420
|
-
onReasoningPart,
|
421
|
-
onReasoningPartFinish,
|
422
|
-
onSourcePart,
|
423
|
-
onFilePart,
|
424
|
-
onDataPart,
|
425
|
-
onErrorPart,
|
426
|
-
onToolCallStreamingStartPart,
|
427
|
-
onToolCallDeltaPart,
|
428
|
-
onToolCallPart,
|
429
|
-
onToolResultPart,
|
430
|
-
onMessageAnnotationsPart,
|
431
|
-
onFinishMessagePart,
|
432
|
-
onFinishStepPart,
|
433
|
-
onStartStepPart
|
434
|
-
}) {
|
435
|
-
const streamParts = createAsyncIterableStream(
|
436
|
-
stream.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils.createEventSourceParserStream)()).pipeThrough(
|
437
|
-
new TransformStream({
|
438
|
-
async transform({ data }, controller) {
|
439
|
-
if (data === "[DONE]") {
|
440
|
-
return;
|
441
|
-
}
|
442
|
-
controller.enqueue(
|
443
|
-
await (0, import_provider_utils.safeParseJSON)({
|
444
|
-
text: data,
|
445
|
-
schema: dataStreamPartSchema
|
446
|
-
})
|
447
|
-
);
|
448
|
-
}
|
449
|
-
})
|
450
|
-
)
|
451
|
-
);
|
452
|
-
for await (const parseResult of streamParts) {
|
453
|
-
if (!parseResult.success) {
|
454
|
-
throw new Error("Failed to parse data stream part");
|
455
|
-
}
|
456
|
-
const { type, value } = parseResult.value;
|
457
|
-
switch (type) {
|
458
|
-
case "text":
|
459
|
-
await (onTextPart == null ? void 0 : onTextPart(value));
|
460
|
-
break;
|
461
|
-
case "reasoning":
|
462
|
-
await (onReasoningPart == null ? void 0 : onReasoningPart(value));
|
463
|
-
break;
|
464
|
-
case "reasoning-part-finish":
|
465
|
-
await (onReasoningPartFinish == null ? void 0 : onReasoningPartFinish(value));
|
466
|
-
break;
|
467
|
-
case "file":
|
468
|
-
await (onFilePart == null ? void 0 : onFilePart(value));
|
469
|
-
break;
|
470
|
-
case "source":
|
471
|
-
await (onSourcePart == null ? void 0 : onSourcePart(value));
|
472
|
-
break;
|
473
|
-
case "data":
|
474
|
-
await (onDataPart == null ? void 0 : onDataPart(value));
|
475
|
-
break;
|
476
|
-
case "error":
|
477
|
-
await (onErrorPart == null ? void 0 : onErrorPart(value));
|
478
|
-
break;
|
479
|
-
case "message-annotations":
|
480
|
-
await (onMessageAnnotationsPart == null ? void 0 : onMessageAnnotationsPart(value));
|
481
|
-
break;
|
482
|
-
case "tool-call-streaming-start":
|
483
|
-
await (onToolCallStreamingStartPart == null ? void 0 : onToolCallStreamingStartPart(value));
|
484
|
-
break;
|
485
|
-
case "tool-call-delta":
|
486
|
-
await (onToolCallDeltaPart == null ? void 0 : onToolCallDeltaPart(value));
|
487
|
-
break;
|
488
|
-
case "tool-call":
|
489
|
-
await (onToolCallPart == null ? void 0 : onToolCallPart(value));
|
490
|
-
break;
|
491
|
-
case "tool-result":
|
492
|
-
await (onToolResultPart == null ? void 0 : onToolResultPart(value));
|
493
|
-
break;
|
494
|
-
case "finish-message":
|
495
|
-
await (onFinishMessagePart == null ? void 0 : onFinishMessagePart(value));
|
496
|
-
break;
|
497
|
-
case "finish-step":
|
498
|
-
await (onFinishStepPart == null ? void 0 : onFinishStepPart(value));
|
499
|
-
break;
|
500
|
-
case "start-step":
|
501
|
-
await (onStartStepPart == null ? void 0 : onStartStepPart(value));
|
502
|
-
break;
|
503
|
-
default: {
|
504
|
-
const exhaustiveCheck = type;
|
505
|
-
throw new Error(`Unknown stream part type: ${exhaustiveCheck}`);
|
506
|
-
}
|
507
|
-
}
|
508
|
-
}
|
509
|
-
}
|
110
|
+
var import_provider_utils25 = require("@ai-sdk/provider-utils");
|
510
111
|
|
511
112
|
// src/error/index.ts
|
512
113
|
var import_provider16 = require("@ai-sdk/provider");
|
@@ -858,6 +459,17 @@ var RetryError = class extends import_provider15.AISDKError {
|
|
858
459
|
};
|
859
460
|
_a15 = symbol15;
|
860
461
|
|
462
|
+
// src/util/prepare-headers.ts
|
463
|
+
function prepareHeaders(headers, defaultHeaders) {
|
464
|
+
const responseHeaders = new Headers(headers != null ? headers : {});
|
465
|
+
for (const [key, value] of Object.entries(defaultHeaders)) {
|
466
|
+
if (!responseHeaders.has(key)) {
|
467
|
+
responseHeaders.set(key, value);
|
468
|
+
}
|
469
|
+
}
|
470
|
+
return responseHeaders;
|
471
|
+
}
|
472
|
+
|
861
473
|
// src/text-stream/create-text-stream-response.ts
|
862
474
|
function createTextStreamResponse({
|
863
475
|
status,
|
@@ -874,6 +486,33 @@ function createTextStreamResponse({
|
|
874
486
|
});
|
875
487
|
}
|
876
488
|
|
489
|
+
// src/util/write-to-server-response.ts
|
490
|
+
function writeToServerResponse({
|
491
|
+
response,
|
492
|
+
status,
|
493
|
+
statusText,
|
494
|
+
headers,
|
495
|
+
stream
|
496
|
+
}) {
|
497
|
+
response.writeHead(status != null ? status : 200, statusText, headers);
|
498
|
+
const reader = stream.getReader();
|
499
|
+
const read = async () => {
|
500
|
+
try {
|
501
|
+
while (true) {
|
502
|
+
const { done, value } = await reader.read();
|
503
|
+
if (done)
|
504
|
+
break;
|
505
|
+
response.write(value);
|
506
|
+
}
|
507
|
+
} catch (error) {
|
508
|
+
throw error;
|
509
|
+
} finally {
|
510
|
+
response.end();
|
511
|
+
}
|
512
|
+
};
|
513
|
+
read();
|
514
|
+
}
|
515
|
+
|
877
516
|
// src/text-stream/pipe-text-stream-to-response.ts
|
878
517
|
function pipeTextStreamToResponse({
|
879
518
|
response,
|
@@ -895,278 +534,178 @@ function pipeTextStreamToResponse({
|
|
895
534
|
});
|
896
535
|
}
|
897
536
|
|
898
|
-
// src/ui/append-client-message.ts
|
899
|
-
function appendClientMessage({
|
900
|
-
messages,
|
901
|
-
message
|
537
|
+
// src/ui/append-client-message.ts
|
538
|
+
function appendClientMessage({
|
539
|
+
messages,
|
540
|
+
message
|
541
|
+
}) {
|
542
|
+
return [
|
543
|
+
...messages.length > 0 && messages[messages.length - 1].id === message.id ? messages.slice(0, -1) : messages,
|
544
|
+
message
|
545
|
+
];
|
546
|
+
}
|
547
|
+
|
548
|
+
// src/ui/call-chat-api.ts
|
549
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
550
|
+
|
551
|
+
// src/ui-message-stream/ui-message-stream-parts.ts
|
552
|
+
var import_zod = require("zod");
|
553
|
+
var toolCallSchema = import_zod.z.object({
|
554
|
+
toolCallId: import_zod.z.string(),
|
555
|
+
toolName: import_zod.z.string(),
|
556
|
+
args: import_zod.z.unknown()
|
557
|
+
});
|
558
|
+
var toolResultValueSchema = import_zod.z.object({
|
559
|
+
toolCallId: import_zod.z.string(),
|
560
|
+
result: import_zod.z.unknown(),
|
561
|
+
providerMetadata: import_zod.z.any().optional()
|
562
|
+
});
|
563
|
+
var sourceSchema = import_zod.z.object({
|
564
|
+
type: import_zod.z.literal("source"),
|
565
|
+
sourceType: import_zod.z.literal("url"),
|
566
|
+
id: import_zod.z.string(),
|
567
|
+
url: import_zod.z.string(),
|
568
|
+
title: import_zod.z.string().optional(),
|
569
|
+
providerMetadata: import_zod.z.any().optional()
|
570
|
+
// Use z.any() for generic metadata
|
571
|
+
});
|
572
|
+
var uiMessageStreamPartSchema = import_zod.z.discriminatedUnion("type", [
|
573
|
+
import_zod.z.object({
|
574
|
+
type: import_zod.z.literal("text"),
|
575
|
+
value: import_zod.z.string()
|
576
|
+
}),
|
577
|
+
import_zod.z.object({
|
578
|
+
type: import_zod.z.literal("error"),
|
579
|
+
value: import_zod.z.string()
|
580
|
+
}),
|
581
|
+
import_zod.z.object({
|
582
|
+
type: import_zod.z.literal("tool-call"),
|
583
|
+
value: toolCallSchema
|
584
|
+
}),
|
585
|
+
import_zod.z.object({
|
586
|
+
type: import_zod.z.literal("tool-result"),
|
587
|
+
value: toolResultValueSchema
|
588
|
+
}),
|
589
|
+
import_zod.z.object({
|
590
|
+
type: import_zod.z.literal("tool-call-streaming-start"),
|
591
|
+
value: import_zod.z.object({ toolCallId: import_zod.z.string(), toolName: import_zod.z.string() })
|
592
|
+
}),
|
593
|
+
import_zod.z.object({
|
594
|
+
type: import_zod.z.literal("tool-call-delta"),
|
595
|
+
value: import_zod.z.object({ toolCallId: import_zod.z.string(), argsTextDelta: import_zod.z.string() })
|
596
|
+
}),
|
597
|
+
import_zod.z.object({
|
598
|
+
type: import_zod.z.literal("reasoning"),
|
599
|
+
value: import_zod.z.object({
|
600
|
+
text: import_zod.z.string(),
|
601
|
+
providerMetadata: import_zod.z.record(import_zod.z.any()).optional()
|
602
|
+
})
|
603
|
+
}),
|
604
|
+
import_zod.z.object({
|
605
|
+
type: import_zod.z.literal("source"),
|
606
|
+
value: sourceSchema
|
607
|
+
}),
|
608
|
+
import_zod.z.object({
|
609
|
+
type: import_zod.z.literal("file"),
|
610
|
+
value: import_zod.z.object({
|
611
|
+
url: import_zod.z.string(),
|
612
|
+
mediaType: import_zod.z.string()
|
613
|
+
})
|
614
|
+
}),
|
615
|
+
import_zod.z.object({
|
616
|
+
type: import_zod.z.literal("metadata"),
|
617
|
+
value: import_zod.z.object({
|
618
|
+
metadata: import_zod.z.unknown()
|
619
|
+
})
|
620
|
+
}),
|
621
|
+
import_zod.z.object({
|
622
|
+
type: import_zod.z.literal("start-step"),
|
623
|
+
value: import_zod.z.object({
|
624
|
+
metadata: import_zod.z.unknown()
|
625
|
+
})
|
626
|
+
}),
|
627
|
+
import_zod.z.object({
|
628
|
+
type: import_zod.z.literal("finish-step"),
|
629
|
+
value: import_zod.z.object({
|
630
|
+
metadata: import_zod.z.unknown()
|
631
|
+
})
|
632
|
+
}),
|
633
|
+
import_zod.z.object({
|
634
|
+
type: import_zod.z.literal("start"),
|
635
|
+
value: import_zod.z.object({
|
636
|
+
messageId: import_zod.z.string().optional(),
|
637
|
+
metadata: import_zod.z.unknown()
|
638
|
+
})
|
639
|
+
}),
|
640
|
+
import_zod.z.object({
|
641
|
+
type: import_zod.z.literal("finish"),
|
642
|
+
value: import_zod.z.object({
|
643
|
+
metadata: import_zod.z.unknown()
|
644
|
+
})
|
645
|
+
}),
|
646
|
+
import_zod.z.object({
|
647
|
+
type: import_zod.z.literal("reasoning-part-finish"),
|
648
|
+
value: import_zod.z.null()
|
649
|
+
})
|
650
|
+
]);
|
651
|
+
|
652
|
+
// src/util/consume-stream.ts
|
653
|
+
async function consumeStream({
|
654
|
+
stream,
|
655
|
+
onError
|
902
656
|
}) {
|
903
|
-
|
904
|
-
...messages.length > 0 && messages[messages.length - 1].id === message.id ? messages.slice(0, -1) : messages,
|
905
|
-
message
|
906
|
-
];
|
907
|
-
}
|
908
|
-
|
909
|
-
// src/ui/append-response-messages.ts
|
910
|
-
var import_provider18 = require("@ai-sdk/provider");
|
911
|
-
|
912
|
-
// src/ui/extract-max-tool-invocation-step.ts
|
913
|
-
function extractMaxToolInvocationStep(toolInvocations) {
|
914
|
-
return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
|
915
|
-
var _a17;
|
916
|
-
return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
|
917
|
-
}, 0);
|
918
|
-
}
|
919
|
-
|
920
|
-
// src/ui/get-tool-invocations.ts
|
921
|
-
function getToolInvocations(message) {
|
922
|
-
return message.parts.filter(
|
923
|
-
(part) => part.type === "tool-invocation"
|
924
|
-
).map((part) => part.toolInvocation);
|
925
|
-
}
|
926
|
-
|
927
|
-
// core/prompt/data-content.ts
|
928
|
-
var import_provider17 = require("@ai-sdk/provider");
|
929
|
-
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
930
|
-
var import_zod2 = require("zod");
|
931
|
-
|
932
|
-
// core/prompt/split-data-url.ts
|
933
|
-
function splitDataUrl(dataUrl) {
|
657
|
+
const reader = stream.getReader();
|
934
658
|
try {
|
935
|
-
|
936
|
-
|
937
|
-
|
938
|
-
|
939
|
-
}
|
659
|
+
while (true) {
|
660
|
+
const { done } = await reader.read();
|
661
|
+
if (done)
|
662
|
+
break;
|
663
|
+
}
|
940
664
|
} catch (error) {
|
941
|
-
|
942
|
-
|
943
|
-
|
944
|
-
};
|
665
|
+
onError == null ? void 0 : onError(error);
|
666
|
+
} finally {
|
667
|
+
reader.releaseLock();
|
945
668
|
}
|
946
669
|
}
|
947
670
|
|
948
|
-
//
|
949
|
-
var
|
950
|
-
|
951
|
-
|
952
|
-
|
953
|
-
|
954
|
-
|
955
|
-
(value) => {
|
956
|
-
var _a17, _b;
|
957
|
-
return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
|
958
|
-
},
|
959
|
-
{ message: "Must be a Buffer" }
|
960
|
-
)
|
961
|
-
]);
|
962
|
-
function convertToLanguageModelV2DataContent(content) {
|
963
|
-
if (content instanceof Uint8Array) {
|
964
|
-
return { data: content, mediaType: void 0 };
|
965
|
-
}
|
966
|
-
if (content instanceof ArrayBuffer) {
|
967
|
-
return { data: new Uint8Array(content), mediaType: void 0 };
|
968
|
-
}
|
969
|
-
if (typeof content === "string") {
|
970
|
-
try {
|
971
|
-
content = new URL(content);
|
972
|
-
} catch (error) {
|
973
|
-
}
|
974
|
-
}
|
975
|
-
if (content instanceof URL && content.protocol === "data:") {
|
976
|
-
const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
|
977
|
-
content.toString()
|
978
|
-
);
|
979
|
-
if (dataUrlMediaType == null || base64Content == null) {
|
980
|
-
throw new import_provider17.AISDKError({
|
981
|
-
name: "InvalidDataContentError",
|
982
|
-
message: `Invalid data URL format in content ${content.toString()}`
|
983
|
-
});
|
984
|
-
}
|
985
|
-
return { data: base64Content, mediaType: dataUrlMediaType };
|
986
|
-
}
|
987
|
-
return { data: content, mediaType: void 0 };
|
988
|
-
}
|
989
|
-
function convertDataContentToBase64String(content) {
|
990
|
-
if (typeof content === "string") {
|
991
|
-
return content;
|
992
|
-
}
|
993
|
-
if (content instanceof ArrayBuffer) {
|
994
|
-
return (0, import_provider_utils2.convertUint8ArrayToBase64)(new Uint8Array(content));
|
995
|
-
}
|
996
|
-
return (0, import_provider_utils2.convertUint8ArrayToBase64)(content);
|
997
|
-
}
|
998
|
-
function convertDataContentToUint8Array(content) {
|
999
|
-
if (content instanceof Uint8Array) {
|
1000
|
-
return content;
|
671
|
+
// src/ui/process-ui-message-stream.ts
|
672
|
+
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
673
|
+
|
674
|
+
// src/util/merge-objects.ts
|
675
|
+
function mergeObjects(base, overrides) {
|
676
|
+
if (base === void 0 && overrides === void 0) {
|
677
|
+
return void 0;
|
1001
678
|
}
|
1002
|
-
if (
|
1003
|
-
|
1004
|
-
return (0, import_provider_utils2.convertBase64ToUint8Array)(content);
|
1005
|
-
} catch (error) {
|
1006
|
-
throw new InvalidDataContentError({
|
1007
|
-
message: "Invalid data content. Content string is not a base64-encoded media.",
|
1008
|
-
content,
|
1009
|
-
cause: error
|
1010
|
-
});
|
1011
|
-
}
|
679
|
+
if (base === void 0) {
|
680
|
+
return overrides;
|
1012
681
|
}
|
1013
|
-
if (
|
1014
|
-
return
|
682
|
+
if (overrides === void 0) {
|
683
|
+
return base;
|
1015
684
|
}
|
1016
|
-
|
1017
|
-
|
1018
|
-
|
1019
|
-
|
1020
|
-
|
1021
|
-
|
1022
|
-
|
1023
|
-
|
1024
|
-
|
1025
|
-
|
1026
|
-
|
1027
|
-
|
1028
|
-
|
1029
|
-
|
1030
|
-
|
1031
|
-
|
1032
|
-
case "assistant": {
|
1033
|
-
let getToolInvocationsForStep2 = function(step) {
|
1034
|
-
return (typeof message.content === "string" ? [] : message.content.filter((part) => part.type === "tool-call")).map((call) => ({
|
1035
|
-
state: "call",
|
1036
|
-
step,
|
1037
|
-
args: call.args,
|
1038
|
-
toolCallId: call.toolCallId,
|
1039
|
-
toolName: call.toolName
|
1040
|
-
}));
|
1041
|
-
};
|
1042
|
-
var getToolInvocationsForStep = getToolInvocationsForStep2;
|
1043
|
-
const parts = [{ type: "step-start" }];
|
1044
|
-
let textContent = "";
|
1045
|
-
let reasoningTextContent = void 0;
|
1046
|
-
if (typeof message.content === "string") {
|
1047
|
-
textContent = message.content;
|
1048
|
-
parts.push({
|
1049
|
-
type: "text",
|
1050
|
-
text: message.content
|
1051
|
-
});
|
1052
|
-
} else {
|
1053
|
-
let reasoningPart = void 0;
|
1054
|
-
for (const part of message.content) {
|
1055
|
-
switch (part.type) {
|
1056
|
-
case "text": {
|
1057
|
-
reasoningPart = void 0;
|
1058
|
-
textContent += part.text;
|
1059
|
-
parts.push({
|
1060
|
-
type: "text",
|
1061
|
-
text: part.text
|
1062
|
-
});
|
1063
|
-
break;
|
1064
|
-
}
|
1065
|
-
case "reasoning": {
|
1066
|
-
if (reasoningPart == null) {
|
1067
|
-
reasoningPart = {
|
1068
|
-
type: "reasoning",
|
1069
|
-
text: ""
|
1070
|
-
};
|
1071
|
-
parts.push(reasoningPart);
|
1072
|
-
}
|
1073
|
-
reasoningTextContent = (reasoningTextContent != null ? reasoningTextContent : "") + part.text;
|
1074
|
-
reasoningPart.text += part.text;
|
1075
|
-
reasoningPart.providerMetadata = part.providerOptions;
|
1076
|
-
break;
|
1077
|
-
}
|
1078
|
-
case "tool-call":
|
1079
|
-
break;
|
1080
|
-
case "file":
|
1081
|
-
if (part.data instanceof URL) {
|
1082
|
-
throw new import_provider18.AISDKError({
|
1083
|
-
name: "InvalidAssistantFileData",
|
1084
|
-
message: "File data cannot be a URL"
|
1085
|
-
});
|
1086
|
-
}
|
1087
|
-
parts.push({
|
1088
|
-
type: "file",
|
1089
|
-
mediaType: part.mediaType,
|
1090
|
-
url: `data:${part.mediaType};base64,${convertDataContentToBase64String(part.data)}`
|
1091
|
-
});
|
1092
|
-
break;
|
1093
|
-
}
|
1094
|
-
}
|
1095
|
-
}
|
1096
|
-
if (isLastMessageAssistant) {
|
1097
|
-
const maxStep = extractMaxToolInvocationStep(
|
1098
|
-
getToolInvocations(lastMessage)
|
1099
|
-
);
|
1100
|
-
(_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
|
1101
|
-
lastMessage.parts.push(...parts);
|
1102
|
-
getToolInvocationsForStep2(maxStep === void 0 ? 0 : maxStep + 1).map((call) => ({
|
1103
|
-
type: "tool-invocation",
|
1104
|
-
toolInvocation: call
|
1105
|
-
})).forEach((part) => {
|
1106
|
-
lastMessage.parts.push(part);
|
1107
|
-
});
|
1108
|
-
} else {
|
1109
|
-
clonedMessages.push({
|
1110
|
-
role: "assistant",
|
1111
|
-
id: message.id,
|
1112
|
-
createdAt: currentDate(),
|
1113
|
-
// generate a createdAt date for the message, will be overridden by the client
|
1114
|
-
parts: [
|
1115
|
-
...parts,
|
1116
|
-
...getToolInvocationsForStep2(0).map((call) => ({
|
1117
|
-
type: "tool-invocation",
|
1118
|
-
toolInvocation: call
|
1119
|
-
}))
|
1120
|
-
]
|
1121
|
-
});
|
1122
|
-
}
|
1123
|
-
break;
|
1124
|
-
}
|
1125
|
-
case "tool": {
|
1126
|
-
if (lastMessage.role !== "assistant") {
|
1127
|
-
throw new Error(
|
1128
|
-
`Tool result must follow an assistant message: ${lastMessage.role}`
|
1129
|
-
);
|
1130
|
-
}
|
1131
|
-
(_b = lastMessage.parts) != null ? _b : lastMessage.parts = [];
|
1132
|
-
for (const contentPart of message.content) {
|
1133
|
-
const toolCall = getToolInvocations(lastMessage).find(
|
1134
|
-
(call) => call.toolCallId === contentPart.toolCallId
|
1135
|
-
);
|
1136
|
-
const toolCallPart = lastMessage.parts.find(
|
1137
|
-
(part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === contentPart.toolCallId
|
1138
|
-
);
|
1139
|
-
if (!toolCall) {
|
1140
|
-
throw new Error("Tool call not found in previous message");
|
1141
|
-
}
|
1142
|
-
toolCall.state = "result";
|
1143
|
-
const toolResult = toolCall;
|
1144
|
-
toolResult.result = contentPart.result;
|
1145
|
-
if (toolCallPart) {
|
1146
|
-
toolCallPart.toolInvocation = toolResult;
|
1147
|
-
} else {
|
1148
|
-
lastMessage.parts.push({
|
1149
|
-
type: "tool-invocation",
|
1150
|
-
toolInvocation: toolResult
|
1151
|
-
});
|
1152
|
-
}
|
1153
|
-
}
|
1154
|
-
break;
|
1155
|
-
}
|
1156
|
-
default: {
|
1157
|
-
const _exhaustiveCheck = role;
|
1158
|
-
throw new Error(`Unsupported message role: ${_exhaustiveCheck}`);
|
685
|
+
const result = { ...base };
|
686
|
+
for (const key in overrides) {
|
687
|
+
if (Object.prototype.hasOwnProperty.call(overrides, key)) {
|
688
|
+
const overridesValue = overrides[key];
|
689
|
+
if (overridesValue === void 0)
|
690
|
+
continue;
|
691
|
+
const baseValue = key in base ? base[key] : void 0;
|
692
|
+
const isSourceObject = overridesValue !== null && typeof overridesValue === "object" && !Array.isArray(overridesValue) && !(overridesValue instanceof Date) && !(overridesValue instanceof RegExp);
|
693
|
+
const isTargetObject = baseValue !== null && baseValue !== void 0 && typeof baseValue === "object" && !Array.isArray(baseValue) && !(baseValue instanceof Date) && !(baseValue instanceof RegExp);
|
694
|
+
if (isSourceObject && isTargetObject) {
|
695
|
+
result[key] = mergeObjects(
|
696
|
+
baseValue,
|
697
|
+
overridesValue
|
698
|
+
);
|
699
|
+
} else {
|
700
|
+
result[key] = overridesValue;
|
1159
701
|
}
|
1160
702
|
}
|
1161
703
|
}
|
1162
|
-
return
|
704
|
+
return result;
|
1163
705
|
}
|
1164
706
|
|
1165
|
-
// src/ui/process-chat-response.ts
|
1166
|
-
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
1167
|
-
|
1168
707
|
// src/util/parse-partial-json.ts
|
1169
|
-
var
|
708
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
1170
709
|
|
1171
710
|
// src/util/fix-json.ts
|
1172
711
|
function fixJson(input) {
|
@@ -1491,243 +1030,274 @@ async function parsePartialJson(jsonText) {
|
|
1491
1030
|
if (jsonText === void 0) {
|
1492
1031
|
return { value: void 0, state: "undefined-input" };
|
1493
1032
|
}
|
1494
|
-
let result = await (0,
|
1033
|
+
let result = await (0, import_provider_utils.safeParseJSON)({ text: jsonText });
|
1495
1034
|
if (result.success) {
|
1496
1035
|
return { value: result.value, state: "successful-parse" };
|
1497
1036
|
}
|
1498
|
-
result = await (0,
|
1037
|
+
result = await (0, import_provider_utils.safeParseJSON)({ text: fixJson(jsonText) });
|
1499
1038
|
if (result.success) {
|
1500
1039
|
return { value: result.value, state: "repaired-parse" };
|
1501
1040
|
}
|
1502
1041
|
return { value: void 0, state: "failed-parse" };
|
1503
1042
|
}
|
1504
1043
|
|
1505
|
-
// src/ui/
|
1506
|
-
|
1044
|
+
// src/ui/extract-max-tool-invocation-step.ts
|
1045
|
+
function extractMaxToolInvocationStep(toolInvocations) {
|
1046
|
+
return toolInvocations == null ? void 0 : toolInvocations.reduce((max, toolInvocation) => {
|
1047
|
+
var _a17;
|
1048
|
+
return Math.max(max, (_a17 = toolInvocation.step) != null ? _a17 : 0);
|
1049
|
+
}, 0);
|
1050
|
+
}
|
1051
|
+
|
1052
|
+
// src/ui/get-tool-invocations.ts
|
1053
|
+
function getToolInvocations(message) {
|
1054
|
+
return message.parts.filter(
|
1055
|
+
(part) => part.type === "tool-invocation"
|
1056
|
+
).map((part) => part.toolInvocation);
|
1057
|
+
}
|
1058
|
+
|
1059
|
+
// src/ui/process-ui-message-stream.ts
|
1060
|
+
function processUIMessageStream({
|
1507
1061
|
stream,
|
1508
|
-
|
1062
|
+
onUpdate,
|
1509
1063
|
onToolCall,
|
1510
1064
|
onFinish,
|
1511
|
-
|
1512
|
-
|
1513
|
-
|
1065
|
+
lastMessage,
|
1066
|
+
newMessageId,
|
1067
|
+
messageMetadataSchema
|
1514
1068
|
}) {
|
1515
1069
|
var _a17;
|
1516
|
-
const
|
1517
|
-
let step =
|
1518
|
-
const message =
|
1519
|
-
id:
|
1520
|
-
|
1070
|
+
const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
|
1071
|
+
let step = isContinuation ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
|
1072
|
+
const message = isContinuation ? structuredClone(lastMessage) : {
|
1073
|
+
id: newMessageId,
|
1074
|
+
metadata: {},
|
1521
1075
|
role: "assistant",
|
1522
|
-
parts: []
|
1523
|
-
};
|
1524
|
-
let currentTextPart = void 0;
|
1525
|
-
let currentReasoningPart = void 0;
|
1526
|
-
function updateToolInvocationPart(toolCallId, invocation) {
|
1527
|
-
const part = message.parts.find(
|
1528
|
-
(part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
|
1529
|
-
);
|
1530
|
-
if (part != null) {
|
1531
|
-
part.toolInvocation = invocation;
|
1532
|
-
} else {
|
1533
|
-
message.parts.push({
|
1534
|
-
type: "tool-invocation",
|
1535
|
-
toolInvocation: invocation
|
1536
|
-
});
|
1537
|
-
}
|
1538
|
-
}
|
1539
|
-
const data = [];
|
1540
|
-
let messageAnnotations = replaceLastMessage ? lastMessage == null ? void 0 : lastMessage.annotations : void 0;
|
1541
|
-
const partialToolCalls = {};
|
1542
|
-
let usage = {
|
1543
|
-
inputTokens: void 0,
|
1544
|
-
outputTokens: void 0,
|
1545
|
-
totalTokens: void 0
|
1546
|
-
};
|
1547
|
-
let finishReason = "unknown";
|
1548
|
-
function execUpdate() {
|
1549
|
-
const copiedData = [...data];
|
1550
|
-
if (messageAnnotations == null ? void 0 : messageAnnotations.length) {
|
1551
|
-
message.annotations = messageAnnotations;
|
1552
|
-
}
|
1553
|
-
const copiedMessage = {
|
1554
|
-
// deep copy the message to ensure that deep changes (msg attachments) are updated
|
1555
|
-
// with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
|
1556
|
-
...structuredClone(message),
|
1557
|
-
// add a revision id to ensure that the message is updated with SWR. SWR uses a
|
1558
|
-
// hashing approach by default to detect changes, but it only works for shallow
|
1559
|
-
// changes. This is why we need to add a revision id to ensure that the message
|
1560
|
-
// is updated with SWR (without it, the changes get stuck in SWR and are not
|
1561
|
-
// forwarded to rendering):
|
1562
|
-
revisionId: generateId3()
|
1563
|
-
};
|
1564
|
-
update({
|
1565
|
-
message: copiedMessage,
|
1566
|
-
data: copiedData,
|
1567
|
-
replaceLastMessage
|
1568
|
-
});
|
1569
|
-
}
|
1570
|
-
await processDataStream({
|
1571
|
-
stream,
|
1572
|
-
onTextPart(value) {
|
1573
|
-
if (currentTextPart == null) {
|
1574
|
-
currentTextPart = {
|
1575
|
-
type: "text",
|
1576
|
-
text: value
|
1577
|
-
};
|
1578
|
-
message.parts.push(currentTextPart);
|
1579
|
-
} else {
|
1580
|
-
currentTextPart.text += value;
|
1581
|
-
}
|
1582
|
-
execUpdate();
|
1583
|
-
},
|
1584
|
-
onReasoningPart(value) {
|
1585
|
-
if (currentReasoningPart == null) {
|
1586
|
-
currentReasoningPart = {
|
1587
|
-
type: "reasoning",
|
1588
|
-
text: value.text,
|
1589
|
-
providerMetadata: value.providerMetadata
|
1590
|
-
};
|
1591
|
-
message.parts.push(currentReasoningPart);
|
1592
|
-
} else {
|
1593
|
-
currentReasoningPart.text += value.text;
|
1594
|
-
currentReasoningPart.providerMetadata = value.providerMetadata;
|
1595
|
-
}
|
1596
|
-
execUpdate();
|
1597
|
-
},
|
1598
|
-
onReasoningPartFinish(value) {
|
1599
|
-
if (currentReasoningPart != null) {
|
1600
|
-
currentReasoningPart = void 0;
|
1601
|
-
}
|
1602
|
-
},
|
1603
|
-
onFilePart(value) {
|
1604
|
-
message.parts.push({
|
1605
|
-
type: "file",
|
1606
|
-
mediaType: value.mediaType,
|
1607
|
-
url: value.url
|
1608
|
-
});
|
1609
|
-
execUpdate();
|
1610
|
-
},
|
1611
|
-
onSourcePart(value) {
|
1076
|
+
parts: []
|
1077
|
+
};
|
1078
|
+
let currentTextPart = void 0;
|
1079
|
+
let currentReasoningPart = void 0;
|
1080
|
+
function updateToolInvocationPart(toolCallId, invocation) {
|
1081
|
+
const part = message.parts.find(
|
1082
|
+
(part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
|
1083
|
+
);
|
1084
|
+
if (part != null) {
|
1085
|
+
part.toolInvocation = invocation;
|
1086
|
+
} else {
|
1612
1087
|
message.parts.push({
|
1613
|
-
type: "
|
1614
|
-
|
1615
|
-
});
|
1616
|
-
execUpdate();
|
1617
|
-
},
|
1618
|
-
onToolCallStreamingStartPart(value) {
|
1619
|
-
const toolInvocations = getToolInvocations(message);
|
1620
|
-
partialToolCalls[value.toolCallId] = {
|
1621
|
-
text: "",
|
1622
|
-
step,
|
1623
|
-
toolName: value.toolName,
|
1624
|
-
index: toolInvocations.length
|
1625
|
-
};
|
1626
|
-
updateToolInvocationPart(value.toolCallId, {
|
1627
|
-
state: "partial-call",
|
1628
|
-
step,
|
1629
|
-
toolCallId: value.toolCallId,
|
1630
|
-
toolName: value.toolName,
|
1631
|
-
args: void 0
|
1632
|
-
});
|
1633
|
-
execUpdate();
|
1634
|
-
},
|
1635
|
-
async onToolCallDeltaPart(value) {
|
1636
|
-
const partialToolCall = partialToolCalls[value.toolCallId];
|
1637
|
-
partialToolCall.text += value.argsTextDelta;
|
1638
|
-
const { value: partialArgs } = await parsePartialJson(
|
1639
|
-
partialToolCall.text
|
1640
|
-
);
|
1641
|
-
updateToolInvocationPart(value.toolCallId, {
|
1642
|
-
state: "partial-call",
|
1643
|
-
step: partialToolCall.step,
|
1644
|
-
toolCallId: value.toolCallId,
|
1645
|
-
toolName: partialToolCall.toolName,
|
1646
|
-
args: partialArgs
|
1647
|
-
});
|
1648
|
-
execUpdate();
|
1649
|
-
},
|
1650
|
-
async onToolCallPart(value) {
|
1651
|
-
updateToolInvocationPart(value.toolCallId, {
|
1652
|
-
state: "call",
|
1653
|
-
step,
|
1654
|
-
...value
|
1088
|
+
type: "tool-invocation",
|
1089
|
+
toolInvocation: invocation
|
1655
1090
|
});
|
1656
|
-
|
1657
|
-
|
1658
|
-
|
1659
|
-
|
1091
|
+
}
|
1092
|
+
}
|
1093
|
+
const partialToolCalls = {};
|
1094
|
+
async function updateMessageMetadata(metadata) {
|
1095
|
+
if (metadata != null) {
|
1096
|
+
const mergedMetadata = message.metadata != null ? mergeObjects(message.metadata, metadata) : metadata;
|
1097
|
+
if (messageMetadataSchema != null) {
|
1098
|
+
await (0, import_provider_utils2.validateTypes)({
|
1099
|
+
value: mergedMetadata,
|
1100
|
+
schema: messageMetadataSchema
|
1660
1101
|
});
|
1661
|
-
if (result != null) {
|
1662
|
-
updateToolInvocationPart(value.toolCallId, {
|
1663
|
-
state: "result",
|
1664
|
-
step,
|
1665
|
-
...value,
|
1666
|
-
result
|
1667
|
-
});
|
1668
|
-
execUpdate();
|
1669
|
-
}
|
1670
|
-
}
|
1671
|
-
},
|
1672
|
-
onToolResultPart(value) {
|
1673
|
-
const toolInvocations = getToolInvocations(message);
|
1674
|
-
if (toolInvocations == null) {
|
1675
|
-
throw new Error("tool_result must be preceded by a tool_call");
|
1676
|
-
}
|
1677
|
-
const toolInvocationIndex = toolInvocations.findIndex(
|
1678
|
-
(invocation) => invocation.toolCallId === value.toolCallId
|
1679
|
-
);
|
1680
|
-
if (toolInvocationIndex === -1) {
|
1681
|
-
throw new Error(
|
1682
|
-
"tool_result must be preceded by a tool_call with the same toolCallId"
|
1683
|
-
);
|
1684
1102
|
}
|
1685
|
-
|
1686
|
-
...toolInvocations[toolInvocationIndex],
|
1687
|
-
state: "result",
|
1688
|
-
...value
|
1689
|
-
});
|
1690
|
-
execUpdate();
|
1691
|
-
},
|
1692
|
-
onDataPart(value) {
|
1693
|
-
data.push(...value);
|
1694
|
-
execUpdate();
|
1695
|
-
},
|
1696
|
-
onMessageAnnotationsPart(value) {
|
1697
|
-
if (messageAnnotations == null) {
|
1698
|
-
messageAnnotations = [...value];
|
1699
|
-
} else {
|
1700
|
-
messageAnnotations.push(...value);
|
1701
|
-
}
|
1702
|
-
execUpdate();
|
1703
|
-
},
|
1704
|
-
onFinishStepPart(value) {
|
1705
|
-
step += 1;
|
1706
|
-
currentTextPart = value.isContinued ? currentTextPart : void 0;
|
1707
|
-
currentReasoningPart = void 0;
|
1708
|
-
},
|
1709
|
-
onStartStepPart(value) {
|
1710
|
-
if (!replaceLastMessage) {
|
1711
|
-
message.id = value.messageId;
|
1712
|
-
}
|
1713
|
-
message.parts.push({ type: "step-start" });
|
1714
|
-
execUpdate();
|
1715
|
-
},
|
1716
|
-
onFinishMessagePart(value) {
|
1717
|
-
finishReason = value.finishReason;
|
1718
|
-
if (value.usage != null) {
|
1719
|
-
usage = value.usage;
|
1720
|
-
}
|
1721
|
-
},
|
1722
|
-
onErrorPart(error) {
|
1723
|
-
throw new Error(error);
|
1103
|
+
message.metadata = mergedMetadata;
|
1724
1104
|
}
|
1725
|
-
}
|
1726
|
-
|
1105
|
+
}
|
1106
|
+
return stream.pipeThrough(
|
1107
|
+
new TransformStream({
|
1108
|
+
async transform(chunk, controller) {
|
1109
|
+
const { type, value } = chunk;
|
1110
|
+
switch (type) {
|
1111
|
+
case "text": {
|
1112
|
+
if (currentTextPart == null) {
|
1113
|
+
currentTextPart = {
|
1114
|
+
type: "text",
|
1115
|
+
text: value
|
1116
|
+
};
|
1117
|
+
message.parts.push(currentTextPart);
|
1118
|
+
} else {
|
1119
|
+
currentTextPart.text += value;
|
1120
|
+
}
|
1121
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1122
|
+
break;
|
1123
|
+
}
|
1124
|
+
case "reasoning": {
|
1125
|
+
if (currentReasoningPart == null) {
|
1126
|
+
currentReasoningPart = {
|
1127
|
+
type: "reasoning",
|
1128
|
+
text: value.text,
|
1129
|
+
providerMetadata: value.providerMetadata
|
1130
|
+
};
|
1131
|
+
message.parts.push(currentReasoningPart);
|
1132
|
+
} else {
|
1133
|
+
currentReasoningPart.text += value.text;
|
1134
|
+
currentReasoningPart.providerMetadata = value.providerMetadata;
|
1135
|
+
}
|
1136
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1137
|
+
break;
|
1138
|
+
}
|
1139
|
+
case "reasoning-part-finish": {
|
1140
|
+
if (currentReasoningPart != null) {
|
1141
|
+
currentReasoningPart = void 0;
|
1142
|
+
}
|
1143
|
+
break;
|
1144
|
+
}
|
1145
|
+
case "file": {
|
1146
|
+
message.parts.push({
|
1147
|
+
type: "file",
|
1148
|
+
mediaType: value.mediaType,
|
1149
|
+
url: value.url
|
1150
|
+
});
|
1151
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1152
|
+
break;
|
1153
|
+
}
|
1154
|
+
case "source": {
|
1155
|
+
message.parts.push({
|
1156
|
+
type: "source",
|
1157
|
+
source: value
|
1158
|
+
});
|
1159
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1160
|
+
break;
|
1161
|
+
}
|
1162
|
+
case "tool-call-streaming-start": {
|
1163
|
+
const toolInvocations = getToolInvocations(message);
|
1164
|
+
partialToolCalls[value.toolCallId] = {
|
1165
|
+
text: "",
|
1166
|
+
step,
|
1167
|
+
toolName: value.toolName,
|
1168
|
+
index: toolInvocations.length
|
1169
|
+
};
|
1170
|
+
updateToolInvocationPart(value.toolCallId, {
|
1171
|
+
state: "partial-call",
|
1172
|
+
step,
|
1173
|
+
toolCallId: value.toolCallId,
|
1174
|
+
toolName: value.toolName,
|
1175
|
+
args: void 0
|
1176
|
+
});
|
1177
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1178
|
+
break;
|
1179
|
+
}
|
1180
|
+
case "tool-call-delta": {
|
1181
|
+
const partialToolCall = partialToolCalls[value.toolCallId];
|
1182
|
+
partialToolCall.text += value.argsTextDelta;
|
1183
|
+
const { value: partialArgs } = await parsePartialJson(
|
1184
|
+
partialToolCall.text
|
1185
|
+
);
|
1186
|
+
updateToolInvocationPart(value.toolCallId, {
|
1187
|
+
state: "partial-call",
|
1188
|
+
step: partialToolCall.step,
|
1189
|
+
toolCallId: value.toolCallId,
|
1190
|
+
toolName: partialToolCall.toolName,
|
1191
|
+
args: partialArgs
|
1192
|
+
});
|
1193
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1194
|
+
break;
|
1195
|
+
}
|
1196
|
+
case "tool-call": {
|
1197
|
+
const call = { args: value.args, ...value };
|
1198
|
+
updateToolInvocationPart(value.toolCallId, {
|
1199
|
+
state: "call",
|
1200
|
+
step,
|
1201
|
+
...call
|
1202
|
+
});
|
1203
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1204
|
+
if (onToolCall) {
|
1205
|
+
const result = await onToolCall({
|
1206
|
+
toolCall: call
|
1207
|
+
});
|
1208
|
+
if (result != null) {
|
1209
|
+
updateToolInvocationPart(value.toolCallId, {
|
1210
|
+
state: "result",
|
1211
|
+
step,
|
1212
|
+
...call,
|
1213
|
+
result
|
1214
|
+
});
|
1215
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1216
|
+
}
|
1217
|
+
}
|
1218
|
+
break;
|
1219
|
+
}
|
1220
|
+
case "tool-result": {
|
1221
|
+
const toolInvocations = getToolInvocations(message);
|
1222
|
+
if (toolInvocations == null) {
|
1223
|
+
throw new Error("tool_result must be preceded by a tool_call");
|
1224
|
+
}
|
1225
|
+
const toolInvocationIndex = toolInvocations.findIndex(
|
1226
|
+
(invocation) => invocation.toolCallId === value.toolCallId
|
1227
|
+
);
|
1228
|
+
if (toolInvocationIndex === -1) {
|
1229
|
+
throw new Error(
|
1230
|
+
"tool_result must be preceded by a tool_call with the same toolCallId"
|
1231
|
+
);
|
1232
|
+
}
|
1233
|
+
const result = { result: value.result, ...value };
|
1234
|
+
updateToolInvocationPart(value.toolCallId, {
|
1235
|
+
...toolInvocations[toolInvocationIndex],
|
1236
|
+
state: "result",
|
1237
|
+
...result
|
1238
|
+
});
|
1239
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1240
|
+
break;
|
1241
|
+
}
|
1242
|
+
case "start-step": {
|
1243
|
+
message.parts.push({ type: "step-start" });
|
1244
|
+
await updateMessageMetadata(value.metadata);
|
1245
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1246
|
+
break;
|
1247
|
+
}
|
1248
|
+
case "finish-step": {
|
1249
|
+
step += 1;
|
1250
|
+
currentTextPart = void 0;
|
1251
|
+
currentReasoningPart = void 0;
|
1252
|
+
await updateMessageMetadata(value.metadata);
|
1253
|
+
if (value.metadata != null) {
|
1254
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1255
|
+
}
|
1256
|
+
break;
|
1257
|
+
}
|
1258
|
+
case "start": {
|
1259
|
+
if (value.messageId != null) {
|
1260
|
+
message.id = value.messageId;
|
1261
|
+
}
|
1262
|
+
await updateMessageMetadata(value.metadata);
|
1263
|
+
if (value.messageId != null || value.metadata != null) {
|
1264
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1265
|
+
}
|
1266
|
+
break;
|
1267
|
+
}
|
1268
|
+
case "finish": {
|
1269
|
+
await updateMessageMetadata(value.metadata);
|
1270
|
+
if (value.metadata != null) {
|
1271
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1272
|
+
}
|
1273
|
+
break;
|
1274
|
+
}
|
1275
|
+
case "metadata": {
|
1276
|
+
await updateMessageMetadata(value.metadata);
|
1277
|
+
if (value.metadata != null) {
|
1278
|
+
onUpdate == null ? void 0 : onUpdate({ message });
|
1279
|
+
}
|
1280
|
+
break;
|
1281
|
+
}
|
1282
|
+
case "error": {
|
1283
|
+
throw new Error(value);
|
1284
|
+
}
|
1285
|
+
default: {
|
1286
|
+
const _exhaustiveCheck = type;
|
1287
|
+
throw new Error(`Unhandled stream part: ${_exhaustiveCheck}`);
|
1288
|
+
}
|
1289
|
+
}
|
1290
|
+
controller.enqueue(chunk);
|
1291
|
+
},
|
1292
|
+
flush() {
|
1293
|
+
onFinish == null ? void 0 : onFinish({ message });
|
1294
|
+
}
|
1295
|
+
})
|
1296
|
+
);
|
1727
1297
|
}
|
1728
1298
|
|
1729
1299
|
// src/ui/process-chat-text-response.ts
|
1730
|
-
var
|
1300
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
1731
1301
|
|
1732
1302
|
// src/ui/process-text-stream.ts
|
1733
1303
|
async function processTextStream({
|
@@ -1749,13 +1319,11 @@ async function processChatTextResponse({
|
|
1749
1319
|
stream,
|
1750
1320
|
update,
|
1751
1321
|
onFinish,
|
1752
|
-
|
1753
|
-
generateId: generateId3 = import_provider_utils5.generateId
|
1322
|
+
generateId: generateId3 = import_provider_utils3.generateId
|
1754
1323
|
}) {
|
1755
1324
|
const textPart = { type: "text", text: "" };
|
1756
1325
|
const resultMessage = {
|
1757
1326
|
id: generateId3(),
|
1758
|
-
createdAt: getCurrentDate(),
|
1759
1327
|
role: "assistant",
|
1760
1328
|
parts: [textPart]
|
1761
1329
|
};
|
@@ -1763,21 +1331,10 @@ async function processChatTextResponse({
|
|
1763
1331
|
stream,
|
1764
1332
|
onTextPart: (chunk) => {
|
1765
1333
|
textPart.text += chunk;
|
1766
|
-
update({
|
1767
|
-
message: { ...resultMessage },
|
1768
|
-
data: [],
|
1769
|
-
replaceLastMessage: false
|
1770
|
-
});
|
1334
|
+
update({ message: { ...resultMessage } });
|
1771
1335
|
}
|
1772
1336
|
});
|
1773
|
-
onFinish == null ? void 0 : onFinish(resultMessage
|
1774
|
-
usage: {
|
1775
|
-
inputTokens: void 0,
|
1776
|
-
outputTokens: void 0,
|
1777
|
-
totalTokens: void 0
|
1778
|
-
},
|
1779
|
-
finishReason: "unknown"
|
1780
|
-
});
|
1337
|
+
onFinish == null ? void 0 : onFinish({ message: resultMessage });
|
1781
1338
|
}
|
1782
1339
|
|
1783
1340
|
// src/ui/call-chat-api.ts
|
@@ -1785,19 +1342,18 @@ var getOriginalFetch = () => fetch;
|
|
1785
1342
|
async function callChatApi({
|
1786
1343
|
api,
|
1787
1344
|
body,
|
1788
|
-
streamProtocol = "
|
1345
|
+
streamProtocol = "ui-message",
|
1789
1346
|
credentials,
|
1790
1347
|
headers,
|
1791
1348
|
abortController,
|
1792
|
-
onResponse,
|
1793
1349
|
onUpdate,
|
1794
1350
|
onFinish,
|
1795
1351
|
onToolCall,
|
1796
1352
|
generateId: generateId3,
|
1797
1353
|
fetch: fetch2 = getOriginalFetch(),
|
1798
1354
|
lastMessage,
|
1799
|
-
|
1800
|
-
|
1355
|
+
requestType = "generate",
|
1356
|
+
messageMetadataSchema
|
1801
1357
|
}) {
|
1802
1358
|
var _a17, _b, _c;
|
1803
1359
|
const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.id}`, {
|
@@ -1818,9 +1374,6 @@ async function callChatApi({
|
|
1818
1374
|
signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
|
1819
1375
|
credentials
|
1820
1376
|
});
|
1821
|
-
if (onResponse != null) {
|
1822
|
-
await onResponse(response);
|
1823
|
-
}
|
1824
1377
|
if (!response.ok) {
|
1825
1378
|
throw new Error(
|
1826
1379
|
(_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
|
@@ -1835,24 +1388,49 @@ async function callChatApi({
|
|
1835
1388
|
stream: response.body,
|
1836
1389
|
update: onUpdate,
|
1837
1390
|
onFinish,
|
1838
|
-
generateId: generateId3
|
1839
|
-
getCurrentDate
|
1391
|
+
generateId: generateId3
|
1840
1392
|
});
|
1841
1393
|
return;
|
1842
1394
|
}
|
1843
|
-
case "
|
1844
|
-
await
|
1845
|
-
stream:
|
1846
|
-
|
1847
|
-
|
1848
|
-
|
1849
|
-
|
1850
|
-
|
1851
|
-
|
1852
|
-
|
1853
|
-
|
1854
|
-
|
1855
|
-
|
1395
|
+
case "ui-message": {
|
1396
|
+
await consumeStream({
|
1397
|
+
stream: processUIMessageStream({
|
1398
|
+
stream: (0, import_provider_utils4.parseJsonEventStream)({
|
1399
|
+
stream: response.body,
|
1400
|
+
schema: uiMessageStreamPartSchema
|
1401
|
+
}).pipeThrough(
|
1402
|
+
new TransformStream({
|
1403
|
+
async transform(part, controller) {
|
1404
|
+
if (!part.success) {
|
1405
|
+
throw part.error;
|
1406
|
+
}
|
1407
|
+
controller.enqueue(part.value);
|
1408
|
+
}
|
1409
|
+
})
|
1410
|
+
),
|
1411
|
+
onUpdate({ message }) {
|
1412
|
+
const copiedMessage = {
|
1413
|
+
// deep copy the message to ensure that deep changes (msg attachments) are updated
|
1414
|
+
// with SolidJS. SolidJS uses referential integration of sub-objects to detect changes.
|
1415
|
+
...structuredClone(message),
|
1416
|
+
// add a revision id to ensure that the message is updated with SWR. SWR uses a
|
1417
|
+
// hashing approach by default to detect changes, but it only works for shallow
|
1418
|
+
// changes. This is why we need to add a revision id to ensure that the message
|
1419
|
+
// is updated with SWR (without it, the changes get stuck in SWR and are not
|
1420
|
+
// forwarded to rendering):
|
1421
|
+
revisionId: generateId3()
|
1422
|
+
};
|
1423
|
+
onUpdate({ message: copiedMessage });
|
1424
|
+
},
|
1425
|
+
lastMessage,
|
1426
|
+
onToolCall,
|
1427
|
+
onFinish,
|
1428
|
+
newMessageId: generateId3(),
|
1429
|
+
messageMetadataSchema
|
1430
|
+
}),
|
1431
|
+
onError: (error) => {
|
1432
|
+
throw error;
|
1433
|
+
}
|
1856
1434
|
});
|
1857
1435
|
return;
|
1858
1436
|
}
|
@@ -1864,6 +1442,7 @@ async function callChatApi({
 }
 
 // src/ui/call-completion-api.ts
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
 var getOriginalFetch2 = () => fetch;
 async function callCompletionApi({
 api,
@@ -1876,10 +1455,8 @@ async function callCompletionApi({
 setLoading,
 setError,
 setAbortController,
-onResponse,
 onFinish,
 onError,
-onData,
 fetch: fetch2 = getOriginalFetch2()
 }) {
 var _a17;
@@ -1904,13 +1481,6 @@ async function callCompletionApi({
 }).catch((err) => {
 throw err;
 });
-if (onResponse) {
-try {
-await onResponse(response);
-} catch (err) {
-throw err;
-}
-}
 if (!response.ok) {
 throw new Error(
 (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
@@ -1932,17 +1502,28 @@ async function callCompletionApi({
 break;
 }
 case "data": {
-await
-stream:
-
-
-
-
-
-
-
-
-
+await consumeStream({
+stream: (0, import_provider_utils5.parseJsonEventStream)({
+stream: response.body,
+schema: uiMessageStreamPartSchema
+}).pipeThrough(
+new TransformStream({
+async transform(part) {
+if (!part.success) {
+throw part.error;
+}
+const { type, value } = part.value;
+if (type === "text") {
+result += value;
+setCompletion(result);
+} else if (type === "error") {
+throw new Error(value);
+}
+}
+})
+),
+onError: (error) => {
+throw error;
 }
 });
 break;
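On the wire, both the chat branch and the completion branch above now consume a JSON-over-SSE stream: each part is one `data:` event whose payload is validated against `uiMessageStreamPartSchema`, and the stream ends with a `data: [DONE]` marker (see `JsonToSseTransformStream` further down in this diff). A hedged illustration of what the completion branch parses; the exact payloads are assumptions, only the `{ type, value }` shape and the `"text"`/`"error"` types are taken from the code above:

data: {"type":"text","value":"Hello"}

data: {"type":"text","value":" world"}

data: {"type":"error","value":"something went wrong"}

data: [DONE]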
@@ -2154,58 +1735,186 @@ function convertToModelMessages(messages, options) {
 });
 }
 }
-}
-return modelMessages;
+}
+return modelMessages;
+}
+var convertToCoreMessages = convertToModelMessages;
+
+// src/ui/should-resubmit-messages.ts
+function shouldResubmitMessages({
+originalMaxToolInvocationStep,
+originalMessageCount,
+maxSteps,
+messages
+}) {
+var _a17;
+const lastMessage = messages[messages.length - 1];
+return (
+// check if the feature is enabled:
+maxSteps > 1 && // ensure there is a last message:
+lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
+(messages.length > originalMessageCount || extractMaxToolInvocationStep(getToolInvocations(lastMessage)) !== originalMaxToolInvocationStep) && // check that next step is possible:
+isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
+((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
+);
+}
+function isAssistantMessageWithCompletedToolCalls(message) {
+if (message.role !== "assistant") {
+return false;
+}
+const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
+return part.type === "step-start" ? index : lastIndex;
+}, -1);
+const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
+return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
+}
+
+// src/ui/update-tool-call-result.ts
+function updateToolCallResult({
+messages,
+toolCallId,
+toolResult: result
+}) {
+const lastMessage = messages[messages.length - 1];
+const invocationPart = lastMessage.parts.find(
+(part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+);
+if (invocationPart == null) {
+return;
+}
+invocationPart.toolInvocation = {
+...invocationPart.toolInvocation,
+state: "result",
+result
+};
+}
+
+// src/ui-message-stream/create-ui-message-stream.ts
+function createUIMessageStream({
+execute,
+onError = () => "An error occurred."
+// mask error messages for safety by default
+}) {
+let controller;
+const ongoingStreamPromises = [];
+const stream = new ReadableStream({
+start(controllerArg) {
+controller = controllerArg;
+}
+});
+function safeEnqueue(data) {
+try {
+controller.enqueue(data);
+} catch (error) {
+}
+}
+try {
+const result = execute({
+write(part) {
+safeEnqueue(part);
+},
+merge(streamArg) {
+ongoingStreamPromises.push(
+(async () => {
+const reader = streamArg.getReader();
+while (true) {
+const { done, value } = await reader.read();
+if (done)
+break;
+safeEnqueue(value);
+}
+})().catch((error) => {
+safeEnqueue({ type: "error", value: onError(error) });
+})
+);
+},
+onError
+});
+if (result) {
+ongoingStreamPromises.push(
+result.catch((error) => {
+safeEnqueue({ type: "error", value: onError(error) });
+})
+);
+}
+} catch (error) {
+safeEnqueue({ type: "error", value: onError(error) });
+}
+const waitForStreams = new Promise(async (resolve) => {
+while (ongoingStreamPromises.length > 0) {
+await ongoingStreamPromises.shift();
+}
+resolve();
+});
+waitForStreams.finally(() => {
+try {
+controller.close();
+} catch (error) {
+}
+});
+return stream;
 }
-var convertToCoreMessages = convertToModelMessages;
 
-// src/ui/
-
-
-
-
-
+// src/ui-message-stream/ui-message-stream-headers.ts
+var uiMessageStreamHeaders = {
+"content-type": "text/event-stream",
+"cache-control": "no-cache",
+connection: "keep-alive",
+"x-vercel-ai-ui-message-stream": "v1",
+"x-accel-buffering": "no"
+// disable nginx buffering
+};
+
+// src/ui-message-stream/json-to-sse-transform-stream.ts
+var JsonToSseTransformStream = class extends TransformStream {
+constructor() {
+super({
+transform(part, controller) {
+controller.enqueue(`data: ${JSON.stringify(part)}
+
+`);
+},
+flush(controller) {
+controller.enqueue("data: [DONE]\n\n");
+}
+});
+}
+};
+
+// src/ui-message-stream/create-ui-message-stream-response.ts
+function createUIMessageStreamResponse({
+status,
+statusText,
+headers,
+stream
 }) {
-
-
-
-
-
-
-
-isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
-((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
+return new Response(
+stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream()),
+{
+status,
+statusText,
+headers: prepareHeaders(headers, uiMessageStreamHeaders)
+}
 );
 }
-function isAssistantMessageWithCompletedToolCalls(message) {
-if (message.role !== "assistant") {
-return false;
-}
-const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
-return part.type === "step-start" ? index : lastIndex;
-}, -1);
-const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
-return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
-}
 
-// src/ui/
-function
-
-
-
+// src/ui-message-stream/pipe-ui-message-stream-to-response.ts
+function pipeUIMessageStreamToResponse({
+response,
+status,
+statusText,
+headers,
+stream
 }) {
-
-
-
-
-
-
-
-
-
-state: "result",
-result
-};
+writeToServerResponse({
+response,
+status,
+statusText,
+headers: Object.fromEntries(
+prepareHeaders(headers, uiMessageStreamHeaders).entries()
+),
+stream: stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream())
+});
 }
 
 // src/util/data-url.ts
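Taken together, the new `createUIMessageStream`, `createUIMessageStreamResponse`, and `pipeUIMessageStreamToResponse` helpers added in this hunk form the server side of the UI message stream. A minimal, hedged usage sketch follows; the route shape, the `"text"` part payload, and returning the Response directly are assumptions for illustration, while the helper signatures come from the code above:

const { createUIMessageStream, createUIMessageStreamResponse } = require("ai");

function handleChat() {
  const stream = createUIMessageStream({
    execute({ write, merge }) {
      // write() enqueues a single stream part; the { type, value } shape mirrors the
      // "error" parts that createUIMessageStream itself emits (the "text" payload here
      // is an assumption for illustration):
      write({ type: "text", value: "Hello" });
      // merge() can splice additional ReadableStreams of parts into the same output.
    },
    onError: (error) => `Stream failed: ${String(error)}`
  });
  // Each part becomes one SSE "data:" event via JsonToSseTransformStream, and the
  // response carries the x-vercel-ai-ui-message-stream: v1 header defined above.
  return createUIMessageStreamResponse({ stream, status: 200 });
}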
@@ -2307,7 +2016,7 @@ function simulateReadableStream({
|
|
2307
2016
|
}
|
2308
2017
|
|
2309
2018
|
// src/util/retry-with-exponential-backoff.ts
|
2310
|
-
var
|
2019
|
+
var import_provider17 = require("@ai-sdk/provider");
|
2311
2020
|
var import_provider_utils7 = require("@ai-sdk/provider-utils");
|
2312
2021
|
var retryWithExponentialBackoff = ({
|
2313
2022
|
maxRetries = 2,
|
@@ -2342,7 +2051,7 @@ async function _retryWithExponentialBackoff(f, {
|
|
2342
2051
|
errors: newErrors
|
2343
2052
|
});
|
2344
2053
|
}
|
2345
|
-
if (error instanceof Error &&
|
2054
|
+
if (error instanceof Error && import_provider17.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
|
2346
2055
|
await (0, import_provider_utils7.delay)(delayInMs);
|
2347
2056
|
return _retryWithExponentialBackoff(
|
2348
2057
|
f,
|
@@ -3073,6 +2782,7 @@ async function generateImage({
 model,
 prompt,
 n = 1,
+maxImagesPerCall,
 size,
 aspectRatio,
 seed,
@@ -3083,14 +2793,14 @@ async function generateImage({
 }) {
 var _a17, _b;
 const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
-const
-const callCount = Math.ceil(n /
+const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : model.maxImagesPerCall) != null ? _a17 : 1;
+const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
 const callImageCounts = Array.from({ length: callCount }, (_, i) => {
 if (i < callCount - 1) {
-return
+return maxImagesPerCallWithDefault;
 }
-const remainder = n %
-return remainder === 0 ?
+const remainder = n % maxImagesPerCallWithDefault;
+return remainder === 0 ? maxImagesPerCallWithDefault : remainder;
 });
 const results = await Promise.all(
 callImageCounts.map(
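The new `maxImagesPerCall` option caps how many images are requested per provider call, and `generateImage` fans the total `n` out over several calls. A small worked example of the arithmetic above; the numbers are assumptions for illustration:

// n = 5, maxImagesPerCallWithDefault = 2
// callCount = Math.ceil(5 / 2) = 3
// callImageCounts = [2, 2, 1]  -> three calls requesting 2, 2 and 1 images,
// whose results are awaited together with Promise.all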
@@ -3161,8 +2871,8 @@ var DefaultGenerateImageResult = class {
|
|
3161
2871
|
};
|
3162
2872
|
|
3163
2873
|
// core/generate-object/generate-object.ts
|
3164
|
-
var
|
3165
|
-
var
|
2874
|
+
var import_provider21 = require("@ai-sdk/provider");
|
2875
|
+
var import_provider_utils14 = require("@ai-sdk/provider-utils");
|
3166
2876
|
|
3167
2877
|
// core/generate-text/extract-content-text.ts
|
3168
2878
|
function extractContentText(content) {
|
@@ -3176,7 +2886,7 @@ function extractContentText(content) {
|
|
3176
2886
|
}
|
3177
2887
|
|
3178
2888
|
// core/prompt/convert-to-language-model-prompt.ts
|
3179
|
-
var
|
2889
|
+
var import_provider_utils11 = require("@ai-sdk/provider-utils");
|
3180
2890
|
|
3181
2891
|
// src/util/download.ts
|
3182
2892
|
async function download({ url }) {
|
@@ -3203,6 +2913,89 @@ async function download({ url }) {
 }
 }
 
+// core/prompt/data-content.ts
+var import_provider18 = require("@ai-sdk/provider");
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_zod2 = require("zod");
+
+// core/prompt/split-data-url.ts
+function splitDataUrl(dataUrl) {
+try {
+const [header, base64Content] = dataUrl.split(",");
+return {
+mediaType: header.split(";")[0].split(":")[1],
+base64Content
+};
+} catch (error) {
+return {
+mediaType: void 0,
+base64Content: void 0
+};
+}
+}
+
+// core/prompt/data-content.ts
+var dataContentSchema = import_zod2.z.union([
+import_zod2.z.string(),
+import_zod2.z.instanceof(Uint8Array),
+import_zod2.z.instanceof(ArrayBuffer),
+import_zod2.z.custom(
+// Buffer might not be available in some environments such as CloudFlare:
+(value) => {
+var _a17, _b;
+return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
+},
+{ message: "Must be a Buffer" }
+)
+]);
+function convertToLanguageModelV2DataContent(content) {
+if (content instanceof Uint8Array) {
+return { data: content, mediaType: void 0 };
+}
+if (content instanceof ArrayBuffer) {
+return { data: new Uint8Array(content), mediaType: void 0 };
+}
+if (typeof content === "string") {
+try {
+content = new URL(content);
+} catch (error) {
+}
+}
+if (content instanceof URL && content.protocol === "data:") {
+const { mediaType: dataUrlMediaType, base64Content } = splitDataUrl(
+content.toString()
+);
+if (dataUrlMediaType == null || base64Content == null) {
+throw new import_provider18.AISDKError({
+name: "InvalidDataContentError",
+message: `Invalid data URL format in content ${content.toString()}`
+});
+}
+return { data: base64Content, mediaType: dataUrlMediaType };
+}
+return { data: content, mediaType: void 0 };
+}
+function convertDataContentToUint8Array(content) {
+if (content instanceof Uint8Array) {
+return content;
+}
+if (typeof content === "string") {
+try {
+return (0, import_provider_utils10.convertBase64ToUint8Array)(content);
+} catch (error) {
+throw new InvalidDataContentError({
+message: "Invalid data content. Content string is not a base64-encoded media.",
+content,
+cause: error
+});
+}
+}
+if (content instanceof ArrayBuffer) {
+return new Uint8Array(content);
+}
+throw new InvalidDataContentError({ content });
+}
+
 // core/prompt/convert-to-language-model-prompt.ts
 async function convertToLanguageModelPrompt({
 prompt,
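A quick illustration of `splitDataUrl` from the block above; the input value is an assumption:

// splitDataUrl("data:image/png;base64,iVBORw0KGgo=")
// -> { mediaType: "image/png", base64Content: "iVBORw0KGgo=" }
// Anything that does not look like a data URL yields
// { mediaType: undefined, base64Content: undefined }.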
@@ -3339,7 +3132,7 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
|
|
3339
3132
|
}
|
3340
3133
|
return { mediaType, data };
|
3341
3134
|
}).filter(
|
3342
|
-
(part) => part.data instanceof URL && part.mediaType != null && !(0,
|
3135
|
+
(part) => part.data instanceof URL && part.mediaType != null && !(0, import_provider_utils11.isUrlSupported)({
|
3343
3136
|
url: part.data.toString(),
|
3344
3137
|
mediaType: part.mediaType,
|
3345
3138
|
supportedUrls
|
@@ -3423,8 +3216,8 @@ function prepareCallSettings({
|
|
3423
3216
|
topK,
|
3424
3217
|
presencePenalty,
|
3425
3218
|
frequencyPenalty,
|
3426
|
-
|
3427
|
-
|
3219
|
+
seed,
|
3220
|
+
stopSequences
|
3428
3221
|
}) {
|
3429
3222
|
if (maxOutputTokens != null) {
|
3430
3223
|
if (!Number.isInteger(maxOutputTokens)) {
|
@@ -3498,19 +3291,19 @@ function prepareCallSettings({
|
|
3498
3291
|
}
|
3499
3292
|
return {
|
3500
3293
|
maxOutputTokens,
|
3501
|
-
temperature
|
3294
|
+
temperature,
|
3502
3295
|
topP,
|
3503
3296
|
topK,
|
3504
3297
|
presencePenalty,
|
3505
3298
|
frequencyPenalty,
|
3506
|
-
stopSequences
|
3299
|
+
stopSequences,
|
3507
3300
|
seed
|
3508
3301
|
};
|
3509
3302
|
}
|
3510
3303
|
|
3511
3304
|
// core/prompt/standardize-prompt.ts
|
3512
|
-
var
|
3513
|
-
var
|
3305
|
+
var import_provider19 = require("@ai-sdk/provider");
|
3306
|
+
var import_provider_utils12 = require("@ai-sdk/provider-utils");
|
3514
3307
|
var import_zod8 = require("zod");
|
3515
3308
|
|
3516
3309
|
// core/prompt/message.ts
|
@@ -3646,19 +3439,19 @@ var coreMessageSchema = modelMessageSchema;
|
|
3646
3439
|
// core/prompt/standardize-prompt.ts
|
3647
3440
|
async function standardizePrompt(prompt) {
|
3648
3441
|
if (prompt.prompt == null && prompt.messages == null) {
|
3649
|
-
throw new
|
3442
|
+
throw new import_provider19.InvalidPromptError({
|
3650
3443
|
prompt,
|
3651
3444
|
message: "prompt or messages must be defined"
|
3652
3445
|
});
|
3653
3446
|
}
|
3654
3447
|
if (prompt.prompt != null && prompt.messages != null) {
|
3655
|
-
throw new
|
3448
|
+
throw new import_provider19.InvalidPromptError({
|
3656
3449
|
prompt,
|
3657
3450
|
message: "prompt and messages cannot be defined at the same time"
|
3658
3451
|
});
|
3659
3452
|
}
|
3660
3453
|
if (prompt.system != null && typeof prompt.system !== "string") {
|
3661
|
-
throw new
|
3454
|
+
throw new import_provider19.InvalidPromptError({
|
3662
3455
|
prompt,
|
3663
3456
|
message: "system must be a string"
|
3664
3457
|
});
|
@@ -3671,23 +3464,23 @@ async function standardizePrompt(prompt) {
|
|
3671
3464
|
} else if (prompt.messages != null) {
|
3672
3465
|
messages = prompt.messages;
|
3673
3466
|
} else {
|
3674
|
-
throw new
|
3467
|
+
throw new import_provider19.InvalidPromptError({
|
3675
3468
|
prompt,
|
3676
3469
|
message: "prompt or messages must be defined"
|
3677
3470
|
});
|
3678
3471
|
}
|
3679
3472
|
if (messages.length === 0) {
|
3680
|
-
throw new
|
3473
|
+
throw new import_provider19.InvalidPromptError({
|
3681
3474
|
prompt,
|
3682
3475
|
message: "messages must not be empty"
|
3683
3476
|
});
|
3684
3477
|
}
|
3685
|
-
const validationResult = await (0,
|
3478
|
+
const validationResult = await (0, import_provider_utils12.safeValidateTypes)({
|
3686
3479
|
value: messages,
|
3687
3480
|
schema: import_zod8.z.array(modelMessageSchema)
|
3688
3481
|
});
|
3689
3482
|
if (!validationResult.success) {
|
3690
|
-
throw new
|
3483
|
+
throw new import_provider19.InvalidPromptError({
|
3691
3484
|
prompt,
|
3692
3485
|
message: "messages must be an array of ModelMessage",
|
3693
3486
|
cause: validationResult.error
|
@@ -3700,8 +3493,25 @@ async function standardizePrompt(prompt) {
 }
 
 // core/generate-object/output-strategy.ts
-var
-var
+var import_provider20 = require("@ai-sdk/provider");
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
+
+// src/util/async-iterable-stream.ts
+function createAsyncIterableStream(source) {
+const stream = source.pipeThrough(new TransformStream());
+stream[Symbol.asyncIterator] = () => {
+const reader = stream.getReader();
+return {
+async next() {
+const { done, value } = await reader.read();
+return done ? { done: true, value: void 0 } : { done: false, value };
+}
+};
+};
+return stream;
+}
+
+// core/generate-object/output-strategy.ts
 var noSchemaOutputStrategy = {
 type: "no-schema",
 jsonSchema: void 0,
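`createAsyncIterableStream` is what lets the object and element streams below be consumed with `for await`. A hedged consumption sketch; the source stream is a stand-in, only the helper itself comes from the code above:

async function logAll(someReadableStream) {
  // someReadableStream is a placeholder for any ReadableStream of values
  const iterable = createAsyncIterableStream(someReadableStream);
  for await (const value of iterable) {
    // values arrive in stream order; the loop ends when the source stream closes
    console.log(value);
  }
}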
@@ -3721,7 +3531,7 @@ var noSchemaOutputStrategy = {
|
|
3721
3531
|
} : { success: true, value };
|
3722
3532
|
},
|
3723
3533
|
createElementStream() {
|
3724
|
-
throw new
|
3534
|
+
throw new import_provider20.UnsupportedFunctionalityError({
|
3725
3535
|
functionality: "element streams in no-schema mode"
|
3726
3536
|
});
|
3727
3537
|
}
|
@@ -3740,10 +3550,10 @@ var objectOutputStrategy = (schema) => ({
|
|
3740
3550
|
};
|
3741
3551
|
},
|
3742
3552
|
async validateFinalResult(value) {
|
3743
|
-
return (0,
|
3553
|
+
return (0, import_provider_utils13.safeValidateTypes)({ value, schema });
|
3744
3554
|
},
|
3745
3555
|
createElementStream() {
|
3746
|
-
throw new
|
3556
|
+
throw new import_provider20.UnsupportedFunctionalityError({
|
3747
3557
|
functionality: "element streams in object mode"
|
3748
3558
|
});
|
3749
3559
|
}
|
@@ -3771,10 +3581,10 @@ var arrayOutputStrategy = (schema) => {
|
|
3771
3581
|
isFinalDelta
|
3772
3582
|
}) {
|
3773
3583
|
var _a17;
|
3774
|
-
if (!(0,
|
3584
|
+
if (!(0, import_provider20.isJSONObject)(value) || !(0, import_provider20.isJSONArray)(value.elements)) {
|
3775
3585
|
return {
|
3776
3586
|
success: false,
|
3777
|
-
error: new
|
3587
|
+
error: new import_provider20.TypeValidationError({
|
3778
3588
|
value,
|
3779
3589
|
cause: "value must be an object that contains an array of elements"
|
3780
3590
|
})
|
@@ -3784,7 +3594,7 @@ var arrayOutputStrategy = (schema) => {
|
|
3784
3594
|
const resultArray = [];
|
3785
3595
|
for (let i = 0; i < inputArray.length; i++) {
|
3786
3596
|
const element = inputArray[i];
|
3787
|
-
const result = await (0,
|
3597
|
+
const result = await (0, import_provider_utils13.safeValidateTypes)({ value: element, schema });
|
3788
3598
|
if (i === inputArray.length - 1 && !isFinalDelta) {
|
3789
3599
|
continue;
|
3790
3600
|
}
|
@@ -3814,10 +3624,10 @@ var arrayOutputStrategy = (schema) => {
|
|
3814
3624
|
};
|
3815
3625
|
},
|
3816
3626
|
async validateFinalResult(value) {
|
3817
|
-
if (!(0,
|
3627
|
+
if (!(0, import_provider20.isJSONObject)(value) || !(0, import_provider20.isJSONArray)(value.elements)) {
|
3818
3628
|
return {
|
3819
3629
|
success: false,
|
3820
|
-
error: new
|
3630
|
+
error: new import_provider20.TypeValidationError({
|
3821
3631
|
value,
|
3822
3632
|
cause: "value must be an object that contains an array of elements"
|
3823
3633
|
})
|
@@ -3825,7 +3635,7 @@ var arrayOutputStrategy = (schema) => {
|
|
3825
3635
|
}
|
3826
3636
|
const inputArray = value.elements;
|
3827
3637
|
for (const element of inputArray) {
|
3828
|
-
const result = await (0,
|
3638
|
+
const result = await (0, import_provider_utils13.safeValidateTypes)({ value: element, schema });
|
3829
3639
|
if (!result.success) {
|
3830
3640
|
return result;
|
3831
3641
|
}
|
@@ -3880,10 +3690,10 @@ var enumOutputStrategy = (enumValues) => {
|
|
3880
3690
|
additionalProperties: false
|
3881
3691
|
},
|
3882
3692
|
async validateFinalResult(value) {
|
3883
|
-
if (!(0,
|
3693
|
+
if (!(0, import_provider20.isJSONObject)(value) || typeof value.result !== "string") {
|
3884
3694
|
return {
|
3885
3695
|
success: false,
|
3886
|
-
error: new
|
3696
|
+
error: new import_provider20.TypeValidationError({
|
3887
3697
|
value,
|
3888
3698
|
cause: 'value must be an object that contains a string in the "result" property.'
|
3889
3699
|
})
|
@@ -3892,17 +3702,17 @@ var enumOutputStrategy = (enumValues) => {
|
|
3892
3702
|
const result = value.result;
|
3893
3703
|
return enumValues.includes(result) ? { success: true, value: result } : {
|
3894
3704
|
success: false,
|
3895
|
-
error: new
|
3705
|
+
error: new import_provider20.TypeValidationError({
|
3896
3706
|
value,
|
3897
3707
|
cause: "value must be a string in the enum"
|
3898
3708
|
})
|
3899
3709
|
};
|
3900
3710
|
},
|
3901
3711
|
async validatePartialResult({ value, textDelta }) {
|
3902
|
-
if (!(0,
|
3712
|
+
if (!(0, import_provider20.isJSONObject)(value) || typeof value.result !== "string") {
|
3903
3713
|
return {
|
3904
3714
|
success: false,
|
3905
|
-
error: new
|
3715
|
+
error: new import_provider20.TypeValidationError({
|
3906
3716
|
value,
|
3907
3717
|
cause: 'value must be an object that contains a string in the "result" property.'
|
3908
3718
|
})
|
@@ -3915,7 +3725,7 @@ var enumOutputStrategy = (enumValues) => {
|
|
3915
3725
|
if (value.result.length === 0 || possibleEnumValues.length === 0) {
|
3916
3726
|
return {
|
3917
3727
|
success: false,
|
3918
|
-
error: new
|
3728
|
+
error: new import_provider20.TypeValidationError({
|
3919
3729
|
value,
|
3920
3730
|
cause: "value must be a string in the enum"
|
3921
3731
|
})
|
@@ -3930,7 +3740,7 @@ var enumOutputStrategy = (enumValues) => {
|
|
3930
3740
|
};
|
3931
3741
|
},
|
3932
3742
|
createElementStream() {
|
3933
|
-
throw new
|
3743
|
+
throw new import_provider20.UnsupportedFunctionalityError({
|
3934
3744
|
functionality: "element streams in enum mode"
|
3935
3745
|
});
|
3936
3746
|
}
|
@@ -3943,9 +3753,9 @@ function getOutputStrategy({
|
|
3943
3753
|
}) {
|
3944
3754
|
switch (output) {
|
3945
3755
|
case "object":
|
3946
|
-
return objectOutputStrategy((0,
|
3756
|
+
return objectOutputStrategy((0, import_provider_utils13.asSchema)(schema));
|
3947
3757
|
case "array":
|
3948
|
-
return arrayOutputStrategy((0,
|
3758
|
+
return arrayOutputStrategy((0, import_provider_utils13.asSchema)(schema));
|
3949
3759
|
case "enum":
|
3950
3760
|
return enumOutputStrategy(enumValues);
|
3951
3761
|
case "no-schema":
|
@@ -4076,7 +3886,7 @@ function validateObjectGenerationInput({
|
|
4076
3886
|
}
|
4077
3887
|
|
4078
3888
|
// core/generate-object/generate-object.ts
|
4079
|
-
var originalGenerateId = (0,
|
3889
|
+
var originalGenerateId = (0, import_provider_utils14.createIdGenerator)({ prefix: "aiobj", size: 24 });
|
4080
3890
|
async function generateObject(options) {
|
4081
3891
|
const {
|
4082
3892
|
model,
|
@@ -4252,7 +4062,7 @@ async function generateObject(options) {
|
|
4252
4062
|
request = (_a17 = generateResult.request) != null ? _a17 : {};
|
4253
4063
|
response = generateResult.responseData;
|
4254
4064
|
async function processResult(result2) {
|
4255
|
-
const parseResult = await (0,
|
4065
|
+
const parseResult = await (0, import_provider_utils14.safeParseJSON)({ text: result2 });
|
4256
4066
|
if (!parseResult.success) {
|
4257
4067
|
throw new NoObjectGeneratedError({
|
4258
4068
|
message: "No object generated: could not parse the response.",
|
@@ -4287,7 +4097,7 @@ async function generateObject(options) {
|
|
4287
4097
|
try {
|
4288
4098
|
object2 = await processResult(result);
|
4289
4099
|
} catch (error) {
|
4290
|
-
if (repairText != null && NoObjectGeneratedError.isInstance(error) && (
|
4100
|
+
if (repairText != null && NoObjectGeneratedError.isInstance(error) && (import_provider21.JSONParseError.isInstance(error.cause) || import_provider21.TypeValidationError.isInstance(error.cause))) {
|
4291
4101
|
const repairedText = await repairText({
|
4292
4102
|
text: result,
|
4293
4103
|
error: error.cause
|
@@ -4348,7 +4158,7 @@ var DefaultGenerateObjectResult = class {
|
|
4348
4158
|
};
|
4349
4159
|
|
4350
4160
|
// core/generate-object/stream-object.ts
|
4351
|
-
var
|
4161
|
+
var import_provider_utils15 = require("@ai-sdk/provider-utils");
|
4352
4162
|
|
4353
4163
|
// src/util/create-resolvable-promise.ts
|
4354
4164
|
function createResolvablePromise() {
|
@@ -4492,7 +4302,7 @@ function now() {
|
|
4492
4302
|
}
|
4493
4303
|
|
4494
4304
|
// core/generate-object/stream-object.ts
|
4495
|
-
var originalGenerateId2 = (0,
|
4305
|
+
var originalGenerateId2 = (0, import_provider_utils15.createIdGenerator)({ prefix: "aiobj", size: 24 });
|
4496
4306
|
function streamObject(options) {
|
4497
4307
|
const {
|
4498
4308
|
model,
|
@@ -4997,8 +4807,8 @@ var DefaultStreamObjectResult = class {
|
|
4997
4807
|
};
|
4998
4808
|
|
4999
4809
|
// src/error/no-speech-generated-error.ts
|
5000
|
-
var
|
5001
|
-
var NoSpeechGeneratedError = class extends
|
4810
|
+
var import_provider22 = require("@ai-sdk/provider");
|
4811
|
+
var NoSpeechGeneratedError = class extends import_provider22.AISDKError {
|
5002
4812
|
constructor(options) {
|
5003
4813
|
super({
|
5004
4814
|
name: "AI_NoSpeechGeneratedError",
|
@@ -5087,23 +4897,10 @@ var DefaultSpeechResult = class {
|
|
5087
4897
|
};
|
5088
4898
|
|
5089
4899
|
// core/generate-text/generate-text.ts
|
5090
|
-
var
|
5091
|
-
|
5092
|
-
// src/util/split-on-last-whitespace.ts
|
5093
|
-
var lastWhitespaceRegexp = /^([\s\S]*?)(\s+)(\S*)$/;
|
5094
|
-
function splitOnLastWhitespace(text2) {
|
5095
|
-
const match = text2.match(lastWhitespaceRegexp);
|
5096
|
-
return match ? { prefix: match[1], whitespace: match[2], suffix: match[3] } : void 0;
|
5097
|
-
}
|
5098
|
-
|
5099
|
-
// src/util/remove-text-after-last-whitespace.ts
|
5100
|
-
function removeTextAfterLastWhitespace(text2) {
|
5101
|
-
const match = splitOnLastWhitespace(text2);
|
5102
|
-
return match ? match.prefix + match.whitespace : text2;
|
5103
|
-
}
|
4900
|
+
var import_provider_utils18 = require("@ai-sdk/provider-utils");
|
5104
4901
|
|
5105
4902
|
// core/prompt/prepare-tools-and-tool-choice.ts
|
5106
|
-
var
|
4903
|
+
var import_provider_utils16 = require("@ai-sdk/provider-utils");
|
5107
4904
|
|
5108
4905
|
// src/util/is-non-empty-object.ts
|
5109
4906
|
function isNonEmptyObject(object2) {
|
@@ -5135,7 +4932,7 @@ function prepareToolsAndToolChoice({
|
|
5135
4932
|
type: "function",
|
5136
4933
|
name: name17,
|
5137
4934
|
description: tool2.description,
|
5138
|
-
parameters: (0,
|
4935
|
+
parameters: (0, import_provider_utils16.asSchema)(tool2.parameters).jsonSchema
|
5139
4936
|
};
|
5140
4937
|
case "provider-defined":
|
5141
4938
|
return {
|
@@ -5203,18 +5000,9 @@ function asContent({
|
|
5203
5000
|
...toolResults
|
5204
5001
|
];
|
5205
5002
|
}
|
5206
|
-
function extractFiles(content) {
|
5207
|
-
return content.filter((part) => part.type === "file").map((part) => part.file);
|
5208
|
-
}
|
5209
|
-
function extractReasoning(content) {
|
5210
|
-
return content.filter((part) => part.type === "reasoning");
|
5211
|
-
}
|
5212
|
-
function extractSources(content) {
|
5213
|
-
return content.filter((part) => part.type === "source");
|
5214
|
-
}
|
5215
5003
|
|
5216
5004
|
// core/generate-text/parse-tool-call.ts
|
5217
|
-
var
|
5005
|
+
var import_provider_utils17 = require("@ai-sdk/provider-utils");
|
5218
5006
|
async function parseToolCall({
|
5219
5007
|
toolCall,
|
5220
5008
|
tools,
|
@@ -5238,7 +5026,7 @@ async function parseToolCall({
|
|
5238
5026
|
tools,
|
5239
5027
|
parameterSchema: ({ toolName }) => {
|
5240
5028
|
const { parameters } = tools[toolName];
|
5241
|
-
return (0,
|
5029
|
+
return (0, import_provider_utils17.asSchema)(parameters).jsonSchema;
|
5242
5030
|
},
|
5243
5031
|
system,
|
5244
5032
|
messages,
|
@@ -5268,8 +5056,8 @@ async function doParseToolCall({
|
|
5268
5056
|
availableTools: Object.keys(tools)
|
5269
5057
|
});
|
5270
5058
|
}
|
5271
|
-
const schema = (0,
|
5272
|
-
const parseResult = toolCall.args.trim() === "" ? await (0,
|
5059
|
+
const schema = (0, import_provider_utils17.asSchema)(tool2.parameters);
|
5060
|
+
const parseResult = toolCall.args.trim() === "" ? await (0, import_provider_utils17.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils17.safeParseJSON)({ text: toolCall.args, schema });
|
5273
5061
|
if (parseResult.success === false) {
|
5274
5062
|
throw new InvalidToolArgumentsError({
|
5275
5063
|
toolName,
|
@@ -5285,85 +5073,111 @@ async function doParseToolCall({
|
|
5285
5073
|
};
|
5286
5074
|
}
|
5287
5075
|
|
5288
|
-
// core/generate-text/
|
5289
|
-
|
5290
|
-
|
5291
|
-
|
5292
|
-
|
5076
|
+
// core/generate-text/step-result.ts
|
5077
|
+
var DefaultStepResult = class {
|
5078
|
+
constructor({
|
5079
|
+
content,
|
5080
|
+
finishReason,
|
5081
|
+
usage,
|
5082
|
+
warnings,
|
5083
|
+
request,
|
5084
|
+
response,
|
5085
|
+
providerMetadata
|
5086
|
+
}) {
|
5087
|
+
this.content = content;
|
5088
|
+
this.finishReason = finishReason;
|
5089
|
+
this.usage = usage;
|
5090
|
+
this.warnings = warnings;
|
5091
|
+
this.request = request;
|
5092
|
+
this.response = response;
|
5093
|
+
this.providerMetadata = providerMetadata;
|
5094
|
+
}
|
5095
|
+
get text() {
|
5096
|
+
return this.content.filter((part) => part.type === "text").map((part) => part.text).join("");
|
5097
|
+
}
|
5098
|
+
get reasoning() {
|
5099
|
+
return this.content.filter((part) => part.type === "reasoning");
|
5100
|
+
}
|
5101
|
+
get reasoningText() {
|
5102
|
+
return this.reasoning.length === 0 ? void 0 : this.reasoning.map((part) => part.text).join("");
|
5103
|
+
}
|
5104
|
+
get files() {
|
5105
|
+
return this.content.filter((part) => part.type === "file").map((part) => part.file);
|
5106
|
+
}
|
5107
|
+
get sources() {
|
5108
|
+
return this.content.filter((part) => part.type === "source");
|
5109
|
+
}
|
5110
|
+
get toolCalls() {
|
5111
|
+
return this.content.filter((part) => part.type === "tool-call");
|
5112
|
+
}
|
5113
|
+
get toolResults() {
|
5114
|
+
return this.content.filter((part) => part.type === "tool-result");
|
5115
|
+
}
|
5116
|
+
};
|
5293
5117
|
|
5294
5118
|
// core/generate-text/to-response-messages.ts
|
5295
5119
|
function toResponseMessages({
|
5296
|
-
|
5297
|
-
|
5298
|
-
reasoning,
|
5299
|
-
tools,
|
5300
|
-
toolCalls,
|
5301
|
-
toolResults,
|
5302
|
-
messageId,
|
5303
|
-
generateMessageId
|
5120
|
+
content: inputContent,
|
5121
|
+
tools
|
5304
5122
|
}) {
|
5305
5123
|
const responseMessages = [];
|
5306
|
-
const content =
|
5307
|
-
|
5308
|
-
|
5309
|
-
|
5310
|
-
|
5311
|
-
|
5312
|
-
|
5313
|
-
|
5314
|
-
|
5315
|
-
|
5316
|
-
|
5317
|
-
|
5318
|
-
|
5319
|
-
|
5320
|
-
|
5321
|
-
|
5322
|
-
|
5323
|
-
|
5324
|
-
|
5124
|
+
const content = inputContent.filter((part) => part.type !== "tool-result" && part.type !== "source").filter((part) => part.type !== "text" || part.text.length > 0).map((part) => {
|
5125
|
+
switch (part.type) {
|
5126
|
+
case "text":
|
5127
|
+
return part;
|
5128
|
+
case "reasoning":
|
5129
|
+
return {
|
5130
|
+
type: "reasoning",
|
5131
|
+
text: part.text,
|
5132
|
+
providerOptions: part.providerMetadata
|
5133
|
+
};
|
5134
|
+
case "file":
|
5135
|
+
return {
|
5136
|
+
type: "file",
|
5137
|
+
data: part.file.base64,
|
5138
|
+
mediaType: part.file.mediaType
|
5139
|
+
};
|
5140
|
+
case "tool-call":
|
5141
|
+
return part;
|
5142
|
+
}
|
5143
|
+
});
|
5325
5144
|
if (content.length > 0) {
|
5326
5145
|
responseMessages.push({
|
5327
5146
|
role: "assistant",
|
5328
|
-
content
|
5329
|
-
id: messageId
|
5147
|
+
content
|
5330
5148
|
});
|
5331
5149
|
}
|
5332
|
-
|
5150
|
+
const toolResultContent = inputContent.filter((part) => part.type === "tool-result").map((toolResult) => {
|
5151
|
+
const tool2 = tools[toolResult.toolName];
|
5152
|
+
return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
|
5153
|
+
type: "tool-result",
|
5154
|
+
toolCallId: toolResult.toolCallId,
|
5155
|
+
toolName: toolResult.toolName,
|
5156
|
+
result: tool2.experimental_toToolResultContent(toolResult.result),
|
5157
|
+
experimental_content: tool2.experimental_toToolResultContent(
|
5158
|
+
toolResult.result
|
5159
|
+
)
|
5160
|
+
} : {
|
5161
|
+
type: "tool-result",
|
5162
|
+
toolCallId: toolResult.toolCallId,
|
5163
|
+
toolName: toolResult.toolName,
|
5164
|
+
result: toolResult.result
|
5165
|
+
};
|
5166
|
+
});
|
5167
|
+
if (toolResultContent.length > 0) {
|
5333
5168
|
responseMessages.push({
|
5334
5169
|
role: "tool",
|
5335
|
-
|
5336
|
-
content: toolResults.map((toolResult) => {
|
5337
|
-
const tool2 = tools[toolResult.toolName];
|
5338
|
-
return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
|
5339
|
-
type: "tool-result",
|
5340
|
-
toolCallId: toolResult.toolCallId,
|
5341
|
-
toolName: toolResult.toolName,
|
5342
|
-
result: tool2.experimental_toToolResultContent(toolResult.result),
|
5343
|
-
experimental_content: tool2.experimental_toToolResultContent(
|
5344
|
-
toolResult.result
|
5345
|
-
)
|
5346
|
-
} : {
|
5347
|
-
type: "tool-result",
|
5348
|
-
toolCallId: toolResult.toolCallId,
|
5349
|
-
toolName: toolResult.toolName,
|
5350
|
-
result: toolResult.result
|
5351
|
-
};
|
5352
|
-
})
|
5170
|
+
content: toolResultContent
|
5353
5171
|
});
|
5354
5172
|
}
|
5355
5173
|
return responseMessages;
|
5356
5174
|
}
|
5357
5175
|
|
5358
5176
|
// core/generate-text/generate-text.ts
|
5359
|
-
var originalGenerateId3 = (0,
|
5177
|
+
var originalGenerateId3 = (0, import_provider_utils18.createIdGenerator)({
|
5360
5178
|
prefix: "aitxt",
|
5361
5179
|
size: 24
|
5362
5180
|
});
|
5363
|
-
var originalGenerateMessageId = (0, import_provider_utils17.createIdGenerator)({
|
5364
|
-
prefix: "msg",
|
5365
|
-
size: 24
|
5366
|
-
});
|
5367
5181
|
async function generateText({
|
5368
5182
|
model,
|
5369
5183
|
tools,
|
@@ -5375,9 +5189,7 @@ async function generateText({
|
|
5375
5189
|
abortSignal,
|
5376
5190
|
headers,
|
5377
5191
|
maxSteps = 1,
|
5378
|
-
experimental_generateMessageId: generateMessageId = originalGenerateMessageId,
|
5379
5192
|
experimental_output: output,
|
5380
|
-
experimental_continueSteps: continueSteps = false,
|
5381
5193
|
experimental_telemetry: telemetry,
|
5382
5194
|
providerOptions,
|
5383
5195
|
experimental_activeTools: activeTools,
|
@@ -5433,22 +5245,14 @@ async function generateText({
|
|
5433
5245
|
}),
|
5434
5246
|
tracer,
|
5435
5247
|
fn: async (span) => {
|
5436
|
-
var _a17, _b, _c, _d
|
5248
|
+
var _a17, _b, _c, _d;
|
5437
5249
|
const callSettings2 = prepareCallSettings(settings);
|
5438
5250
|
let currentModelResponse;
|
5439
5251
|
let currentToolCalls = [];
|
5440
5252
|
let currentToolResults = [];
|
5441
5253
|
let stepCount = 0;
|
5442
5254
|
const responseMessages = [];
|
5443
|
-
let text2 = "";
|
5444
|
-
const sources = [];
|
5445
5255
|
const steps = [];
|
5446
|
-
let usage = {
|
5447
|
-
inputTokens: void 0,
|
5448
|
-
outputTokens: void 0,
|
5449
|
-
totalTokens: void 0
|
5450
|
-
};
|
5451
|
-
let stepType = "initial";
|
5452
5256
|
do {
|
5453
5257
|
const stepInputMessages = [
|
5454
5258
|
...initialPrompt.messages,
|
@@ -5514,7 +5318,7 @@ async function generateText({
|
|
5514
5318
|
}),
|
5515
5319
|
tracer,
|
5516
5320
|
fn: async (span2) => {
|
5517
|
-
var _a19, _b2, _c2, _d2,
|
5321
|
+
var _a19, _b2, _c2, _d2, _e, _f, _g, _h;
|
5518
5322
|
const result = await stepModel.doGenerate({
|
5519
5323
|
...callSettings2,
|
5520
5324
|
tools: stepTools,
|
@@ -5528,7 +5332,7 @@ async function generateText({
|
|
5528
5332
|
const responseData = {
|
5529
5333
|
id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
|
5530
5334
|
timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
|
5531
|
-
modelId: (
|
5335
|
+
modelId: (_f = (_e = result.response) == null ? void 0 : _e.modelId) != null ? _f : stepModel.modelId,
|
5532
5336
|
headers: (_g = result.response) == null ? void 0 : _g.headers,
|
5533
5337
|
body: (_h = result.response) == null ? void 0 : _h.body
|
5534
5338
|
};
|
@@ -5587,89 +5391,35 @@ async function generateText({
|
|
5587
5391
|
messages: stepInputMessages,
|
5588
5392
|
abortSignal
|
5589
5393
|
});
|
5590
|
-
usage = addLanguageModelUsage(usage, currentModelResponse.usage);
|
5591
|
-
let nextStepType = "done";
|
5592
|
-
if (++stepCount < maxSteps) {
|
5593
|
-
if (continueSteps && currentModelResponse.finishReason === "length" && // only use continue when there are no tool calls:
|
5594
|
-
currentToolCalls.length === 0) {
|
5595
|
-
nextStepType = "continue";
|
5596
|
-
} else if (
|
5597
|
-
// there are tool calls:
|
5598
|
-
currentToolCalls.length > 0 && // all current tool calls have results:
|
5599
|
-
currentToolResults.length === currentToolCalls.length
|
5600
|
-
) {
|
5601
|
-
nextStepType = "tool-result";
|
5602
|
-
}
|
5603
|
-
}
|
5604
5394
|
const stepContent = asContent({
|
5605
5395
|
content: currentModelResponse.content,
|
5606
5396
|
toolCalls: currentToolCalls,
|
5607
5397
|
toolResults: currentToolResults
|
5608
5398
|
});
|
5609
|
-
|
5610
|
-
|
5611
|
-
|
5612
|
-
|
5613
|
-
|
5614
|
-
sources.push(
|
5615
|
-
...currentModelResponse.content.filter(
|
5616
|
-
(part) => part.type === "source"
|
5617
|
-
)
|
5399
|
+
responseMessages.push(
|
5400
|
+
...toResponseMessages({
|
5401
|
+
content: stepContent,
|
5402
|
+
tools: tools != null ? tools : {}
|
5403
|
+
})
|
5618
5404
|
);
|
5619
|
-
|
5620
|
-
const lastMessage = responseMessages[responseMessages.length - 1];
|
5621
|
-
if (typeof lastMessage.content === "string") {
|
5622
|
-
lastMessage.content += stepText;
|
5623
|
-
} else {
|
5624
|
-
lastMessage.content.push({
|
5625
|
-
text: stepText,
|
5626
|
-
type: "text"
|
5627
|
-
});
|
5628
|
-
}
|
5629
|
-
} else {
|
5630
|
-
responseMessages.push(
|
5631
|
-
...toResponseMessages({
|
5632
|
-
text: text2,
|
5633
|
-
files: extractFiles(stepContent),
|
5634
|
-
reasoning: extractReasoning(stepContent).map((part) => ({
|
5635
|
-
type: "reasoning",
|
5636
|
-
text: part.text,
|
5637
|
-
providerOptions: part.providerMetadata
|
5638
|
-
})),
|
5639
|
-
tools: tools != null ? tools : {},
|
5640
|
-
toolCalls: currentToolCalls,
|
5641
|
-
toolResults: currentToolResults,
|
5642
|
-
messageId: generateMessageId(),
|
5643
|
-
generateMessageId
|
5644
|
-
})
|
5645
|
-
);
|
5646
|
-
}
|
5647
|
-
const currentStepResult = {
|
5648
|
-
stepType,
|
5405
|
+
const currentStepResult = new DefaultStepResult({
|
5649
5406
|
content: stepContent,
|
5650
|
-
text: stepText,
|
5651
|
-
reasoningText: asReasoningText(extractReasoning(stepContent)),
|
5652
|
-
reasoning: extractReasoning(stepContent),
|
5653
|
-
files: extractFiles(stepContent),
|
5654
|
-
sources: extractSources(stepContent),
|
5655
|
-
toolCalls: currentToolCalls,
|
5656
|
-
toolResults: currentToolResults,
|
5657
5407
|
finishReason: currentModelResponse.finishReason,
|
5658
5408
|
usage: currentModelResponse.usage,
|
5659
5409
|
warnings: currentModelResponse.warnings,
|
5660
|
-
|
5410
|
+
providerMetadata: currentModelResponse.providerMetadata,
|
5411
|
+
request: (_d = currentModelResponse.request) != null ? _d : {},
|
5661
5412
|
response: {
|
5662
5413
|
...currentModelResponse.response,
|
5663
5414
|
// deep clone msgs to avoid mutating past messages in multi-step:
|
5664
5415
|
messages: structuredClone(responseMessages)
|
5665
|
-
}
|
5666
|
-
|
5667
|
-
isContinued: nextStepType === "continue"
|
5668
|
-
};
|
5416
|
+
}
|
5417
|
+
});
|
5669
5418
|
steps.push(currentStepResult);
|
5670
5419
|
await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
|
5671
|
-
|
5672
|
-
|
5420
|
+
} while (++stepCount < maxSteps && // there are tool calls:
|
5421
|
+
currentToolCalls.length > 0 && // all current tool calls have results:
|
5422
|
+
currentToolResults.length === currentToolCalls.length);
|
5673
5423
|
span.setAttributes(
|
5674
5424
|
selectTelemetryAttributes({
|
5675
5425
|
telemetry,
|
@@ -5690,32 +5440,17 @@ async function generateText({
|
|
5690
5440
|
}
|
5691
5441
|
})
|
5692
5442
|
);
|
5693
|
-
const
|
5694
|
-
{ text: text2 },
|
5695
|
-
{
|
5696
|
-
response: currentModelResponse.response,
|
5697
|
-
usage,
|
5698
|
-
finishReason: currentModelResponse.finishReason
|
5699
|
-
}
|
5700
|
-
));
|
5443
|
+
const lastStep = steps[steps.length - 1];
|
5701
5444
|
return new DefaultGenerateTextResult({
|
5702
|
-
text: text2,
|
5703
|
-
content: asContent({
|
5704
|
-
content: currentModelResponse.content,
|
5705
|
-
toolCalls: currentToolCalls,
|
5706
|
-
toolResults: currentToolResults
|
5707
|
-
}),
|
5708
|
-
resolvedOutput,
|
5709
|
-
finishReason: currentModelResponse.finishReason,
|
5710
|
-
usage,
|
5711
|
-
warnings: currentModelResponse.warnings,
|
5712
|
-
request: (_f = currentModelResponse.request) != null ? _f : {},
|
5713
|
-
response: {
|
5714
|
-
...currentModelResponse.response,
|
5715
|
-
messages: responseMessages
|
5716
|
-
},
|
5717
5445
|
steps,
|
5718
|
-
|
5446
|
+
resolvedOutput: await (output == null ? void 0 : output.parseOutput(
|
5447
|
+
{ text: lastStep.text },
|
5448
|
+
{
|
5449
|
+
response: lastStep.response,
|
5450
|
+
usage: lastStep.usage,
|
5451
|
+
finishReason: lastStep.finishReason
|
5452
|
+
}
|
5453
|
+
))
|
5719
5454
|
});
|
5720
5455
|
}
|
5721
5456
|
});
|
@@ -5797,35 +5532,67 @@ async function executeTools({
|
|
5797
5532
|
}
|
5798
5533
|
var DefaultGenerateTextResult = class {
|
5799
5534
|
constructor(options) {
|
5800
|
-
this.text = options.text;
|
5801
|
-
this.content = options.content;
|
5802
|
-
this.finishReason = options.finishReason;
|
5803
|
-
this.usage = options.usage;
|
5804
|
-
this.warnings = options.warnings;
|
5805
|
-
this.request = options.request;
|
5806
|
-
this.response = options.response;
|
5807
5535
|
this.steps = options.steps;
|
5808
|
-
this.providerMetadata = options.providerMetadata;
|
5809
5536
|
this.resolvedOutput = options.resolvedOutput;
|
5810
5537
|
}
|
5538
|
+
get finalStep() {
|
5539
|
+
return this.steps[this.steps.length - 1];
|
5540
|
+
}
|
5541
|
+
get content() {
|
5542
|
+
return this.finalStep.content;
|
5543
|
+
}
|
5544
|
+
get text() {
|
5545
|
+
return this.finalStep.text;
|
5546
|
+
}
|
5811
5547
|
get files() {
|
5812
|
-
return
|
5548
|
+
return this.finalStep.files;
|
5813
5549
|
}
|
5814
5550
|
get reasoningText() {
|
5815
|
-
|
5816
|
-
return texts.length > 0 ? texts.join("") : void 0;
|
5551
|
+
return this.finalStep.reasoningText;
|
5817
5552
|
}
|
5818
5553
|
get reasoning() {
|
5819
|
-
return this.
|
5554
|
+
return this.finalStep.reasoning;
|
5820
5555
|
}
|
5821
5556
|
get toolCalls() {
|
5822
|
-
return this.
|
5557
|
+
return this.finalStep.toolCalls;
|
5823
5558
|
}
|
5824
5559
|
get toolResults() {
|
5825
|
-
return this.
|
5560
|
+
return this.finalStep.toolResults;
|
5826
5561
|
}
|
5827
5562
|
get sources() {
|
5828
|
-
return this.
|
5563
|
+
return this.finalStep.sources;
|
5564
|
+
}
|
5565
|
+
get finishReason() {
|
5566
|
+
return this.finalStep.finishReason;
|
5567
|
+
}
|
5568
|
+
get warnings() {
|
5569
|
+
return this.finalStep.warnings;
|
5570
|
+
}
|
5571
|
+
get providerMetadata() {
|
5572
|
+
return this.finalStep.providerMetadata;
|
5573
|
+
}
|
5574
|
+
get response() {
|
5575
|
+
return this.finalStep.response;
|
5576
|
+
}
|
5577
|
+
get request() {
|
5578
|
+
return this.finalStep.request;
|
5579
|
+
}
|
5580
|
+
get usage() {
|
5581
|
+
return this.finalStep.usage;
|
5582
|
+
}
|
5583
|
+
get totalUsage() {
|
5584
|
+
return this.steps.reduce(
|
5585
|
+
(totalUsage, step) => {
|
5586
|
+
return addLanguageModelUsage(totalUsage, step.usage);
|
5587
|
+
},
|
5588
|
+
{
|
5589
|
+
inputTokens: void 0,
|
5590
|
+
outputTokens: void 0,
|
5591
|
+
totalTokens: void 0,
|
5592
|
+
reasoningTokens: void 0,
|
5593
|
+
cachedInputTokens: void 0
|
5594
|
+
}
|
5595
|
+
);
|
5829
5596
|
}
|
5830
5597
|
get experimental_output() {
|
5831
5598
|
if (this.resolvedOutput == null) {
|
@@ -5855,7 +5622,7 @@ __export(output_exports, {
|
|
5855
5622
|
object: () => object,
|
5856
5623
|
text: () => text
|
5857
5624
|
});
|
5858
|
-
var
|
5625
|
+
var import_provider_utils19 = require("@ai-sdk/provider-utils");
|
5859
5626
|
var text = () => ({
|
5860
5627
|
type: "text",
|
5861
5628
|
responseFormat: { type: "text" },
|
@@ -5869,7 +5636,7 @@ var text = () => ({
|
|
5869
5636
|
var object = ({
|
5870
5637
|
schema: inputSchema
|
5871
5638
|
}) => {
|
5872
|
-
const schema = (0,
|
5639
|
+
const schema = (0, import_provider_utils19.asSchema)(inputSchema);
|
5873
5640
|
return {
|
5874
5641
|
type: "object",
|
5875
5642
|
responseFormat: {
|
@@ -5895,7 +5662,7 @@ var object = ({
|
|
5895
5662
|
}
|
5896
5663
|
},
|
5897
5664
|
async parseOutput({ text: text2 }, context) {
|
5898
|
-
const parseResult = await (0,
|
5665
|
+
const parseResult = await (0, import_provider_utils19.safeParseJSON)({ text: text2 });
|
5899
5666
|
if (!parseResult.success) {
|
5900
5667
|
throw new NoObjectGeneratedError({
|
5901
5668
|
message: "No object generated: could not parse the response.",
|
@@ -5906,7 +5673,7 @@ var object = ({
|
|
5906
5673
|
finishReason: context.finishReason
|
5907
5674
|
});
|
5908
5675
|
}
|
5909
|
-
const validationResult = await (0,
|
5676
|
+
const validationResult = await (0, import_provider_utils19.safeValidateTypes)({
|
5910
5677
|
value: parseResult.value,
|
5911
5678
|
schema
|
5912
5679
|
});
|
@@ -5926,8 +5693,8 @@ var object = ({
|
|
5926
5693
|
};
|
5927
5694
|
|
5928
5695
|
// core/generate-text/smooth-stream.ts
|
5929
|
-
var
|
5930
|
-
var
|
5696
|
+
var import_provider_utils20 = require("@ai-sdk/provider-utils");
|
5697
|
+
var import_provider23 = require("@ai-sdk/provider");
|
5931
5698
|
var CHUNKING_REGEXPS = {
|
5932
5699
|
word: /\S+\s+/m,
|
5933
5700
|
line: /\n+/m
|
@@ -5935,7 +5702,7 @@ var CHUNKING_REGEXPS = {
|
|
5935
5702
|
function smoothStream({
|
5936
5703
|
delayInMs = 10,
|
5937
5704
|
chunking = "word",
|
5938
|
-
_internal: { delay: delay2 =
|
5705
|
+
_internal: { delay: delay2 = import_provider_utils20.delay } = {}
|
5939
5706
|
} = {}) {
|
5940
5707
|
let detectChunk;
|
5941
5708
|
if (typeof chunking === "function") {
|
@@ -5957,7 +5724,7 @@ function smoothStream({
|
|
5957
5724
|
} else {
|
5958
5725
|
const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
|
5959
5726
|
if (chunkingRegex == null) {
|
5960
|
-
throw new
|
5727
|
+
throw new import_provider23.InvalidArgumentError({
|
5961
5728
|
argument: "chunking",
|
5962
5729
|
message: `Chunking must be "word" or "line" or a RegExp. Received: ${chunking}`
|
5963
5730
|
});
|
@@ -5995,34 +5762,15 @@ function smoothStream({
|
|
5995
5762
|
}
|
5996
5763
|
|
5997
5764
|
// core/generate-text/stream-text.ts
|
5998
|
-
var
|
5765
|
+
var import_provider_utils22 = require("@ai-sdk/provider-utils");
|
5999
5766
|
|
6000
5767
|
// src/util/as-array.ts
|
6001
5768
|
function asArray(value) {
|
6002
5769
|
return value === void 0 ? [] : Array.isArray(value) ? value : [value];
|
6003
5770
|
}
|
6004
5771
|
|
6005
|
-
// src/util/consume-stream.ts
|
6006
|
-
async function consumeStream({
|
6007
|
-
stream,
|
6008
|
-
onError
|
6009
|
-
}) {
|
6010
|
-
const reader = stream.getReader();
|
6011
|
-
try {
|
6012
|
-
while (true) {
|
6013
|
-
const { done } = await reader.read();
|
6014
|
-
if (done)
|
6015
|
-
break;
|
6016
|
-
}
|
6017
|
-
} catch (error) {
|
6018
|
-
onError == null ? void 0 : onError(error);
|
6019
|
-
} finally {
|
6020
|
-
reader.releaseLock();
|
6021
|
-
}
|
6022
|
-
}
|
6023
|
-
|
6024
5772
|
// core/generate-text/run-tools-transformation.ts
|
6025
|
-
var
|
5773
|
+
var import_provider_utils21 = require("@ai-sdk/provider-utils");
|
6026
5774
|
function runToolsTransformation({
|
6027
5775
|
tools,
|
6028
5776
|
generatorStream,
|
@@ -6108,7 +5856,7 @@ function runToolsTransformation({
|
|
6108
5856
|
controller.enqueue(toolCall);
|
6109
5857
|
const tool2 = tools[toolCall.toolName];
|
6110
5858
|
if (tool2.execute != null) {
|
6111
|
-
const toolExecutionId = (0,
|
5859
|
+
const toolExecutionId = (0, import_provider_utils21.generateId)();
|
6112
5860
|
outstandingToolResults.add(toolExecutionId);
|
6113
5861
|
recordSpan({
|
6114
5862
|
name: "ai.toolCall",
|
@@ -6217,14 +5965,10 @@ function runToolsTransformation({
|
|
6217
5965
|
}
|
6218
5966
|
|
6219
5967
|
// core/generate-text/stream-text.ts
|
6220
|
-
var originalGenerateId4 = (0,
|
5968
|
+
var originalGenerateId4 = (0, import_provider_utils22.createIdGenerator)({
|
6221
5969
|
prefix: "aitxt",
|
6222
5970
|
size: 24
|
6223
5971
|
});
|
6224
|
-
var originalGenerateMessageId2 = (0, import_provider_utils21.createIdGenerator)({
|
6225
|
-
prefix: "msg",
|
6226
|
-
size: 24
|
6227
|
-
});
|
6228
5972
|
function streamText({
|
6229
5973
|
model,
|
6230
5974
|
tools,
|
@@ -6236,9 +5980,7 @@ function streamText({
|
|
6236
5980
|
abortSignal,
|
6237
5981
|
headers,
|
6238
5982
|
maxSteps = 1,
|
6239
|
-
experimental_generateMessageId: generateMessageId = originalGenerateMessageId2,
|
6240
5983
|
experimental_output: output,
|
6241
|
-
experimental_continueSteps: continueSteps = false,
|
6242
5984
|
experimental_telemetry: telemetry,
|
6243
5985
|
providerOptions,
|
6244
5986
|
experimental_toolCallStreaming = false,
|
@@ -6275,7 +6017,6 @@ function streamText({
|
|
6275
6017
|
repairToolCall,
|
6276
6018
|
maxSteps,
|
6277
6019
|
output,
|
6278
|
-
continueSteps,
|
6279
6020
|
providerOptions,
|
6280
6021
|
onChunk,
|
6281
6022
|
onError,
|
@@ -6283,8 +6024,7 @@ function streamText({
|
|
6283
6024
|
onStepFinish,
|
6284
6025
|
now: now2,
|
6285
6026
|
currentDate,
|
6286
|
-
generateId: generateId3
|
6287
|
-
generateMessageId
|
6027
|
+
generateId: generateId3
|
6288
6028
|
});
|
6289
6029
|
}
|
6290
6030
|
function createOutputTransformStream(output) {
|
@@ -6310,7 +6050,7 @@ function createOutputTransformStream(output) {
|
|
6310
6050
|
}
|
6311
6051
|
return new TransformStream({
|
6312
6052
|
async transform(chunk, controller) {
|
6313
|
-
if (chunk.type === "step
|
6053
|
+
if (chunk.type === "finish-step") {
|
6314
6054
|
publishTextChunk({ controller });
|
6315
6055
|
}
|
6316
6056
|
if (chunk.type !== "text") {
|
@@ -6354,32 +6094,18 @@ var DefaultStreamTextResult = class {
|
|
6354
6094
|
repairToolCall,
|
6355
6095
|
maxSteps,
|
6356
6096
|
output,
|
6357
|
-
continueSteps,
|
6358
6097
|
providerOptions,
|
6359
6098
|
now: now2,
|
6360
6099
|
currentDate,
|
6361
6100
|
generateId: generateId3,
|
6362
|
-
generateMessageId,
|
6363
6101
|
onChunk,
|
6364
6102
|
onError,
|
6365
6103
|
onFinish,
|
6366
6104
|
onStepFinish
|
6367
6105
|
}) {
|
6368
|
-
this.
|
6369
|
-
this.usagePromise = new DelayedPromise();
|
6106
|
+
this.totalUsagePromise = new DelayedPromise();
|
6370
6107
|
this.finishReasonPromise = new DelayedPromise();
|
6371
|
-
this.providerMetadataPromise = new DelayedPromise();
|
6372
|
-
this.textPromise = new DelayedPromise();
|
6373
|
-
this.reasoningPromise = new DelayedPromise();
|
6374
|
-
this.reasoningDetailsPromise = new DelayedPromise();
|
6375
|
-
this.sourcesPromise = new DelayedPromise();
|
6376
|
-
this.filesPromise = new DelayedPromise();
|
6377
|
-
this.toolCallsPromise = new DelayedPromise();
|
6378
|
-
this.toolResultsPromise = new DelayedPromise();
|
6379
|
-
this.requestPromise = new DelayedPromise();
|
6380
|
-
this.responsePromise = new DelayedPromise();
|
6381
6108
|
this.stepsPromise = new DelayedPromise();
|
6382
|
-
this.contentPromise = new DelayedPromise();
|
6383
6109
|
if (maxSteps < 1) {
|
6384
6110
|
throw new InvalidArgumentError({
|
6385
6111
|
parameter: "maxSteps",
|
@@ -6388,23 +6114,14 @@ var DefaultStreamTextResult = class {
|
|
6388
6114
|
});
|
6389
6115
|
}
|
6390
6116
|
this.output = output;
|
6391
|
-
|
6392
|
-
let recordedContinuationText = "";
|
6393
|
-
let recordedFullText = "";
|
6117
|
+
this.generateId = generateId3;
|
6394
6118
|
let activeReasoningPart = void 0;
|
6395
6119
|
let recordedContent = [];
|
6396
|
-
const
|
6397
|
-
const recordedResponse = {
|
6398
|
-
id: generateId3(),
|
6399
|
-
timestamp: currentDate(),
|
6400
|
-
modelId: model.modelId,
|
6401
|
-
messages: []
|
6402
|
-
};
|
6403
|
-
let recordedToolCalls = [];
|
6404
|
-
let recordedToolResults = [];
|
6120
|
+
const recordedResponseMessages = [];
|
6405
6121
|
let recordedFinishReason = void 0;
|
6406
|
-
let
|
6407
|
-
let
|
6122
|
+
let recordedTotalUsage = void 0;
|
6123
|
+
let recordedRequest = {};
|
6124
|
+
let recordedWarnings = [];
|
6408
6125
|
const recordedSteps = [];
|
6409
6126
|
let rootSpan;
|
6410
6127
|
const eventProcessor = new TransformStream({
|
@@ -6418,9 +6135,6 @@ var DefaultStreamTextResult = class {
|
|
6418
6135
|
await (onError == null ? void 0 : onError({ error: part.error }));
|
6419
6136
|
}
|
6420
6137
|
if (part.type === "text") {
|
6421
|
-
recordedStepText += part.text;
|
6422
|
-
recordedContinuationText += part.text;
|
6423
|
-
recordedFullText += part.text;
|
6424
6138
|
const latestContent = recordedContent[recordedContent.length - 1];
|
6425
6139
|
if ((latestContent == null ? void 0 : latestContent.type) === "text") {
|
6426
6140
|
latestContent.text += part.text;
|
@@ -6433,12 +6147,12 @@ var DefaultStreamTextResult = class {
             activeReasoningPart = {
               type: "reasoning",
               text: part.text,
-
+              providerMetadata: part.providerMetadata
             };
             recordedContent.push(activeReasoningPart);
           } else {
             activeReasoningPart.text += part.text;
-            activeReasoningPart.
+            activeReasoningPart.providerMetadata = part.providerMetadata;
           }
         }
         if (part.type === "reasoning-part-finish" && activeReasoningPart != null) {
@@ -6449,129 +6163,76 @@ var DefaultStreamTextResult = class {
         }
         if (part.type === "source") {
           recordedContent.push(part);
-          recordedSources.push(part);
         }
         if (part.type === "tool-call") {
           recordedContent.push(part);
-          recordedToolCalls.push(part);
         }
         if (part.type === "tool-result") {
           recordedContent.push(part);
-          recordedToolResults.push(part);
         }
-        if (part.type === "step
+        if (part.type === "start-step") {
+          recordedRequest = part.request;
+          recordedWarnings = part.warnings;
+        }
+        if (part.type === "finish-step") {
           const stepMessages = toResponseMessages({
-
-
-            reasoning: extractReasoning(recordedContent),
-            tools: tools != null ? tools : {},
-            toolCalls: recordedToolCalls,
-            toolResults: recordedToolResults,
-            messageId: part.messageId,
-            generateMessageId
+            content: recordedContent,
+            tools: tools != null ? tools : {}
           });
-          const
-          let nextStepType = "done";
-          if (currentStep + 1 < maxSteps) {
-            if (continueSteps && part.finishReason === "length" && // only use continue when there are no tool calls:
-            recordedToolCalls.length === 0) {
-              nextStepType = "continue";
-            } else if (
-              // there are tool calls:
-              recordedToolCalls.length > 0 && // all current tool calls have results:
-              recordedToolResults.length === recordedToolCalls.length
-            ) {
-              nextStepType = "tool-result";
-            }
-          }
-          const currentStepResult = {
-            stepType,
+          const currentStepResult = new DefaultStepResult({
             content: recordedContent,
-            text: recordedStepText,
-            reasoningText: asReasoningText(extractReasoning(recordedContent)),
-            reasoning: extractReasoning(recordedContent),
-            files: extractFiles(recordedContent),
-            sources: extractSources(recordedContent),
-            toolCalls: recordedToolCalls,
-            toolResults: recordedToolResults,
             finishReason: part.finishReason,
             usage: part.usage,
-            warnings:
-            request:
+            warnings: recordedWarnings,
+            request: recordedRequest,
             response: {
               ...part.response,
-              messages: [...
+              messages: [...recordedResponseMessages, ...stepMessages]
             },
-            providerMetadata: part.providerMetadata
-
-          };
+            providerMetadata: part.providerMetadata
+          });
           await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
           recordedSteps.push(currentStepResult);
           recordedContent = [];
-          recordedToolCalls = [];
-          recordedToolResults = [];
-          recordedStepText = "";
           activeReasoningPart = void 0;
-
-          stepType = nextStepType;
-          }
-          if (nextStepType !== "continue") {
-            recordedResponse.messages.push(...stepMessages);
-            recordedContinuationText = "";
-          }
+          recordedResponseMessages.push(...stepMessages);
         }
         if (part.type === "finish") {
-
-          recordedResponse.timestamp = part.response.timestamp;
-          recordedResponse.modelId = part.response.modelId;
-          recordedResponse.headers = part.response.headers;
-          recordedUsage = part.usage;
+          recordedTotalUsage = part.totalUsage;
           recordedFinishReason = part.finishReason;
         }
       },
       async flush(controller) {
-        var _a17;
         try {
           if (recordedSteps.length === 0) {
             return;
           }
-          const lastStep = recordedSteps[recordedSteps.length - 1];
-          self.contentPromise.resolve(lastStep.content);
-          self.warningsPromise.resolve(lastStep.warnings);
-          self.requestPromise.resolve(lastStep.request);
-          self.responsePromise.resolve(lastStep.response);
-          self.toolCallsPromise.resolve(lastStep.toolCalls);
-          self.toolResultsPromise.resolve(lastStep.toolResults);
-          self.providerMetadataPromise.resolve(lastStep.providerMetadata);
-          self.reasoningPromise.resolve(lastStep.reasoningText);
-          self.reasoningDetailsPromise.resolve(lastStep.reasoning);
           const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
-          const
+          const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : {
             inputTokens: void 0,
             outputTokens: void 0,
             totalTokens: void 0
           };
           self.finishReasonPromise.resolve(finishReason);
-          self.
-          self.textPromise.resolve(recordedFullText);
-          self.sourcesPromise.resolve(recordedSources);
-          self.filesPromise.resolve(lastStep.files);
+          self.totalUsagePromise.resolve(totalUsage);
           self.stepsPromise.resolve(recordedSteps);
+          const finalStep = recordedSteps[recordedSteps.length - 1];
           await (onFinish == null ? void 0 : onFinish({
             finishReason,
-
-
-
-
-
-
-
-
-
-
-
-
-
+            totalUsage,
+            usage: finalStep.usage,
+            content: finalStep.content,
+            text: finalStep.text,
+            reasoningText: finalStep.reasoningText,
+            reasoning: finalStep.reasoning,
+            files: finalStep.files,
+            sources: finalStep.sources,
+            toolCalls: finalStep.toolCalls,
+            toolResults: finalStep.toolResults,
+            request: finalStep.request,
+            response: finalStep.response,
+            warnings: finalStep.warnings,
+            providerMetadata: finalStep.providerMetadata,
            steps: recordedSteps
          }));
          rootSpan.setAttributes(
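The flush handler above now resolves totalUsage and builds the onFinish payload from the final recorded step, so a callback sees per-step usage and aggregated usage side by side. A sketch of consuming that payload, assuming a streamText call wired to this result class (streamText and the model placeholder are not part of this hunk):

import { streamText } from "ai";

declare const model: any; // placeholder: any provider language model

streamText({
  model,
  prompt: "Summarize the release notes.",
  onFinish({ finishReason, totalUsage, usage, text, steps }) {
    // totalUsage aggregates all steps; usage reflects only the final step.
    console.log(finishReason, totalUsage.totalTokens, usage.totalTokens);
    console.log(`finished after ${steps.length} step(s): ${text}`);
  },
});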
@@ -6579,18 +6240,18 @@ var DefaultStreamTextResult = class {
              telemetry,
              attributes: {
                "ai.response.finishReason": finishReason,
-                "ai.response.text": { output: () =>
+                "ai.response.text": { output: () => finalStep.text },
                "ai.response.toolCalls": {
                  output: () => {
-                    var
-                    return ((
+                    var _a17;
+                    return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
                  }
                },
-                "ai.usage.inputTokens":
-                "ai.usage.outputTokens":
-                "ai.usage.totalTokens":
-                "ai.usage.reasoningTokens":
-                "ai.usage.cachedInputTokens":
+                "ai.usage.inputTokens": totalUsage.inputTokens,
+                "ai.usage.outputTokens": totalUsage.outputTokens,
+                "ai.usage.totalTokens": totalUsage.totalTokens,
+                "ai.usage.reasoningTokens": totalUsage.reasoningTokens,
+                "ai.usage.cachedInputTokens": totalUsage.cachedInputTokens
              }
            })
          );
@@ -6649,11 +6310,7 @@ var DefaultStreamTextResult = class {
    async function streamStep({
      currentStep,
      responseMessages,
-      usage
-      stepType: stepType2,
-      previousStepText,
-      hasLeadingWhitespace,
-      messageId
+      usage
    }) {
      const initialPrompt = await standardizePrompt({
        system,
@@ -6751,8 +6408,7 @@ var DefaultStreamTextResult = class {
      const stepToolCalls = [];
      const stepToolResults = [];
      let warnings;
-      const
-      const stepFiles = [];
+      const stepContent = [];
      let activeReasoningPart2 = void 0;
      let stepFinishReason = "unknown";
      let stepUsage = {
@@ -6763,25 +6419,17 @@ var DefaultStreamTextResult = class {
      let stepProviderMetadata;
      let stepFirstChunk = true;
      let stepText = "";
-      let fullStepText = stepType2 === "continue" ? previousStepText : "";
      let stepResponse = {
        id: generateId3(),
        timestamp: currentDate(),
        modelId: model.modelId
      };
-      let chunkBuffer = "";
-      let chunkTextPublished = false;
-      let inWhitespacePrefix = true;
-      let hasWhitespaceSuffix = false;
      async function publishTextChunk({
        controller,
        chunk
      }) {
        controller.enqueue(chunk);
        stepText += chunk.text;
-        fullStepText += chunk.text;
-        chunkTextPublished = true;
-        hasWhitespaceSuffix = chunk.text.trimEnd() !== chunk.text;
      }
      self.addStream(
        transformedStream.pipeThrough(
@@ -6790,6 +6438,7 @@ var DefaultStreamTextResult = class {
              var _a17, _b, _c, _d;
              if (chunk.type === "stream-start") {
                warnings = chunk.warnings;
+                controller.enqueue({ type: "start" });
                return;
              }
              if (stepFirstChunk) {
@@ -6802,8 +6451,7 @@ var DefaultStreamTextResult = class {
                  "ai.response.msToFirstChunk": msToFirstChunk
                });
                controller.enqueue({
-                  type: "step
-                  messageId,
+                  type: "start-step",
                  request: stepRequest,
                  warnings: warnings != null ? warnings : []
                });
@@ -6814,27 +6462,7 @@ var DefaultStreamTextResult = class {
              const chunkType = chunk.type;
              switch (chunkType) {
                case "text": {
-
-                  const trimmedChunkText = inWhitespacePrefix && hasLeadingWhitespace ? chunk.text.trimStart() : chunk.text;
-                  if (trimmedChunkText.length === 0) {
-                    break;
-                  }
-                  inWhitespacePrefix = false;
-                  chunkBuffer += trimmedChunkText;
-                  const split = splitOnLastWhitespace(chunkBuffer);
-                  if (split != null) {
-                    chunkBuffer = split.suffix;
-                    await publishTextChunk({
-                      controller,
-                      chunk: {
-                        type: "text",
-                        text: split.prefix + split.whitespace
-                      }
-                    });
-                  }
-                  } else {
-                    await publishTextChunk({ controller, chunk });
-                  }
+                  await publishTextChunk({ controller, chunk });
                  break;
                }
                case "reasoning": {
@@ -6843,12 +6471,12 @@ var DefaultStreamTextResult = class {
                    activeReasoningPart2 = {
                      type: "reasoning",
                      text: chunk.text,
-
+                      providerMetadata: chunk.providerMetadata
                    };
-
+                    stepContent.push(activeReasoningPart2);
                  } else {
                    activeReasoningPart2.text += chunk.text;
-                    activeReasoningPart2.
+                    activeReasoningPart2.providerMetadata = chunk.providerMetadata;
                  }
                  break;
                }
@@ -6860,11 +6488,13 @@ var DefaultStreamTextResult = class {
                case "tool-call": {
                  controller.enqueue(chunk);
                  stepToolCalls.push(chunk);
+                  stepContent.push(chunk);
                  break;
                }
                case "tool-result": {
                  controller.enqueue(chunk);
                  stepToolResults.push(chunk);
+                  stepContent.push(chunk);
                  break;
                }
                case "response-metadata": {
@@ -6888,11 +6518,15 @@ var DefaultStreamTextResult = class {
                  break;
                }
                case "file": {
-
+                  stepContent.push(chunk);
+                  controller.enqueue(chunk);
+                  break;
+                }
+                case "source": {
+                  stepContent.push(chunk);
                  controller.enqueue(chunk);
                  break;
                }
-                case "source":
                case "tool-call-streaming-start":
                case "tool-call-delta": {
                  controller.enqueue(chunk);
@@ -6912,27 +6546,6 @@ var DefaultStreamTextResult = class {
            // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
            async flush(controller) {
              const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
-              let nextStepType = "done";
-              if (currentStep + 1 < maxSteps) {
-                if (continueSteps && stepFinishReason === "length" && // only use continue when there are no tool calls:
-                stepToolCalls.length === 0) {
-                  nextStepType = "continue";
-                } else if (
-                  // there are tool calls:
-                  stepToolCalls.length > 0 && // all current tool calls have results:
-                  stepToolResults.length === stepToolCalls.length
-                ) {
-                  nextStepType = "tool-result";
-                }
-              }
-              if (continueSteps && chunkBuffer.length > 0 && (nextStepType !== "continue" || // when the next step is a regular step, publish the buffer
-              stepType2 === "continue" && !chunkTextPublished)) {
-                await publishTextChunk({
-                  controller,
-                  chunk: { type: "text", text: chunkBuffer }
-                });
-                chunkBuffer = "";
-              }
              try {
                doStreamSpan.setAttributes(
                  selectTelemetryAttributes({
@@ -6965,69 +6578,37 @@ var DefaultStreamTextResult = class {
                  doStreamSpan.end();
                }
                controller.enqueue({
-                  type: "step
+                  type: "finish-step",
                  finishReason: stepFinishReason,
                  usage: stepUsage,
                  providerMetadata: stepProviderMetadata,
-                  request: stepRequest,
                  response: {
                    ...stepResponse,
                    headers: response == null ? void 0 : response.headers
-                  }
-                  warnings,
-                  isContinued: nextStepType === "continue",
-                  messageId
+                  }
                });
                const combinedUsage = addLanguageModelUsage(usage, stepUsage);
-                if (
+                if (currentStep + 1 < maxSteps && // there are tool calls:
+                stepToolCalls.length > 0 && // all current tool calls have results:
+                stepToolResults.length === stepToolCalls.length) {
+                  responseMessages.push(
+                    ...toResponseMessages({
+                      content: stepContent,
+                      tools: tools != null ? tools : {}
+                    })
+                  );
+                  await streamStep({
+                    currentStep: currentStep + 1,
+                    responseMessages,
+                    usage: combinedUsage
+                  });
+                } else {
                  controller.enqueue({
                    type: "finish",
                    finishReason: stepFinishReason,
-
-                    providerMetadata: stepProviderMetadata,
-                    response: {
-                      ...stepResponse,
-                      headers: response == null ? void 0 : response.headers
-                    }
+                    totalUsage: combinedUsage
                  });
                  self.closeStream();
-                } else {
-                  if (stepType2 === "continue") {
-                    const lastMessage = responseMessages[responseMessages.length - 1];
-                    if (typeof lastMessage.content === "string") {
-                      lastMessage.content += stepText;
-                    } else {
-                      lastMessage.content.push({
-                        text: stepText,
-                        type: "text"
-                      });
-                    }
-                  } else {
-                    responseMessages.push(
-                      ...toResponseMessages({
-                        text: stepText,
-                        files: stepFiles,
-                        reasoning: stepReasoning,
-                        tools: tools != null ? tools : {},
-                        toolCalls: stepToolCalls,
-                        toolResults: stepToolResults,
-                        messageId,
-                        generateMessageId
-                      })
-                    );
-                  }
-                  await streamStep({
-                    currentStep: currentStep + 1,
-                    responseMessages,
-                    usage: combinedUsage,
-                    stepType: nextStepType,
-                    previousStepText: fullStepText,
-                    hasLeadingWhitespace: hasWhitespaceSuffix,
-                    messageId: (
-                      // keep the same id when continuing a step:
-                      nextStepType === "continue" ? messageId : generateMessageId()
-                    )
-                  });
                }
              }
            })
@@ -7041,11 +6622,7 @@ var DefaultStreamTextResult = class {
          inputTokens: void 0,
          outputTokens: void 0,
          totalTokens: void 0
-        }
-        previousStepText: "",
-        stepType: "initial",
-        hasLeadingWhitespace: false,
-        messageId: generateMessageId()
+        }
      });
    }
  }).catch((error) => {
@@ -7060,50 +6637,56 @@ var DefaultStreamTextResult = class {
      self.closeStream();
    });
  }
-  get
-  return this.
+  get steps() {
+    return this.stepsPromise.value;
  }
-  get
-  return this.
+  get finalStep() {
+    return this.steps.then((steps) => steps[steps.length - 1]);
  }
-  get
-  return this.
+  get content() {
+    return this.finalStep.then((step) => step.content);
  }
-  get
-  return this.
+  get warnings() {
+    return this.finalStep.then((step) => step.warnings);
  }
  get providerMetadata() {
-  return this.
+    return this.finalStep.then((step) => step.providerMetadata);
  }
  get text() {
-  return this.
+    return this.finalStep.then((step) => step.text);
  }
  get reasoningText() {
-  return this.
+    return this.finalStep.then((step) => step.reasoningText);
  }
  get reasoning() {
-  return this.
+    return this.finalStep.then((step) => step.reasoning);
  }
  get sources() {
-  return this.
+    return this.finalStep.then((step) => step.sources);
  }
  get files() {
-  return this.
+    return this.finalStep.then((step) => step.files);
  }
  get toolCalls() {
-  return this.
+    return this.finalStep.then((step) => step.toolCalls);
  }
  get toolResults() {
-  return this.
+    return this.finalStep.then((step) => step.toolResults);
+  }
+  get usage() {
+    return this.finalStep.then((step) => step.usage);
  }
  get request() {
-  return this.
+    return this.finalStep.then((step) => step.request);
  }
  get response() {
-  return this.
+    return this.finalStep.then((step) => step.response);
  }
-  get
-  return this.
+  get totalUsage() {
+    return this.totalUsagePromise.value;
+  }
+  get finishReason() {
+    return this.finishReasonPromise.value;
  }
  /**
  Split out a new stream from the original stream.
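Most accessors are now promises derived from finalStep (the last recorded step), while totalUsage and finishReason resolve from their own deferred values. A usage sketch, assuming the result comes from a streamText call (the call and the model placeholder are not shown in this hunk):

import { streamText } from "ai";

declare const model: any; // placeholder: any provider language model

async function inspectResult() {
  const result = streamText({ model, prompt: "Hello" });
  const [text, usage, totalUsage, finishReason, steps] = await Promise.all([
    result.text,        // text of the final step
    result.usage,       // usage of the final step
    result.totalUsage,  // usage aggregated across all steps
    result.finishReason,
    result.steps,
  ]);
  console.log(text, usage, totalUsage, finishReason, steps.length);
}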
@@ -7169,26 +6752,33 @@ var DefaultStreamTextResult = class {
      )
    );
  }
-
-
-
-
+  toUIMessageStream({
+    newMessageId,
+    originalMessages = [],
+    onFinish,
+    messageMetadata,
    sendReasoning = false,
    sendSources = false,
-
+    experimental_sendStart = true,
+    experimental_sendFinish = true,
+    onError = () => "An error occurred."
+    // mask error messages for safety by default
  } = {}) {
-
+    const lastMessage = originalMessages[originalMessages.length - 1];
+    const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
+    const messageId = isContinuation ? lastMessage.id : newMessageId;
+    const baseStream = this.fullStream.pipeThrough(
      new TransformStream({
-        transform: async (
-        const
-        switch (
+        transform: async (part, controller) => {
+          const partType = part.type;
+          switch (partType) {
            case "text": {
-              controller.enqueue({ type: "text", value:
+              controller.enqueue({ type: "text", value: part.text });
              break;
            }
            case "reasoning": {
              if (sendReasoning) {
-                controller.enqueue({ type: "reasoning", value:
+                controller.enqueue({ type: "reasoning", value: part });
              }
              break;
            }
@@ -7205,15 +6795,15 @@ var DefaultStreamTextResult = class {
              controller.enqueue({
                type: "file",
                value: {
-                  mediaType:
-                  url: `data:${
+                  mediaType: part.file.mediaType,
+                  url: `data:${part.file.mediaType};base64,${part.file.base64}`
                }
              });
              break;
            }
            case "source": {
              if (sendSources) {
-                controller.enqueue({ type: "source", value:
+                controller.enqueue({ type: "source", value: part });
              }
              break;
            }
@@ -7221,8 +6811,8 @@ var DefaultStreamTextResult = class {
              controller.enqueue({
                type: "tool-call-streaming-start",
                value: {
-                  toolCallId:
-                  toolName:
+                  toolCallId: part.toolCallId,
+                  toolName: part.toolName
                }
              });
              break;
@@ -7231,8 +6821,8 @@ var DefaultStreamTextResult = class {
              controller.enqueue({
                type: "tool-call-delta",
                value: {
-                  toolCallId:
-                  argsTextDelta:
+                  toolCallId: part.toolCallId,
+                  argsTextDelta: part.argsTextDelta
                }
              });
              break;
@@ -7241,9 +6831,9 @@ var DefaultStreamTextResult = class {
              controller.enqueue({
                type: "tool-call",
                value: {
-                  toolCallId:
-                  toolName:
-                  args:
+                  toolCallId: part.toolCallId,
+                  toolName: part.toolName,
+                  args: part.args
                }
              });
              break;
@@ -7252,8 +6842,8 @@ var DefaultStreamTextResult = class {
              controller.enqueue({
                type: "tool-result",
                value: {
-                  toolCallId:
-                  result:
+                  toolCallId: part.toolCallId,
+                  result: part.result
                }
              });
              break;
@@ -7261,69 +6851,100 @@ var DefaultStreamTextResult = class {
            case "error": {
              controller.enqueue({
                type: "error",
-                value: onError(
+                value: onError(part.error)
              });
              break;
            }
-            case "step
+            case "start-step": {
              controller.enqueue({
                type: "start-step",
                value: {
-
+                  metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
                }
              });
              break;
            }
-            case "step
+            case "finish-step": {
              controller.enqueue({
                type: "finish-step",
                value: {
-
-                  usage: sendUsage ? chunk.usage : void 0,
-                  isContinued: chunk.isContinued
+                  metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
                }
              });
              break;
            }
+            case "start": {
+              if (experimental_sendStart) {
+                controller.enqueue({
+                  type: "start",
+                  value: {
+                    messageId,
+                    metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
+                  }
+                });
+              }
+              break;
+            }
            case "finish": {
              if (experimental_sendFinish) {
                controller.enqueue({
-                  type: "finish
+                  type: "finish",
                  value: {
-
-                    usage: sendUsage ? chunk.usage : void 0
+                    metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
                  }
                });
              }
              break;
            }
            default: {
-              const exhaustiveCheck =
+              const exhaustiveCheck = partType;
              throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
            }
          }
        }
      })
    );
+    return onFinish == null ? baseStream : processUIMessageStream({
+      stream: baseStream,
+      lastMessage,
+      newMessageId: messageId != null ? messageId : this.generateId(),
+      onFinish: ({ message }) => {
+        const isContinuation2 = message.id === (lastMessage == null ? void 0 : lastMessage.id);
+        onFinish({
+          isContinuation: isContinuation2,
+          responseMessage: message,
+          messages: [
+            ...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
+            message
+          ]
+        });
+      }
+    });
  }
-
-
-
+  pipeUIMessageStreamToResponse(response, {
+    newMessageId,
+    originalMessages,
+    onFinish,
+    messageMetadata,
    sendReasoning,
    sendSources,
    experimental_sendFinish,
    experimental_sendStart,
+    onError,
    ...init
  } = {}) {
-
+    pipeUIMessageStreamToResponse({
      response,
-
-
-
+      stream: this.toUIMessageStream({
+        newMessageId,
+        originalMessages,
+        onFinish,
+        messageMetadata,
        sendReasoning,
        sendSources,
        experimental_sendFinish,
-        experimental_sendStart
+        experimental_sendStart,
+        onError
      }),
      ...init
    });
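The old data-stream helpers give way to the UI message stream API above. A sketch of piping it to a Node response, with option names taken from this hunk (the Node handler shape and the model placeholder are assumptions):

import { streamText } from "ai";
import type { ServerResponse } from "node:http";

declare const model: any; // placeholder: any provider language model

export function handleChat(response: ServerResponse, messages: any[]) {
  const result = streamText({ model, messages });
  result.pipeUIMessageStreamToResponse(response, {
    originalMessages: messages, // enables continuing a trailing assistant message
    sendReasoning: true,
    onError: () => "An error occurred.", // error messages are masked by default
    onFinish: ({ messages: updated, responseMessage, isContinuation }) => {
      console.log(isContinuation, responseMessage.id, updated.length);
    },
  });
}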
@@ -7335,23 +6956,29 @@ var DefaultStreamTextResult = class {
      ...init
    });
  }
-
-
-
+  toUIMessageStreamResponse({
+    newMessageId,
+    originalMessages,
+    onFinish,
+    messageMetadata,
    sendReasoning,
    sendSources,
    experimental_sendFinish,
    experimental_sendStart,
+    onError,
    ...init
  } = {}) {
-    return
-
-
-
+    return createUIMessageStreamResponse({
+      stream: this.toUIMessageStream({
+        newMessageId,
+        originalMessages,
+        onFinish,
+        messageMetadata,
        sendReasoning,
        sendSources,
        experimental_sendFinish,
-        experimental_sendStart
+        experimental_sendStart,
+        onError
      }),
      ...init
    });
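toUIMessageStreamResponse wraps the same stream in a Response via createUIMessageStreamResponse, which suits fetch-style route handlers. A sketch (the route shape and model placeholder are assumptions; messageMetadata receives the current stream part, as in the hunk above):

import { streamText } from "ai";

declare const model: any; // placeholder: any provider language model

export async function POST(request: Request): Promise<Response> {
  const { messages } = await request.json();
  const result = streamText({ model, messages });
  // Returns a Response whose body is the UI message stream.
  return result.toUIMessageStreamResponse({
    originalMessages: messages,
    sendSources: true,
    messageMetadata: ({ part }) =>
      part.type === "finish" ? { finishedAt: Date.now() } : undefined,
  });
}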
@@ -7364,39 +6991,6 @@ var DefaultStreamTextResult = class {
  }
 };
 
-// src/util/merge-objects.ts
-function mergeObjects(target, source) {
-  if (target === void 0 && source === void 0) {
-    return void 0;
-  }
-  if (target === void 0) {
-    return source;
-  }
-  if (source === void 0) {
-    return target;
-  }
-  const result = { ...target };
-  for (const key in source) {
-    if (Object.prototype.hasOwnProperty.call(source, key)) {
-      const sourceValue = source[key];
-      if (sourceValue === void 0)
-        continue;
-      const targetValue = key in target ? target[key] : void 0;
-      const isSourceObject = sourceValue !== null && typeof sourceValue === "object" && !Array.isArray(sourceValue) && !(sourceValue instanceof Date) && !(sourceValue instanceof RegExp);
-      const isTargetObject = targetValue !== null && targetValue !== void 0 && typeof targetValue === "object" && !Array.isArray(targetValue) && !(targetValue instanceof Date) && !(targetValue instanceof RegExp);
-      if (isSourceObject && isTargetObject) {
-        result[key] = mergeObjects(
-          targetValue,
-          sourceValue
-        );
-      } else {
-        result[key] = sourceValue;
-      }
-    }
-  }
-  return result;
-}
-
 // core/middleware/default-settings-middleware.ts
 function defaultSettingsMiddleware({
   settings
@@ -7404,33 +6998,7 @@ function defaultSettingsMiddleware({
   return {
     middlewareVersion: "v2",
     transformParams: async ({ params }) => {
-
-      return {
-        ...settings,
-        ...params,
-        // map all values that are null to undefined
-        maxOutputTokens: settings.maxOutputTokens !== null ? (_a17 = params.maxOutputTokens) != null ? _a17 : settings.maxOutputTokens : void 0,
-        temperature: settings.temperature !== null ? (
-          // temperature: special case 0 or null
-          params.temperature === 0 || params.temperature == null ? (_b = settings.temperature) != null ? _b : params.temperature : params.temperature
-        ) : void 0,
-        stopSequences: settings.stopSequences !== null ? (_c = params.stopSequences) != null ? _c : settings.stopSequences : void 0,
-        topP: settings.topP !== null ? (_d = params.topP) != null ? _d : settings.topP : void 0,
-        topK: settings.topK !== null ? (_e = params.topK) != null ? _e : settings.topK : void 0,
-        presencePenalty: settings.presencePenalty !== null ? (_f = params.presencePenalty) != null ? _f : settings.presencePenalty : void 0,
-        frequencyPenalty: settings.frequencyPenalty !== null ? (_g = params.frequencyPenalty) != null ? _g : settings.frequencyPenalty : void 0,
-        responseFormat: settings.responseFormat !== null ? (_h = params.responseFormat) != null ? _h : settings.responseFormat : void 0,
-        seed: settings.seed !== null ? (_i = params.seed) != null ? _i : settings.seed : void 0,
-        tools: settings.tools !== null ? (_j = params.tools) != null ? _j : settings.tools : void 0,
-        toolChoice: settings.toolChoice !== null ? (_k = params.toolChoice) != null ? _k : settings.toolChoice : void 0,
-        // headers: deep merge
-        headers: mergeObjects(settings.headers, params.headers),
-        // provider options: deep merge
-        providerOptions: mergeObjects(
-          settings.providerOptions,
-          params.providerOptions
-        )
-      };
+      return mergeObjects(settings, params);
     }
   };
 }
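defaultSettingsMiddleware now defers entirely to mergeObjects, deep-merging the configured defaults with the per-call params (params win where both are set). A sketch of attaching it to a model; wrapLanguageModel and the provider model are assumptions, not shown in this diff:

import { defaultSettingsMiddleware, wrapLanguageModel } from "ai";

declare const baseModel: any; // placeholder: any provider language model

// The settings act as defaults; explicit call parameters override them in the merge.
export const modelWithDefaults = wrapLanguageModel({
  model: baseModel,
  middleware: defaultSettingsMiddleware({
    settings: {
      temperature: 0.3,
      providerOptions: { exampleProvider: { cache: true } }, // illustrative keys
    },
  }),
});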
@@ -7652,7 +7220,7 @@ var doWrap = ({
 };
 
 // core/registry/custom-provider.ts
-var
+var import_provider24 = require("@ai-sdk/provider");
 function customProvider({
   languageModels,
   textEmbeddingModels,
@@ -7667,7 +7235,7 @@ function customProvider({
       if (fallbackProvider) {
         return fallbackProvider.languageModel(modelId);
       }
-      throw new
+      throw new import_provider24.NoSuchModelError({ modelId, modelType: "languageModel" });
     },
     textEmbeddingModel(modelId) {
       if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
@@ -7676,7 +7244,7 @@ function customProvider({
       if (fallbackProvider) {
         return fallbackProvider.textEmbeddingModel(modelId);
       }
-      throw new
+      throw new import_provider24.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
     },
     imageModel(modelId) {
       if (imageModels != null && modelId in imageModels) {
@@ -7685,19 +7253,19 @@ function customProvider({
       if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
         return fallbackProvider.imageModel(modelId);
       }
-      throw new
+      throw new import_provider24.NoSuchModelError({ modelId, modelType: "imageModel" });
     }
   };
 }
 var experimental_customProvider = customProvider;
 
 // core/registry/no-such-provider-error.ts
-var
+var import_provider25 = require("@ai-sdk/provider");
 var name16 = "AI_NoSuchProviderError";
 var marker16 = `vercel.ai.error.${name16}`;
 var symbol16 = Symbol.for(marker16);
 var _a16;
-var NoSuchProviderError = class extends
+var NoSuchProviderError = class extends import_provider25.NoSuchModelError {
   constructor({
     modelId,
     modelType,
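customProvider resolves model aliases and throws NoSuchModelError unless a fallbackProvider is given, as the hunks above show. A sketch of defining aliases with a fallback (the openai provider is an assumption used only for illustration):

import { customProvider } from "ai";

declare const openai: any; // placeholder: a concrete provider, e.g. from @ai-sdk/openai

export const myProvider = customProvider({
  languageModels: {
    // alias -> concrete model instance
    "chat-fast": openai("gpt-4o-mini"),
  },
  // unknown ids fall through to the fallback instead of throwing NoSuchModelError
  fallbackProvider: openai,
});

export const chatModel = myProvider.languageModel("chat-fast");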
@@ -7711,13 +7279,13 @@ var NoSuchProviderError = class extends import_provider26.NoSuchModelError {
     this.availableProviders = availableProviders;
   }
   static isInstance(error) {
-    return
+    return import_provider25.AISDKError.hasMarker(error, marker16);
   }
 };
 _a16 = symbol16;
 
 // core/registry/provider-registry.ts
-var
+var import_provider26 = require("@ai-sdk/provider");
 function createProviderRegistry(providers, {
   separator = ":"
 } = {}) {
@@ -7756,7 +7324,7 @@ var DefaultProviderRegistry = class {
   splitId(id, modelType) {
     const index = id.indexOf(this.separator);
     if (index === -1) {
-      throw new
+      throw new import_provider26.NoSuchModelError({
         modelId: id,
         modelType,
         message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId${this.separator}modelId")`
@@ -7769,7 +7337,7 @@ var DefaultProviderRegistry = class {
     const [providerId, modelId] = this.splitId(id, "languageModel");
     const model = (_b = (_a17 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a17, modelId);
     if (model == null) {
-      throw new
+      throw new import_provider26.NoSuchModelError({ modelId: id, modelType: "languageModel" });
     }
     return model;
   }
@@ -7779,7 +7347,7 @@ var DefaultProviderRegistry = class {
     const provider = this.getProvider(providerId);
     const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
     if (model == null) {
-      throw new
+      throw new import_provider26.NoSuchModelError({
         modelId: id,
         modelType: "textEmbeddingModel"
       });
@@ -7792,14 +7360,14 @@ var DefaultProviderRegistry = class {
     const provider = this.getProvider(providerId);
     const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
     if (model == null) {
-      throw new
+      throw new import_provider26.NoSuchModelError({ modelId: id, modelType: "imageModel" });
     }
     return model;
   }
 };
 
 // core/tool/mcp/mcp-client.ts
-var
+var import_provider_utils24 = require("@ai-sdk/provider-utils");
 
 // core/tool/tool.ts
 function tool(tool2) {
|
|
7807
7375
|
}
|
7808
7376
|
|
7809
7377
|
// core/tool/mcp/mcp-sse-transport.ts
|
7810
|
-
var
|
7378
|
+
var import_provider_utils23 = require("@ai-sdk/provider-utils");
|
7811
7379
|
|
7812
7380
|
// core/tool/mcp/json-rpc-message.ts
|
7813
7381
|
var import_zod10 = require("zod");
|
@@ -7978,7 +7546,7 @@ var SseMCPTransport = class {
|
|
7978
7546
|
(_b = this.onerror) == null ? void 0 : _b.call(this, error);
|
7979
7547
|
return reject(error);
|
7980
7548
|
}
|
7981
|
-
const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0,
|
7549
|
+
const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils23.createEventSourceParserStream)());
|
7982
7550
|
const reader = stream.getReader();
|
7983
7551
|
const processEvents = async () => {
|
7984
7552
|
var _a18, _b2, _c2;
|
@@ -8302,7 +7870,7 @@ var MCPClient = class {
|
|
8302
7870
|
if (schemas !== "automatic" && !(name17 in schemas)) {
|
8303
7871
|
continue;
|
8304
7872
|
}
|
8305
|
-
const parameters = schemas === "automatic" ? (0,
|
7873
|
+
const parameters = schemas === "automatic" ? (0, import_provider_utils24.jsonSchema)({
|
8306
7874
|
...inputSchema,
|
8307
7875
|
properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
|
8308
7876
|
additionalProperties: false
|
@@ -8366,8 +7934,8 @@ var MCPClient = class {
|
|
8366
7934
|
};
|
8367
7935
|
|
8368
7936
|
// src/error/no-transcript-generated-error.ts
|
8369
|
-
var
|
8370
|
-
var NoTranscriptGeneratedError = class extends
|
7937
|
+
var import_provider27 = require("@ai-sdk/provider");
|
7938
|
+
var NoTranscriptGeneratedError = class extends import_provider27.AISDKError {
|
8371
7939
|
constructor(options) {
|
8372
7940
|
super({
|
8373
7941
|
name: "AI_NoTranscriptGeneratedError",
|
@@ -8460,7 +8028,6 @@ var DefaultTranscriptionResult = class {
   TypeValidationError,
   UnsupportedFunctionalityError,
   appendClientMessage,
-  appendResponseMessages,
   asSchema,
   assistantModelMessageSchema,
   callChatApi,
@@ -8474,11 +8041,11 @@ var DefaultTranscriptionResult = class {
   coreToolMessageSchema,
   coreUserMessageSchema,
   cosineSimilarity,
-  createDataStream,
-  createDataStreamResponse,
   createIdGenerator,
   createProviderRegistry,
   createTextStreamResponse,
+  createUIMessageStream,
+  createUIMessageStreamResponse,
   customProvider,
   defaultSettingsMiddleware,
   embed,
@@ -8501,9 +8068,8 @@ var DefaultTranscriptionResult = class {
   jsonSchema,
   modelMessageSchema,
   parsePartialJson,
-  pipeDataStreamToResponse,
   pipeTextStreamToResponse,
-
+  pipeUIMessageStreamToResponse,
   processTextStream,
   shouldResubmitMessages,
   simulateReadableStream,