ai 0.0.0-85f9a635-20240518005312 → 0.0.0-8777c42a-20250115032312

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. package/CHANGELOG.md +2863 -0
  2. package/README.md +99 -22
  3. package/dist/index.d.mts +1925 -1592
  4. package/dist/index.d.ts +1925 -1592
  5. package/dist/index.js +5500 -2961
  6. package/dist/index.js.map +1 -1
  7. package/dist/index.mjs +5497 -2916
  8. package/dist/index.mjs.map +1 -1
  9. package/package.json +39 -100
  10. package/react/dist/index.d.mts +8 -563
  11. package/react/dist/index.d.ts +8 -580
  12. package/react/dist/index.js +7 -1395
  13. package/react/dist/index.js.map +1 -1
  14. package/react/dist/index.mjs +12 -1383
  15. package/react/dist/index.mjs.map +1 -1
  16. package/rsc/dist/index.d.ts +340 -197
  17. package/rsc/dist/rsc-server.d.mts +339 -197
  18. package/rsc/dist/rsc-server.mjs +1295 -1347
  19. package/rsc/dist/rsc-server.mjs.map +1 -1
  20. package/rsc/dist/rsc-shared.d.mts +30 -23
  21. package/rsc/dist/rsc-shared.mjs +69 -105
  22. package/rsc/dist/rsc-shared.mjs.map +1 -1
  23. package/test/dist/index.d.mts +67 -0
  24. package/test/dist/index.d.ts +67 -0
  25. package/test/dist/index.js +131 -0
  26. package/test/dist/index.js.map +1 -0
  27. package/test/dist/index.mjs +101 -0
  28. package/test/dist/index.mjs.map +1 -0
  29. package/prompts/dist/index.d.mts +0 -324
  30. package/prompts/dist/index.d.ts +0 -324
  31. package/prompts/dist/index.js +0 -178
  32. package/prompts/dist/index.js.map +0 -1
  33. package/prompts/dist/index.mjs +0 -146
  34. package/prompts/dist/index.mjs.map +0 -1
  35. package/react/dist/index.server.d.mts +0 -17
  36. package/react/dist/index.server.d.ts +0 -17
  37. package/react/dist/index.server.js +0 -50
  38. package/react/dist/index.server.js.map +0 -1
  39. package/react/dist/index.server.mjs +0 -23
  40. package/react/dist/index.server.mjs.map +0 -1
  41. package/solid/dist/index.d.mts +0 -408
  42. package/solid/dist/index.d.ts +0 -408
  43. package/solid/dist/index.js +0 -1072
  44. package/solid/dist/index.js.map +0 -1
  45. package/solid/dist/index.mjs +0 -1044
  46. package/solid/dist/index.mjs.map +0 -1
  47. package/svelte/dist/index.d.mts +0 -484
  48. package/svelte/dist/index.d.ts +0 -484
  49. package/svelte/dist/index.js +0 -1778
  50. package/svelte/dist/index.js.map +0 -1
  51. package/svelte/dist/index.mjs +0 -1749
  52. package/svelte/dist/index.mjs.map +0 -1
  53. package/vue/dist/index.d.mts +0 -402
  54. package/vue/dist/index.d.ts +0 -402
  55. package/vue/dist/index.js +0 -1072
  56. package/vue/dist/index.js.map +0 -1
  57. package/vue/dist/index.mjs +0 -1034
  58. package/vue/dist/index.mjs.map +0 -1
@@ -1,12 +1,11 @@
1
1
  // rsc/ai-state.tsx
2
- import { AsyncLocalStorage } from "async_hooks";
3
2
  import * as jsondiffpatch from "jsondiffpatch";
3
+ import { AsyncLocalStorage } from "async_hooks";
4
4
 
5
- // rsc/utils.tsx
6
- import { Suspense } from "react";
7
- import { Fragment, jsx, jsxs } from "react/jsx-runtime";
5
+ // util/create-resolvable-promise.ts
8
6
  function createResolvablePromise() {
9
- let resolve, reject;
7
+ let resolve;
8
+ let reject;
10
9
  const promise = new Promise((res, rej) => {
11
10
  resolve = res;
12
11
  reject = rej;
@@ -17,43 +16,9 @@ function createResolvablePromise() {
17
16
  reject
18
17
  };
19
18
  }
20
- var R = [
21
- async ({
22
- c,
23
- // current
24
- n
25
- // next
26
- }) => {
27
- const chunk = await n;
28
- if (chunk.done) {
29
- return chunk.value;
30
- }
31
- if (chunk.append) {
32
- return /* @__PURE__ */ jsxs(Fragment, { children: [
33
- c,
34
- /* @__PURE__ */ jsx(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx(R, { c: chunk.value, n: chunk.next }) })
35
- ] });
36
- }
37
- return /* @__PURE__ */ jsx(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx(R, { c: chunk.value, n: chunk.next }) });
38
- }
39
- ][0];
40
- function createSuspensedChunk(initialValue) {
41
- const { promise, resolve, reject } = createResolvablePromise();
42
- return {
43
- row: /* @__PURE__ */ jsx(Suspense, { fallback: initialValue, children: /* @__PURE__ */ jsx(R, { c: initialValue, n: promise }) }),
44
- resolve,
45
- reject
46
- };
47
- }
48
- var isFunction = (x) => typeof x === "function";
49
- var consumeStream = async (stream) => {
50
- const reader = stream.getReader();
51
- while (true) {
52
- const { done } = await reader.read();
53
- if (done)
54
- break;
55
- }
56
- };
19
+
20
+ // util/is-function.ts
21
+ var isFunction = (value) => typeof value === "function";
57
22
 
58
23
  // rsc/ai-state.tsx
59
24
  var asyncAIStateStorage = new AsyncLocalStorage();
@@ -67,7 +32,8 @@ function getAIStateStoreOrThrow(message) {
67
32
  function withAIState({ state, options }, fn) {
68
33
  return asyncAIStateStorage.run(
69
34
  {
70
- currentState: state,
35
+ currentState: JSON.parse(JSON.stringify(state)),
36
+ // deep clone object
71
37
  originalState: state,
72
38
  sealed: false,
73
39
  options
@@ -115,7 +81,7 @@ function getMutableAIState(...args) {
115
81
  store.mutationDeltaResolve = resolve;
116
82
  }
117
83
  function doUpdate(newState, done) {
118
- var _a, _b;
84
+ var _a9, _b;
119
85
  if (args.length > 0) {
120
86
  if (typeof store.currentState !== "object") {
121
87
  const key = args[0];
@@ -139,7 +105,7 @@ function getMutableAIState(...args) {
139
105
  store.currentState = newState;
140
106
  }
141
107
  }
142
- (_b = (_a = store.options).onSetAIState) == null ? void 0 : _b.call(_a, {
108
+ (_b = (_a9 = store.options).onSetAIState) == null ? void 0 : _b.call(_a9, {
143
109
  key: args.length > 0 ? args[0] : void 0,
144
110
  state: store.currentState,
145
111
  done
@@ -174,68 +140,130 @@ function getMutableAIState(...args) {
174
140
  return mutableState;
175
141
  }
176
142
 
177
- // rsc/streamable.tsx
178
- import zodToJsonSchema2 from "zod-to-json-schema";
143
+ // rsc/provider.tsx
144
+ import * as React from "react";
145
+ import { InternalAIProvider } from "./rsc-shared.mjs";
146
+ import { jsx } from "react/jsx-runtime";
147
+ async function innerAction({
148
+ action,
149
+ options
150
+ }, state, ...args) {
151
+ "use server";
152
+ return await withAIState(
153
+ {
154
+ state,
155
+ options
156
+ },
157
+ async () => {
158
+ const result = await action(...args);
159
+ sealMutableAIState();
160
+ return [getAIStateDeltaPromise(), result];
161
+ }
162
+ );
163
+ }
164
+ function wrapAction(action, options) {
165
+ return innerAction.bind(null, { action, options });
166
+ }
167
+ function createAI({
168
+ actions,
169
+ initialAIState,
170
+ initialUIState,
171
+ onSetAIState,
172
+ onGetUIState
173
+ }) {
174
+ const wrappedActions = {};
175
+ for (const name9 in actions) {
176
+ wrappedActions[name9] = wrapAction(actions[name9], {
177
+ onSetAIState
178
+ });
179
+ }
180
+ const wrappedSyncUIState = onGetUIState ? wrapAction(onGetUIState, {}) : void 0;
181
+ const AI = async (props) => {
182
+ var _a9, _b;
183
+ if ("useState" in React) {
184
+ throw new Error(
185
+ "This component can only be used inside Server Components."
186
+ );
187
+ }
188
+ let uiState = (_a9 = props.initialUIState) != null ? _a9 : initialUIState;
189
+ let aiState = (_b = props.initialAIState) != null ? _b : initialAIState;
190
+ let aiStateDelta = void 0;
191
+ if (wrappedSyncUIState) {
192
+ const [newAIStateDelta, newUIState] = await wrappedSyncUIState(aiState);
193
+ if (newUIState !== void 0) {
194
+ aiStateDelta = newAIStateDelta;
195
+ uiState = newUIState;
196
+ }
197
+ }
198
+ return /* @__PURE__ */ jsx(
199
+ InternalAIProvider,
200
+ {
201
+ wrappedActions,
202
+ wrappedSyncUIState,
203
+ initialUIState: uiState,
204
+ initialAIState: aiState,
205
+ initialAIStatePatch: aiStateDelta,
206
+ children: props.children
207
+ }
208
+ );
209
+ };
210
+ return AI;
211
+ }
179
212
 
180
- // core/util/retry-with-exponential-backoff.ts
181
- import { APICallError, RetryError } from "@ai-sdk/provider";
182
- import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
213
+ // rsc/stream-ui/stream-ui.tsx
214
+ import { safeParseJSON } from "@ai-sdk/provider-utils";
183
215
 
184
- // core/util/delay.ts
185
- async function delay(delayInMs) {
186
- return new Promise((resolve) => setTimeout(resolve, delayInMs));
187
- }
216
+ // util/download-error.ts
217
+ import { AISDKError } from "@ai-sdk/provider";
218
+ var name = "AI_DownloadError";
219
+ var marker = `vercel.ai.error.${name}`;
220
+ var symbol = Symbol.for(marker);
221
+ var _a;
222
+ var DownloadError = class extends AISDKError {
223
+ constructor({
224
+ url,
225
+ statusCode,
226
+ statusText,
227
+ cause,
228
+ message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
229
+ }) {
230
+ super({ name, message, cause });
231
+ this[_a] = true;
232
+ this.url = url;
233
+ this.statusCode = statusCode;
234
+ this.statusText = statusText;
235
+ }
236
+ static isInstance(error) {
237
+ return AISDKError.hasMarker(error, marker);
238
+ }
239
+ };
240
+ _a = symbol;
188
241
 
189
- // core/util/retry-with-exponential-backoff.ts
190
- var retryWithExponentialBackoff = ({
191
- maxRetries = 2,
192
- initialDelayInMs = 2e3,
193
- backoffFactor = 2
194
- } = {}) => async (f) => _retryWithExponentialBackoff(f, {
195
- maxRetries,
196
- delayInMs: initialDelayInMs,
197
- backoffFactor
198
- });
199
- async function _retryWithExponentialBackoff(f, {
200
- maxRetries,
201
- delayInMs,
202
- backoffFactor
203
- }, errors = []) {
242
+ // util/download.ts
243
+ async function download({
244
+ url,
245
+ fetchImplementation = fetch
246
+ }) {
247
+ var _a9;
248
+ const urlText = url.toString();
204
249
  try {
205
- return await f();
206
- } catch (error) {
207
- if (isAbortError(error)) {
208
- throw error;
209
- }
210
- if (maxRetries === 0) {
211
- throw error;
212
- }
213
- const errorMessage = getErrorMessage(error);
214
- const newErrors = [...errors, error];
215
- const tryNumber = newErrors.length;
216
- if (tryNumber > maxRetries) {
217
- throw new RetryError({
218
- message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
219
- reason: "maxRetriesExceeded",
220
- errors: newErrors
250
+ const response = await fetchImplementation(urlText);
251
+ if (!response.ok) {
252
+ throw new DownloadError({
253
+ url: urlText,
254
+ statusCode: response.status,
255
+ statusText: response.statusText
221
256
  });
222
257
  }
223
- if (error instanceof Error && APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
224
- await delay(delayInMs);
225
- return _retryWithExponentialBackoff(
226
- f,
227
- { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
228
- newErrors
229
- );
230
- }
231
- if (tryNumber === 1) {
258
+ return {
259
+ data: new Uint8Array(await response.arrayBuffer()),
260
+ mimeType: (_a9 = response.headers.get("content-type")) != null ? _a9 : void 0
261
+ };
262
+ } catch (error) {
263
+ if (DownloadError.isInstance(error)) {
232
264
  throw error;
233
265
  }
234
- throw new RetryError({
235
- message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
236
- reason: "errorNotRetryable",
237
- errors: newErrors
238
- });
266
+ throw new DownloadError({ url: urlText, cause: error });
239
267
  }
240
268
  }
241
269
 
@@ -256,11 +284,57 @@ function detectImageMimeType(image) {
256
284
  }
257
285
 
258
286
  // core/prompt/data-content.ts
259
- import { InvalidDataContentError } from "@ai-sdk/provider";
260
287
  import {
261
288
  convertBase64ToUint8Array,
262
289
  convertUint8ArrayToBase64
263
290
  } from "@ai-sdk/provider-utils";
291
+
292
+ // core/prompt/invalid-data-content-error.ts
293
+ import { AISDKError as AISDKError2 } from "@ai-sdk/provider";
294
+ var name2 = "AI_InvalidDataContentError";
295
+ var marker2 = `vercel.ai.error.${name2}`;
296
+ var symbol2 = Symbol.for(marker2);
297
+ var _a2;
298
+ var InvalidDataContentError = class extends AISDKError2 {
299
+ constructor({
300
+ content,
301
+ cause,
302
+ message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
303
+ }) {
304
+ super({ name: name2, message, cause });
305
+ this[_a2] = true;
306
+ this.content = content;
307
+ }
308
+ static isInstance(error) {
309
+ return AISDKError2.hasMarker(error, marker2);
310
+ }
311
+ };
312
+ _a2 = symbol2;
313
+
314
+ // core/prompt/data-content.ts
315
+ import { z } from "zod";
316
+ var dataContentSchema = z.union([
317
+ z.string(),
318
+ z.instanceof(Uint8Array),
319
+ z.instanceof(ArrayBuffer),
320
+ z.custom(
321
+ // Buffer might not be available in some environments such as CloudFlare:
322
+ (value) => {
323
+ var _a9, _b;
324
+ return (_b = (_a9 = globalThis.Buffer) == null ? void 0 : _a9.isBuffer(value)) != null ? _b : false;
325
+ },
326
+ { message: "Must be a Buffer" }
327
+ )
328
+ ]);
329
+ function convertDataContentToBase64String(content) {
330
+ if (typeof content === "string") {
331
+ return content;
332
+ }
333
+ if (content instanceof ArrayBuffer) {
334
+ return convertUint8ArrayToBase64(new Uint8Array(content));
335
+ }
336
+ return convertUint8ArrayToBase64(content);
337
+ }
264
338
  function convertDataContentToUint8Array(content) {
265
339
  if (content instanceof Uint8Array) {
266
340
  return content;
@@ -270,7 +344,7 @@ function convertDataContentToUint8Array(content) {
270
344
  return convertBase64ToUint8Array(content);
271
345
  } catch (error) {
272
346
  throw new InvalidDataContentError({
273
- message: "Invalid data content. Content string is not a base64-encoded image.",
347
+ message: "Invalid data content. Content string is not a base64-encoded media.",
274
348
  content,
275
349
  cause: error
276
350
  });
@@ -281,130 +355,275 @@ function convertDataContentToUint8Array(content) {
281
355
  }
282
356
  throw new InvalidDataContentError({ content });
283
357
  }
358
+ function convertUint8ArrayToText(uint8Array) {
359
+ try {
360
+ return new TextDecoder().decode(uint8Array);
361
+ } catch (error) {
362
+ throw new Error("Error decoding Uint8Array to text");
363
+ }
364
+ }
365
+
366
+ // core/prompt/invalid-message-role-error.ts
367
+ import { AISDKError as AISDKError3 } from "@ai-sdk/provider";
368
+ var name3 = "AI_InvalidMessageRoleError";
369
+ var marker3 = `vercel.ai.error.${name3}`;
370
+ var symbol3 = Symbol.for(marker3);
371
+ var _a3;
372
+ var InvalidMessageRoleError = class extends AISDKError3 {
373
+ constructor({
374
+ role,
375
+ message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
376
+ }) {
377
+ super({ name: name3, message });
378
+ this[_a3] = true;
379
+ this.role = role;
380
+ }
381
+ static isInstance(error) {
382
+ return AISDKError3.hasMarker(error, marker3);
383
+ }
384
+ };
385
+ _a3 = symbol3;
386
+
387
+ // core/prompt/split-data-url.ts
388
+ function splitDataUrl(dataUrl) {
389
+ try {
390
+ const [header, base64Content] = dataUrl.split(",");
391
+ return {
392
+ mimeType: header.split(";")[0].split(":")[1],
393
+ base64Content
394
+ };
395
+ } catch (error) {
396
+ return {
397
+ mimeType: void 0,
398
+ base64Content: void 0
399
+ };
400
+ }
401
+ }
284
402
 
285
403
  // core/prompt/convert-to-language-model-prompt.ts
286
- function convertToLanguageModelPrompt(prompt) {
287
- const languageModelMessages = [];
288
- if (prompt.system != null) {
289
- languageModelMessages.push({ role: "system", content: prompt.system });
290
- }
291
- switch (prompt.type) {
292
- case "prompt": {
293
- languageModelMessages.push({
404
+ async function convertToLanguageModelPrompt({
405
+ prompt,
406
+ modelSupportsImageUrls = true,
407
+ modelSupportsUrl = () => false,
408
+ downloadImplementation = download
409
+ }) {
410
+ const downloadedAssets = await downloadAssets(
411
+ prompt.messages,
412
+ downloadImplementation,
413
+ modelSupportsImageUrls,
414
+ modelSupportsUrl
415
+ );
416
+ return [
417
+ ...prompt.system != null ? [{ role: "system", content: prompt.system }] : [],
418
+ ...prompt.messages.map(
419
+ (message) => convertToLanguageModelMessage(message, downloadedAssets)
420
+ )
421
+ ];
422
+ }
423
+ function convertToLanguageModelMessage(message, downloadedAssets) {
424
+ const role = message.role;
425
+ switch (role) {
426
+ case "system": {
427
+ return {
428
+ role: "system",
429
+ content: message.content,
430
+ providerMetadata: message.experimental_providerMetadata
431
+ };
432
+ }
433
+ case "user": {
434
+ if (typeof message.content === "string") {
435
+ return {
436
+ role: "user",
437
+ content: [{ type: "text", text: message.content }],
438
+ providerMetadata: message.experimental_providerMetadata
439
+ };
440
+ }
441
+ return {
294
442
  role: "user",
295
- content: [{ type: "text", text: prompt.prompt }]
296
- });
297
- break;
443
+ content: message.content.map((part) => convertPartToLanguageModelPart(part, downloadedAssets)).filter((part) => part.type !== "text" || part.text !== ""),
444
+ providerMetadata: message.experimental_providerMetadata
445
+ };
298
446
  }
299
- case "messages": {
300
- languageModelMessages.push(
301
- ...prompt.messages.map((message) => {
302
- switch (message.role) {
303
- case "system": {
304
- return { role: "system", content: message.content };
305
- }
306
- case "user": {
307
- if (typeof message.content === "string") {
308
- return {
309
- role: "user",
310
- content: [{ type: "text", text: message.content }]
311
- };
312
- }
313
- return {
314
- role: "user",
315
- content: message.content.map(
316
- (part) => {
317
- var _a;
318
- switch (part.type) {
319
- case "text": {
320
- return part;
321
- }
322
- case "image": {
323
- if (part.image instanceof URL) {
324
- return {
325
- type: "image",
326
- image: part.image,
327
- mimeType: part.mimeType
328
- };
329
- }
330
- const imageUint8 = convertDataContentToUint8Array(
331
- part.image
332
- );
333
- return {
334
- type: "image",
335
- image: imageUint8,
336
- mimeType: (_a = part.mimeType) != null ? _a : detectImageMimeType(imageUint8)
337
- };
338
- }
339
- }
340
- }
341
- )
342
- };
343
- }
344
- case "assistant": {
345
- if (typeof message.content === "string") {
346
- return {
347
- role: "assistant",
348
- content: [{ type: "text", text: message.content }]
349
- };
350
- }
351
- return { role: "assistant", content: message.content };
352
- }
353
- case "tool": {
354
- return message;
355
- }
356
- }
357
- })
358
- );
359
- break;
447
+ case "assistant": {
448
+ if (typeof message.content === "string") {
449
+ return {
450
+ role: "assistant",
451
+ content: [{ type: "text", text: message.content }],
452
+ providerMetadata: message.experimental_providerMetadata
453
+ };
454
+ }
455
+ return {
456
+ role: "assistant",
457
+ content: message.content.filter(
458
+ // remove empty text parts:
459
+ (part) => part.type !== "text" || part.text !== ""
460
+ ).map((part) => {
461
+ const { experimental_providerMetadata, ...rest } = part;
462
+ return {
463
+ ...rest,
464
+ providerMetadata: experimental_providerMetadata
465
+ };
466
+ }),
467
+ providerMetadata: message.experimental_providerMetadata
468
+ };
469
+ }
470
+ case "tool": {
471
+ return {
472
+ role: "tool",
473
+ content: message.content.map((part) => ({
474
+ type: "tool-result",
475
+ toolCallId: part.toolCallId,
476
+ toolName: part.toolName,
477
+ result: part.result,
478
+ content: part.experimental_content,
479
+ isError: part.isError,
480
+ providerMetadata: part.experimental_providerMetadata
481
+ })),
482
+ providerMetadata: message.experimental_providerMetadata
483
+ };
360
484
  }
361
485
  default: {
362
- const _exhaustiveCheck = prompt;
363
- throw new Error(`Unsupported prompt type: ${_exhaustiveCheck}`);
486
+ const _exhaustiveCheck = role;
487
+ throw new InvalidMessageRoleError({ role: _exhaustiveCheck });
488
+ }
489
+ }
490
+ }
491
+ async function downloadAssets(messages, downloadImplementation, modelSupportsImageUrls, modelSupportsUrl) {
492
+ const urls = messages.filter((message) => message.role === "user").map((message) => message.content).filter(
493
+ (content) => Array.isArray(content)
494
+ ).flat().filter(
495
+ (part) => part.type === "image" || part.type === "file"
496
+ ).filter(
497
+ (part) => !(part.type === "image" && modelSupportsImageUrls === true)
498
+ ).map((part) => part.type === "image" ? part.image : part.data).map(
499
+ (part) => (
500
+ // support string urls:
501
+ typeof part === "string" && (part.startsWith("http:") || part.startsWith("https:")) ? new URL(part) : part
502
+ )
503
+ ).filter((image) => image instanceof URL).filter((url) => !modelSupportsUrl(url));
504
+ const downloadedImages = await Promise.all(
505
+ urls.map(async (url) => ({
506
+ url,
507
+ data: await downloadImplementation({ url })
508
+ }))
509
+ );
510
+ return Object.fromEntries(
511
+ downloadedImages.map(({ url, data }) => [url.toString(), data])
512
+ );
513
+ }
514
+ function convertPartToLanguageModelPart(part, downloadedAssets) {
515
+ var _a9;
516
+ if (part.type === "text") {
517
+ return {
518
+ type: "text",
519
+ text: part.text,
520
+ providerMetadata: part.experimental_providerMetadata
521
+ };
522
+ }
523
+ let mimeType = part.mimeType;
524
+ let data;
525
+ let content;
526
+ let normalizedData;
527
+ const type = part.type;
528
+ switch (type) {
529
+ case "image":
530
+ data = part.image;
531
+ break;
532
+ case "file":
533
+ data = part.data;
534
+ break;
535
+ default:
536
+ throw new Error(`Unsupported part type: ${type}`);
537
+ }
538
+ try {
539
+ content = typeof data === "string" ? new URL(data) : data;
540
+ } catch (error) {
541
+ content = data;
542
+ }
543
+ if (content instanceof URL) {
544
+ if (content.protocol === "data:") {
545
+ const { mimeType: dataUrlMimeType, base64Content } = splitDataUrl(
546
+ content.toString()
547
+ );
548
+ if (dataUrlMimeType == null || base64Content == null) {
549
+ throw new Error(`Invalid data URL format in part ${type}`);
550
+ }
551
+ mimeType = dataUrlMimeType;
552
+ normalizedData = convertDataContentToUint8Array(base64Content);
553
+ } else {
554
+ const downloadedFile = downloadedAssets[content.toString()];
555
+ if (downloadedFile) {
556
+ normalizedData = downloadedFile.data;
557
+ mimeType != null ? mimeType : mimeType = downloadedFile.mimeType;
558
+ } else {
559
+ normalizedData = content;
560
+ }
561
+ }
562
+ } else {
563
+ normalizedData = convertDataContentToUint8Array(content);
564
+ }
565
+ switch (type) {
566
+ case "image": {
567
+ if (normalizedData instanceof Uint8Array) {
568
+ mimeType = (_a9 = detectImageMimeType(normalizedData)) != null ? _a9 : mimeType;
569
+ }
570
+ return {
571
+ type: "image",
572
+ image: normalizedData,
573
+ mimeType,
574
+ providerMetadata: part.experimental_providerMetadata
575
+ };
576
+ }
577
+ case "file": {
578
+ if (mimeType == null) {
579
+ throw new Error(`Mime type is missing for file part`);
580
+ }
581
+ return {
582
+ type: "file",
583
+ data: normalizedData instanceof Uint8Array ? convertDataContentToBase64String(normalizedData) : normalizedData,
584
+ mimeType,
585
+ providerMetadata: part.experimental_providerMetadata
586
+ };
364
587
  }
365
588
  }
366
- return languageModelMessages;
367
589
  }
368
590
 
369
- // core/prompt/get-validated-prompt.ts
370
- import { InvalidPromptError } from "@ai-sdk/provider";
371
- function getValidatedPrompt(prompt) {
372
- if (prompt.prompt == null && prompt.messages == null) {
373
- throw new InvalidPromptError({
374
- prompt,
375
- message: "prompt or messages must be defined"
591
+ // errors/invalid-argument-error.ts
592
+ import { AISDKError as AISDKError4 } from "@ai-sdk/provider";
593
+ var name4 = "AI_InvalidArgumentError";
594
+ var marker4 = `vercel.ai.error.${name4}`;
595
+ var symbol4 = Symbol.for(marker4);
596
+ var _a4;
597
+ var InvalidArgumentError = class extends AISDKError4 {
598
+ constructor({
599
+ parameter,
600
+ value,
601
+ message
602
+ }) {
603
+ super({
604
+ name: name4,
605
+ message: `Invalid argument for parameter ${parameter}: ${message}`
376
606
  });
607
+ this[_a4] = true;
608
+ this.parameter = parameter;
609
+ this.value = value;
377
610
  }
378
- if (prompt.prompt != null && prompt.messages != null) {
379
- throw new InvalidPromptError({
380
- prompt,
381
- message: "prompt and messages cannot be defined at the same time"
382
- });
611
+ static isInstance(error) {
612
+ return AISDKError4.hasMarker(error, marker4);
383
613
  }
384
- return prompt.prompt != null ? {
385
- type: "prompt",
386
- prompt: prompt.prompt,
387
- messages: void 0,
388
- system: prompt.system
389
- } : {
390
- type: "messages",
391
- prompt: void 0,
392
- messages: prompt.messages,
393
- // only possible case bc of checks above
394
- system: prompt.system
395
- };
396
- }
614
+ };
615
+ _a4 = symbol4;
397
616
 
398
617
  // core/prompt/prepare-call-settings.ts
399
- import { InvalidArgumentError } from "@ai-sdk/provider";
400
618
  function prepareCallSettings({
401
619
  maxTokens,
402
620
  temperature,
403
621
  topP,
622
+ topK,
404
623
  presencePenalty,
405
624
  frequencyPenalty,
406
- seed,
407
- maxRetries
625
+ stopSequences,
626
+ seed
408
627
  }) {
409
628
  if (maxTokens != null) {
410
629
  if (!Number.isInteger(maxTokens)) {
@@ -440,6 +659,15 @@ function prepareCallSettings({
440
659
  });
441
660
  }
442
661
  }
662
+ if (topK != null) {
663
+ if (typeof topK !== "number") {
664
+ throw new InvalidArgumentError({
665
+ parameter: "topK",
666
+ value: topK,
667
+ message: "topK must be a number"
668
+ });
669
+ }
670
+ }
443
671
  if (presencePenalty != null) {
444
672
  if (typeof presencePenalty !== "number") {
445
673
  throw new InvalidArgumentError({
@@ -467,734 +695,697 @@ function prepareCallSettings({
467
695
  });
468
696
  }
469
697
  }
470
- if (maxRetries != null) {
471
- if (!Number.isInteger(maxRetries)) {
472
- throw new InvalidArgumentError({
473
- parameter: "maxRetries",
474
- value: maxRetries,
475
- message: "maxRetries must be an integer"
476
- });
477
- }
478
- if (maxRetries < 0) {
479
- throw new InvalidArgumentError({
480
- parameter: "maxRetries",
481
- value: maxRetries,
482
- message: "maxRetries must be >= 0"
483
- });
484
- }
485
- }
486
698
  return {
487
699
  maxTokens,
488
700
  temperature: temperature != null ? temperature : 0,
489
701
  topP,
702
+ topK,
490
703
  presencePenalty,
491
704
  frequencyPenalty,
492
- seed,
493
- maxRetries: maxRetries != null ? maxRetries : 2
705
+ stopSequences: stopSequences != null && stopSequences.length > 0 ? stopSequences : void 0,
706
+ seed
494
707
  };
495
708
  }
496
709
 
497
- // core/util/convert-zod-to-json-schema.ts
498
- import zodToJsonSchema from "zod-to-json-schema";
499
- function convertZodToJSONSchema(zodSchema) {
500
- return zodToJsonSchema(zodSchema);
710
+ // util/retry-with-exponential-backoff.ts
711
+ import { APICallError } from "@ai-sdk/provider";
712
+ import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
713
+
714
+ // util/delay.ts
715
+ async function delay(delayInMs) {
716
+ return delayInMs == null ? Promise.resolve() : new Promise((resolve) => setTimeout(resolve, delayInMs));
501
717
  }
502
718
 
503
- // shared/stream-parts.ts
504
- var textStreamPart = {
505
- code: "0",
506
- name: "text",
507
- parse: (value) => {
508
- if (typeof value !== "string") {
509
- throw new Error('"text" parts expect a string value.');
510
- }
511
- return { type: "text", value };
719
+ // util/retry-error.ts
720
+ import { AISDKError as AISDKError5 } from "@ai-sdk/provider";
721
+ var name5 = "AI_RetryError";
722
+ var marker5 = `vercel.ai.error.${name5}`;
723
+ var symbol5 = Symbol.for(marker5);
724
+ var _a5;
725
+ var RetryError = class extends AISDKError5 {
726
+ constructor({
727
+ message,
728
+ reason,
729
+ errors
730
+ }) {
731
+ super({ name: name5, message });
732
+ this[_a5] = true;
733
+ this.reason = reason;
734
+ this.errors = errors;
735
+ this.lastError = errors[errors.length - 1];
512
736
  }
513
- };
514
- var functionCallStreamPart = {
515
- code: "1",
516
- name: "function_call",
517
- parse: (value) => {
518
- if (value == null || typeof value !== "object" || !("function_call" in value) || typeof value.function_call !== "object" || value.function_call == null || !("name" in value.function_call) || !("arguments" in value.function_call) || typeof value.function_call.name !== "string" || typeof value.function_call.arguments !== "string") {
519
- throw new Error(
520
- '"function_call" parts expect an object with a "function_call" property.'
521
- );
522
- }
523
- return {
524
- type: "function_call",
525
- value
526
- };
737
+ static isInstance(error) {
738
+ return AISDKError5.hasMarker(error, marker5);
527
739
  }
528
740
  };
529
- var dataStreamPart = {
530
- code: "2",
531
- name: "data",
532
- parse: (value) => {
533
- if (!Array.isArray(value)) {
534
- throw new Error('"data" parts expect an array value.');
741
+ _a5 = symbol5;
742
+
743
+ // util/retry-with-exponential-backoff.ts
744
+ var retryWithExponentialBackoff = ({
745
+ maxRetries = 2,
746
+ initialDelayInMs = 2e3,
747
+ backoffFactor = 2
748
+ } = {}) => async (f) => _retryWithExponentialBackoff(f, {
749
+ maxRetries,
750
+ delayInMs: initialDelayInMs,
751
+ backoffFactor
752
+ });
753
+ async function _retryWithExponentialBackoff(f, {
754
+ maxRetries,
755
+ delayInMs,
756
+ backoffFactor
757
+ }, errors = []) {
758
+ try {
759
+ return await f();
760
+ } catch (error) {
761
+ if (isAbortError(error)) {
762
+ throw error;
535
763
  }
536
- return { type: "data", value };
537
- }
538
- };
539
- var errorStreamPart = {
540
- code: "3",
541
- name: "error",
542
- parse: (value) => {
543
- if (typeof value !== "string") {
544
- throw new Error('"error" parts expect a string value.');
764
+ if (maxRetries === 0) {
765
+ throw error;
545
766
  }
546
- return { type: "error", value };
547
- }
548
- };
549
- var assistantMessageStreamPart = {
550
- code: "4",
551
- name: "assistant_message",
552
- parse: (value) => {
553
- if (value == null || typeof value !== "object" || !("id" in value) || !("role" in value) || !("content" in value) || typeof value.id !== "string" || typeof value.role !== "string" || value.role !== "assistant" || !Array.isArray(value.content) || !value.content.every(
554
- (item) => item != null && typeof item === "object" && "type" in item && item.type === "text" && "text" in item && item.text != null && typeof item.text === "object" && "value" in item.text && typeof item.text.value === "string"
555
- )) {
556
- throw new Error(
557
- '"assistant_message" parts expect an object with an "id", "role", and "content" property.'
558
- );
767
+ const errorMessage = getErrorMessage(error);
768
+ const newErrors = [...errors, error];
769
+ const tryNumber = newErrors.length;
770
+ if (tryNumber > maxRetries) {
771
+ throw new RetryError({
772
+ message: `Failed after ${tryNumber} attempts. Last error: ${errorMessage}`,
773
+ reason: "maxRetriesExceeded",
774
+ errors: newErrors
775
+ });
559
776
  }
560
- return {
561
- type: "assistant_message",
562
- value
563
- };
564
- }
565
- };
566
- var assistantControlDataStreamPart = {
567
- code: "5",
568
- name: "assistant_control_data",
569
- parse: (value) => {
570
- if (value == null || typeof value !== "object" || !("threadId" in value) || !("messageId" in value) || typeof value.threadId !== "string" || typeof value.messageId !== "string") {
571
- throw new Error(
572
- '"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.'
777
+ if (error instanceof Error && APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
778
+ await delay(delayInMs);
779
+ return _retryWithExponentialBackoff(
780
+ f,
781
+ { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
782
+ newErrors
573
783
  );
574
784
  }
575
- return {
576
- type: "assistant_control_data",
577
- value: {
578
- threadId: value.threadId,
579
- messageId: value.messageId
580
- }
581
- };
582
- }
583
- };
584
- var dataMessageStreamPart = {
585
- code: "6",
586
- name: "data_message",
587
- parse: (value) => {
588
- if (value == null || typeof value !== "object" || !("role" in value) || !("data" in value) || typeof value.role !== "string" || value.role !== "data") {
589
- throw new Error(
590
- '"data_message" parts expect an object with a "role" and "data" property.'
591
- );
785
+ if (tryNumber === 1) {
786
+ throw error;
592
787
  }
593
- return {
594
- type: "data_message",
595
- value
596
- };
788
+ throw new RetryError({
789
+ message: `Failed after ${tryNumber} attempts with non-retryable error: '${errorMessage}'`,
790
+ reason: "errorNotRetryable",
791
+ errors: newErrors
792
+ });
597
793
  }
598
- };
599
- var toolCallsStreamPart = {
600
- code: "7",
601
- name: "tool_calls",
602
- parse: (value) => {
603
- if (value == null || typeof value !== "object" || !("tool_calls" in value) || typeof value.tool_calls !== "object" || value.tool_calls == null || !Array.isArray(value.tool_calls) || value.tool_calls.some(
604
- (tc) => tc == null || typeof tc !== "object" || !("id" in tc) || typeof tc.id !== "string" || !("type" in tc) || typeof tc.type !== "string" || !("function" in tc) || tc.function == null || typeof tc.function !== "object" || !("arguments" in tc.function) || typeof tc.function.name !== "string" || typeof tc.function.arguments !== "string"
605
- )) {
606
- throw new Error(
607
- '"tool_calls" parts expect an object with a ToolCallPayload.'
608
- );
794
+ }
795
+
796
+ // core/prompt/prepare-retries.ts
797
+ function prepareRetries({
798
+ maxRetries
799
+ }) {
800
+ if (maxRetries != null) {
801
+ if (!Number.isInteger(maxRetries)) {
802
+ throw new InvalidArgumentError({
803
+ parameter: "maxRetries",
804
+ value: maxRetries,
805
+ message: "maxRetries must be an integer"
806
+ });
609
807
  }
610
- return {
611
- type: "tool_calls",
612
- value
613
- };
614
- }
615
- };
616
- var messageAnnotationsStreamPart = {
617
- code: "8",
618
- name: "message_annotations",
619
- parse: (value) => {
620
- if (!Array.isArray(value)) {
621
- throw new Error('"message_annotations" parts expect an array value.');
808
+ if (maxRetries < 0) {
809
+ throw new InvalidArgumentError({
810
+ parameter: "maxRetries",
811
+ value: maxRetries,
812
+ message: "maxRetries must be >= 0"
813
+ });
622
814
  }
623
- return { type: "message_annotations", value };
624
815
  }
625
- };
626
- var toolCallStreamPart = {
627
- code: "9",
628
- name: "tool_call",
629
- parse: (value) => {
630
- if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object") {
631
- throw new Error(
632
- '"tool_call" parts expect an object with a "toolCallId", "toolName", and "args" property.'
633
- );
634
- }
816
+ const maxRetriesResult = maxRetries != null ? maxRetries : 2;
817
+ return {
818
+ maxRetries: maxRetriesResult,
819
+ retry: retryWithExponentialBackoff({ maxRetries: maxRetriesResult })
820
+ };
821
+ }
822
+
823
+ // core/prompt/prepare-tools-and-tool-choice.ts
824
+ import { asSchema } from "@ai-sdk/ui-utils";
825
+
826
+ // core/util/is-non-empty-object.ts
827
+ function isNonEmptyObject(object) {
828
+ return object != null && Object.keys(object).length > 0;
829
+ }
830
+
831
+ // core/prompt/prepare-tools-and-tool-choice.ts
832
+ function prepareToolsAndToolChoice({
833
+ tools,
834
+ toolChoice,
835
+ activeTools
836
+ }) {
837
+ if (!isNonEmptyObject(tools)) {
635
838
  return {
636
- type: "tool_call",
637
- value
839
+ tools: void 0,
840
+ toolChoice: void 0
638
841
  };
639
842
  }
640
- };
641
- var toolResultStreamPart = {
642
- code: "a",
643
- name: "tool_result",
644
- parse: (value) => {
645
- if (value == null || typeof value !== "object" || !("toolCallId" in value) || typeof value.toolCallId !== "string" || !("toolName" in value) || typeof value.toolName !== "string" || !("args" in value) || typeof value.args !== "object" || !("result" in value)) {
646
- throw new Error(
647
- '"tool_result" parts expect an object with a "toolCallId", "toolName", "args", and "result" property.'
648
- );
649
- }
650
- return {
651
- type: "tool_result",
652
- value
653
- };
843
+ const filteredTools = activeTools != null ? Object.entries(tools).filter(
844
+ ([name9]) => activeTools.includes(name9)
845
+ ) : Object.entries(tools);
846
+ return {
847
+ tools: filteredTools.map(([name9, tool]) => {
848
+ const toolType = tool.type;
849
+ switch (toolType) {
850
+ case void 0:
851
+ case "function":
852
+ return {
853
+ type: "function",
854
+ name: name9,
855
+ description: tool.description,
856
+ parameters: asSchema(tool.parameters).jsonSchema
857
+ };
858
+ case "provider-defined":
859
+ return {
860
+ type: "provider-defined",
861
+ name: name9,
862
+ id: tool.id,
863
+ args: tool.args
864
+ };
865
+ default: {
866
+ const exhaustiveCheck = toolType;
867
+ throw new Error(`Unsupported tool type: ${exhaustiveCheck}`);
868
+ }
869
+ }
870
+ }),
871
+ toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
872
+ };
873
+ }
874
+
875
+ // core/prompt/standardize-prompt.ts
876
+ import { InvalidPromptError } from "@ai-sdk/provider";
877
+ import { safeValidateTypes } from "@ai-sdk/provider-utils";
878
+ import { z as z7 } from "zod";
879
+
880
+ // core/prompt/message.ts
881
+ import { z as z6 } from "zod";
882
+
883
+ // core/types/provider-metadata.ts
884
+ import { z as z3 } from "zod";
885
+
886
+ // core/types/json-value.ts
887
+ import { z as z2 } from "zod";
888
+ var jsonValueSchema = z2.lazy(
889
+ () => z2.union([
890
+ z2.null(),
891
+ z2.string(),
892
+ z2.number(),
893
+ z2.boolean(),
894
+ z2.record(z2.string(), jsonValueSchema),
895
+ z2.array(jsonValueSchema)
896
+ ])
897
+ );
898
+
899
+ // core/types/provider-metadata.ts
900
+ var providerMetadataSchema = z3.record(
901
+ z3.string(),
902
+ z3.record(z3.string(), jsonValueSchema)
903
+ );
904
+
905
+ // core/prompt/content-part.ts
906
+ import { z as z5 } from "zod";
907
+
908
+ // core/prompt/tool-result-content.ts
909
+ import { z as z4 } from "zod";
910
+ var toolResultContentSchema = z4.array(
911
+ z4.union([
912
+ z4.object({ type: z4.literal("text"), text: z4.string() }),
913
+ z4.object({
914
+ type: z4.literal("image"),
915
+ data: z4.string(),
916
+ mimeType: z4.string().optional()
917
+ })
918
+ ])
919
+ );
920
+
921
+ // core/prompt/content-part.ts
922
+ var textPartSchema = z5.object({
923
+ type: z5.literal("text"),
924
+ text: z5.string(),
925
+ experimental_providerMetadata: providerMetadataSchema.optional()
926
+ });
927
+ var imagePartSchema = z5.object({
928
+ type: z5.literal("image"),
929
+ image: z5.union([dataContentSchema, z5.instanceof(URL)]),
930
+ mimeType: z5.string().optional(),
931
+ experimental_providerMetadata: providerMetadataSchema.optional()
932
+ });
933
+ var filePartSchema = z5.object({
934
+ type: z5.literal("file"),
935
+ data: z5.union([dataContentSchema, z5.instanceof(URL)]),
936
+ mimeType: z5.string(),
937
+ experimental_providerMetadata: providerMetadataSchema.optional()
938
+ });
939
+ var toolCallPartSchema = z5.object({
940
+ type: z5.literal("tool-call"),
941
+ toolCallId: z5.string(),
942
+ toolName: z5.string(),
943
+ args: z5.unknown()
944
+ });
945
+ var toolResultPartSchema = z5.object({
946
+ type: z5.literal("tool-result"),
947
+ toolCallId: z5.string(),
948
+ toolName: z5.string(),
949
+ result: z5.unknown(),
950
+ content: toolResultContentSchema.optional(),
951
+ isError: z5.boolean().optional(),
952
+ experimental_providerMetadata: providerMetadataSchema.optional()
953
+ });
954
+
955
+ // core/prompt/message.ts
956
+ var coreSystemMessageSchema = z6.object({
957
+ role: z6.literal("system"),
958
+ content: z6.string(),
959
+ experimental_providerMetadata: providerMetadataSchema.optional()
960
+ });
961
+ var coreUserMessageSchema = z6.object({
962
+ role: z6.literal("user"),
963
+ content: z6.union([
964
+ z6.string(),
965
+ z6.array(z6.union([textPartSchema, imagePartSchema, filePartSchema]))
966
+ ]),
967
+ experimental_providerMetadata: providerMetadataSchema.optional()
968
+ });
969
+ var coreAssistantMessageSchema = z6.object({
970
+ role: z6.literal("assistant"),
971
+ content: z6.union([
972
+ z6.string(),
973
+ z6.array(z6.union([textPartSchema, toolCallPartSchema]))
974
+ ]),
975
+ experimental_providerMetadata: providerMetadataSchema.optional()
976
+ });
977
+ var coreToolMessageSchema = z6.object({
978
+ role: z6.literal("tool"),
979
+ content: z6.array(toolResultPartSchema),
980
+ experimental_providerMetadata: providerMetadataSchema.optional()
981
+ });
982
+ var coreMessageSchema = z6.union([
983
+ coreSystemMessageSchema,
984
+ coreUserMessageSchema,
985
+ coreAssistantMessageSchema,
986
+ coreToolMessageSchema
987
+ ]);
988
+
989
+ // core/prompt/detect-prompt-type.ts
990
+ function detectPromptType(prompt) {
991
+ if (!Array.isArray(prompt)) {
992
+ return "other";
654
993
  }
655
- };
656
- var streamParts = [
657
- textStreamPart,
658
- functionCallStreamPart,
659
- dataStreamPart,
660
- errorStreamPart,
661
- assistantMessageStreamPart,
662
- assistantControlDataStreamPart,
663
- dataMessageStreamPart,
664
- toolCallsStreamPart,
665
- messageAnnotationsStreamPart,
666
- toolCallStreamPart,
667
- toolResultStreamPart
668
- ];
669
- var streamPartsByCode = {
670
- [textStreamPart.code]: textStreamPart,
671
- [functionCallStreamPart.code]: functionCallStreamPart,
672
- [dataStreamPart.code]: dataStreamPart,
673
- [errorStreamPart.code]: errorStreamPart,
674
- [assistantMessageStreamPart.code]: assistantMessageStreamPart,
675
- [assistantControlDataStreamPart.code]: assistantControlDataStreamPart,
676
- [dataMessageStreamPart.code]: dataMessageStreamPart,
677
- [toolCallsStreamPart.code]: toolCallsStreamPart,
678
- [messageAnnotationsStreamPart.code]: messageAnnotationsStreamPart,
679
- [toolCallStreamPart.code]: toolCallStreamPart,
680
- [toolResultStreamPart.code]: toolResultStreamPart
681
- };
682
- var StreamStringPrefixes = {
683
- [textStreamPart.name]: textStreamPart.code,
684
- [functionCallStreamPart.name]: functionCallStreamPart.code,
685
- [dataStreamPart.name]: dataStreamPart.code,
686
- [errorStreamPart.name]: errorStreamPart.code,
687
- [assistantMessageStreamPart.name]: assistantMessageStreamPart.code,
688
- [assistantControlDataStreamPart.name]: assistantControlDataStreamPart.code,
689
- [dataMessageStreamPart.name]: dataMessageStreamPart.code,
690
- [toolCallsStreamPart.name]: toolCallsStreamPart.code,
691
- [messageAnnotationsStreamPart.name]: messageAnnotationsStreamPart.code,
692
- [toolCallStreamPart.name]: toolCallStreamPart.code,
693
- [toolResultStreamPart.name]: toolResultStreamPart.code
694
- };
695
- var validCodes = streamParts.map((part) => part.code);
696
- var parseStreamPart = (line) => {
697
- const firstSeparatorIndex = line.indexOf(":");
698
- if (firstSeparatorIndex === -1) {
699
- throw new Error("Failed to parse stream string. No separator found.");
700
- }
701
- const prefix = line.slice(0, firstSeparatorIndex);
702
- if (!validCodes.includes(prefix)) {
703
- throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
704
- }
705
- const code = prefix;
706
- const textValue = line.slice(firstSeparatorIndex + 1);
707
- const jsonValue = JSON.parse(textValue);
708
- return streamPartsByCode[code].parse(jsonValue);
709
- };
710
- function formatStreamPart(type, value) {
711
- const streamPart = streamParts.find((part) => part.name === type);
712
- if (!streamPart) {
713
- throw new Error(`Invalid stream part type: ${type}`);
994
+ if (prompt.length === 0) {
995
+ return "messages";
996
+ }
997
+ const characteristics = prompt.map(detectSingleMessageCharacteristics);
998
+ if (characteristics.some((c) => c === "has-ui-specific-parts")) {
999
+ return "ui-messages";
1000
+ } else if (characteristics.every(
1001
+ (c) => c === "has-core-specific-parts" || c === "message"
1002
+ )) {
1003
+ return "messages";
1004
+ } else {
1005
+ return "other";
714
1006
  }
715
- return `${streamPart.code}:${JSON.stringify(value)}
716
- `;
717
1007
  }
718
-
719
- // shared/utils.ts
720
- function createChunkDecoder(complex) {
721
- const decoder = new TextDecoder();
722
- if (!complex) {
723
- return function(chunk) {
724
- if (!chunk)
725
- return "";
726
- return decoder.decode(chunk, { stream: true });
727
- };
1008
+ function detectSingleMessageCharacteristics(message) {
1009
+ if (typeof message === "object" && message !== null && (message.role === "function" || // UI-only role
1010
+ message.role === "data" || // UI-only role
1011
+ "toolInvocations" in message || // UI-specific field
1012
+ "experimental_attachments" in message)) {
1013
+ return "has-ui-specific-parts";
1014
+ } else if (typeof message === "object" && message !== null && "content" in message && (Array.isArray(message.content) || // Core messages can have array content
1015
+ "experimental_providerMetadata" in message)) {
1016
+ return "has-core-specific-parts";
1017
+ } else if (typeof message === "object" && message !== null && "role" in message && "content" in message && typeof message.content === "string" && ["system", "user", "assistant", "tool"].includes(message.role)) {
1018
+ return "message";
1019
+ } else {
1020
+ return "other";
728
1021
  }
729
- return function(chunk) {
730
- const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
731
- return decoded.map(parseStreamPart).filter(Boolean);
732
- };
733
1022
  }
734
1023
 
735
- // streams/ai-stream.ts
736
- import {
737
- createParser
738
- } from "eventsource-parser";
739
- function createEventStreamTransformer(customParser) {
740
- const textDecoder = new TextDecoder();
741
- let eventSourceParser;
742
- return new TransformStream({
743
- async start(controller) {
744
- eventSourceParser = createParser(
745
- (event) => {
746
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
747
- // @see https://replicate.com/docs/streaming
748
- event.event === "done") {
749
- controller.terminate();
750
- return;
1024
+ // core/prompt/attachments-to-parts.ts
1025
+ function attachmentsToParts(attachments) {
1026
+ var _a9, _b, _c;
1027
+ const parts = [];
1028
+ for (const attachment of attachments) {
1029
+ let url;
1030
+ try {
1031
+ url = new URL(attachment.url);
1032
+ } catch (error) {
1033
+ throw new Error(`Invalid URL: ${attachment.url}`);
1034
+ }
1035
+ switch (url.protocol) {
1036
+ case "http:":
1037
+ case "https:": {
1038
+ if ((_a9 = attachment.contentType) == null ? void 0 : _a9.startsWith("image/")) {
1039
+ parts.push({ type: "image", image: url });
1040
+ } else {
1041
+ if (!attachment.contentType) {
1042
+ throw new Error(
1043
+ "If the attachment is not an image, it must specify a content type"
1044
+ );
751
1045
  }
752
- if ("data" in event) {
753
- const parsedMessage = customParser ? customParser(event.data, {
754
- event: event.event
755
- }) : event.data;
756
- if (parsedMessage)
757
- controller.enqueue(parsedMessage);
1046
+ parts.push({
1047
+ type: "file",
1048
+ data: url,
1049
+ mimeType: attachment.contentType
1050
+ });
1051
+ }
1052
+ break;
1053
+ }
1054
+ case "data:": {
1055
+ let header;
1056
+ let base64Content;
1057
+ let mimeType;
1058
+ try {
1059
+ [header, base64Content] = attachment.url.split(",");
1060
+ mimeType = header.split(";")[0].split(":")[1];
1061
+ } catch (error) {
1062
+ throw new Error(`Error processing data URL: ${attachment.url}`);
1063
+ }
1064
+ if (mimeType == null || base64Content == null) {
1065
+ throw new Error(`Invalid data URL format: ${attachment.url}`);
1066
+ }
1067
+ if ((_b = attachment.contentType) == null ? void 0 : _b.startsWith("image/")) {
1068
+ parts.push({
1069
+ type: "image",
1070
+ image: convertDataContentToUint8Array(base64Content)
1071
+ });
1072
+ } else if ((_c = attachment.contentType) == null ? void 0 : _c.startsWith("text/")) {
1073
+ parts.push({
1074
+ type: "text",
1075
+ text: convertUint8ArrayToText(
1076
+ convertDataContentToUint8Array(base64Content)
1077
+ )
1078
+ });
1079
+ } else {
1080
+ if (!attachment.contentType) {
1081
+ throw new Error(
1082
+ "If the attachment is not an image or text, it must specify a content type"
1083
+ );
758
1084
  }
1085
+ parts.push({
1086
+ type: "file",
1087
+ data: base64Content,
1088
+ mimeType: attachment.contentType
1089
+ });
759
1090
  }
760
- );
761
- },
762
- transform(chunk) {
763
- eventSourceParser.feed(textDecoder.decode(chunk));
1091
+ break;
1092
+ }
1093
+ default: {
1094
+ throw new Error(`Unsupported URL protocol: ${url.protocol}`);
1095
+ }
764
1096
  }
765
- });
1097
+ }
1098
+ return parts;
766
1099
  }
767
- function createCallbacksTransformer(cb) {
768
- const textEncoder = new TextEncoder();
769
- let aggregatedResponse = "";
770
- const callbacks = cb || {};
771
- return new TransformStream({
772
- async start() {
773
- if (callbacks.onStart)
774
- await callbacks.onStart();
775
- },
776
- async transform(message, controller) {
777
- const content = typeof message === "string" ? message : message.content;
778
- controller.enqueue(textEncoder.encode(content));
779
- aggregatedResponse += content;
780
- if (callbacks.onToken)
781
- await callbacks.onToken(content);
782
- if (callbacks.onText && typeof message === "string") {
783
- await callbacks.onText(message);
1100
+
1101
+ // core/prompt/message-conversion-error.ts
1102
+ import { AISDKError as AISDKError6 } from "@ai-sdk/provider";
1103
+ var name6 = "AI_MessageConversionError";
1104
+ var marker6 = `vercel.ai.error.${name6}`;
1105
+ var symbol6 = Symbol.for(marker6);
1106
+ var _a6;
1107
+ var MessageConversionError = class extends AISDKError6 {
1108
+ constructor({
1109
+ originalMessage,
1110
+ message
1111
+ }) {
1112
+ super({ name: name6, message });
1113
+ this[_a6] = true;
1114
+ this.originalMessage = originalMessage;
1115
+ }
1116
+ static isInstance(error) {
1117
+ return AISDKError6.hasMarker(error, marker6);
1118
+ }
1119
+ };
1120
+ _a6 = symbol6;
1121
+
1122
+ // core/prompt/convert-to-core-messages.ts
1123
+ function convertToCoreMessages(messages, options) {
1124
+ var _a9;
1125
+ const tools = (_a9 = options == null ? void 0 : options.tools) != null ? _a9 : {};
1126
+ const coreMessages = [];
1127
+ for (const message of messages) {
1128
+ const { role, content, toolInvocations, experimental_attachments } = message;
1129
+ switch (role) {
1130
+ case "system": {
1131
+ coreMessages.push({
1132
+ role: "system",
1133
+ content
1134
+ });
1135
+ break;
784
1136
  }
785
- },
786
- async flush() {
787
- const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
788
- if (callbacks.onCompletion) {
789
- await callbacks.onCompletion(aggregatedResponse);
1137
+ case "user": {
1138
+ coreMessages.push({
1139
+ role: "user",
1140
+ content: experimental_attachments ? [
1141
+ { type: "text", text: content },
1142
+ ...attachmentsToParts(experimental_attachments)
1143
+ ] : content
1144
+ });
1145
+ break;
1146
+ }
1147
+ case "assistant": {
1148
+ if (toolInvocations == null) {
1149
+ coreMessages.push({ role: "assistant", content });
1150
+ break;
1151
+ }
1152
+ coreMessages.push({
1153
+ role: "assistant",
1154
+ content: [
1155
+ { type: "text", text: content },
1156
+ ...toolInvocations.map(
1157
+ ({ toolCallId, toolName, args }) => ({
1158
+ type: "tool-call",
1159
+ toolCallId,
1160
+ toolName,
1161
+ args
1162
+ })
1163
+ )
1164
+ ]
1165
+ });
1166
+ coreMessages.push({
1167
+ role: "tool",
1168
+ content: toolInvocations.map((toolInvocation) => {
1169
+ if (!("result" in toolInvocation)) {
1170
+ throw new MessageConversionError({
1171
+ originalMessage: message,
1172
+ message: "ToolInvocation must have a result: " + JSON.stringify(toolInvocation)
1173
+ });
1174
+ }
1175
+ const { toolCallId, toolName, result } = toolInvocation;
1176
+ const tool = tools[toolName];
1177
+ return (tool == null ? void 0 : tool.experimental_toToolResultContent) != null ? {
1178
+ type: "tool-result",
1179
+ toolCallId,
1180
+ toolName,
1181
+ result: tool.experimental_toToolResultContent(result),
1182
+ experimental_content: tool.experimental_toToolResultContent(result)
1183
+ } : {
1184
+ type: "tool-result",
1185
+ toolCallId,
1186
+ toolName,
1187
+ result
1188
+ };
1189
+ })
1190
+ });
1191
+ break;
1192
+ }
1193
+ case "data": {
1194
+ break;
790
1195
  }
791
- if (callbacks.onFinal && !isOpenAICallbacks) {
792
- await callbacks.onFinal(aggregatedResponse);
1196
+ default: {
1197
+ const _exhaustiveCheck = role;
1198
+ throw new MessageConversionError({
1199
+ originalMessage: message,
1200
+ message: `Unsupported role: ${_exhaustiveCheck}`
1201
+ });
793
1202
  }
794
1203
  }
795
- });
796
- }
797
- function isOfTypeOpenAIStreamCallbacks(callbacks) {
798
- return "experimental_onFunctionCall" in callbacks;
1204
+ }
1205
+ return coreMessages;
799
1206
  }
800
- function trimStartOfStreamHelper() {
801
- let isStreamStart = true;
802
- return (text) => {
803
- if (isStreamStart) {
804
- text = text.trimStart();
805
- if (text)
806
- isStreamStart = false;
1207
+
1208
+ // core/prompt/standardize-prompt.ts
1209
+ function standardizePrompt({
1210
+ prompt,
1211
+ tools
1212
+ }) {
1213
+ if (prompt.prompt == null && prompt.messages == null) {
1214
+ throw new InvalidPromptError({
1215
+ prompt,
1216
+ message: "prompt or messages must be defined"
1217
+ });
1218
+ }
1219
+ if (prompt.prompt != null && prompt.messages != null) {
1220
+ throw new InvalidPromptError({
1221
+ prompt,
1222
+ message: "prompt and messages cannot be defined at the same time"
1223
+ });
1224
+ }
1225
+ if (prompt.system != null && typeof prompt.system !== "string") {
1226
+ throw new InvalidPromptError({
1227
+ prompt,
1228
+ message: "system must be a string"
1229
+ });
1230
+ }
1231
+ if (prompt.prompt != null) {
1232
+ if (typeof prompt.prompt !== "string") {
1233
+ throw new InvalidPromptError({
1234
+ prompt,
1235
+ message: "prompt must be a string"
1236
+ });
807
1237
  }
808
- return text;
809
- };
810
- }
811
- function AIStream(response, customParser, callbacks) {
812
- if (!response.ok) {
813
- if (response.body) {
814
- const reader = response.body.getReader();
815
- return new ReadableStream({
816
- async start(controller) {
817
- const { done, value } = await reader.read();
818
- if (!done) {
819
- const errorText = new TextDecoder().decode(value);
820
- controller.error(new Error(`Response error: ${errorText}`));
821
- }
1238
+ return {
1239
+ type: "prompt",
1240
+ system: prompt.system,
1241
+ messages: [
1242
+ {
1243
+ role: "user",
1244
+ content: prompt.prompt
822
1245
  }
1246
+ ]
1247
+ };
1248
+ }
1249
+ if (prompt.messages != null) {
1250
+ const promptType = detectPromptType(prompt.messages);
1251
+ if (promptType === "other") {
1252
+ throw new InvalidPromptError({
1253
+ prompt,
1254
+ message: "messages must be an array of CoreMessage or UIMessage"
823
1255
  });
824
- } else {
825
- return new ReadableStream({
826
- start(controller) {
827
- controller.error(new Error("Response error: No response body"));
828
- }
1256
+ }
1257
+ const messages = promptType === "ui-messages" ? convertToCoreMessages(prompt.messages, {
1258
+ tools
1259
+ }) : prompt.messages;
1260
+ const validationResult = safeValidateTypes({
1261
+ value: messages,
1262
+ schema: z7.array(coreMessageSchema)
1263
+ });
1264
+ if (!validationResult.success) {
1265
+ throw new InvalidPromptError({
1266
+ prompt,
1267
+ message: "messages must be an array of CoreMessage or UIMessage",
1268
+ cause: validationResult.error
829
1269
  });
830
1270
  }
1271
+ return {
1272
+ type: "messages",
1273
+ messages,
1274
+ system: prompt.system
1275
+ };
831
1276
  }
832
- const responseBodyStream = response.body || createEmptyReadableStream();
833
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
834
- }
835
- function createEmptyReadableStream() {
836
- return new ReadableStream({
837
- start(controller) {
838
- controller.close();
839
- }
840
- });
841
- }
842
- function readableFromAsyncIterable(iterable) {
843
- let it = iterable[Symbol.asyncIterator]();
844
- return new ReadableStream({
845
- async pull(controller) {
846
- const { done, value } = await it.next();
847
- if (done)
848
- controller.close();
849
- else
850
- controller.enqueue(value);
851
- },
852
- async cancel(reason) {
853
- var _a;
854
- await ((_a = it.return) == null ? void 0 : _a.call(it, reason));
855
- }
856
- });
1277
+ throw new Error("unreachable");
857
1278
  }
858
1279
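The standardizePrompt helper added above funnels both supported call styles (a plain prompt string or a messages array) into one shape before the zod validation runs. A minimal sketch of the rules it enforces, with invented inputs; the return shapes are copied from the code above:

    // Sketch only – inputs are invented, return shapes taken from the code above.
    standardizePrompt({
      prompt: { prompt: "What is the capital of France?" },
      tools: undefined,
    });
    // => { type: "prompt", system: undefined,
    //      messages: [{ role: "user", content: "What is the capital of France?" }] }

    standardizePrompt({ prompt: { prompt: "hi", messages: [] }, tools: undefined });
    // throws InvalidPromptError: "prompt and messages cannot be defined at the same time"

    standardizePrompt({
      prompt: { messages: [{ role: "user", content: "hi" }] },
      tools: undefined,
    });
    // => { type: "messages", messages: [/* validated CoreMessages */], system: undefined }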
 
859
- // streams/stream-data.ts
860
- function createStreamDataTransformer() {
861
- const encoder = new TextEncoder();
862
- const decoder = new TextDecoder();
863
- return new TransformStream({
864
- transform: async (chunk, controller) => {
865
- const message = decoder.decode(chunk);
866
- controller.enqueue(encoder.encode(formatStreamPart("text", message)));
867
- }
868
- });
1280
+ // core/types/usage.ts
1281
+ function calculateLanguageModelUsage({
1282
+ promptTokens,
1283
+ completionTokens
1284
+ }) {
1285
+ return {
1286
+ promptTokens,
1287
+ completionTokens,
1288
+ totalTokens: promptTokens + completionTokens
1289
+ };
869
1290
  }
870
1291
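calculateLanguageModelUsage is a small convenience that derives the total from the two counts reported by the provider, for example:

    calculateLanguageModelUsage({ promptTokens: 120, completionTokens: 38 });
    // => { promptTokens: 120, completionTokens: 38, totalTokens: 158 }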
 
871
- // streams/openai-stream.ts
872
- function parseOpenAIStream() {
873
- const extract = chunkToText();
874
- return (data) => extract(JSON.parse(data));
875
- }
876
- async function* streamable(stream) {
877
- const extract = chunkToText();
878
- for await (let chunk of stream) {
879
- if ("promptFilterResults" in chunk) {
880
- chunk = {
881
- id: chunk.id,
882
- created: chunk.created.getDate(),
883
- object: chunk.object,
884
- // not exposed by Azure API
885
- model: chunk.model,
886
- // not exposed by Azure API
887
- choices: chunk.choices.map((choice) => {
888
- var _a, _b, _c, _d, _e, _f, _g;
889
- return {
890
- delta: {
891
- content: (_a = choice.delta) == null ? void 0 : _a.content,
892
- function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
893
- role: (_c = choice.delta) == null ? void 0 : _c.role,
894
- tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
895
- index,
896
- id: toolCall.id,
897
- function: toolCall.function,
898
- type: toolCall.type
899
- })) : void 0
900
- },
901
- finish_reason: choice.finishReason,
902
- index: choice.index
903
- };
904
- })
905
- };
906
- }
907
- const text = extract(chunk);
908
- if (text)
909
- yield text;
1292
+ // errors/invalid-tool-arguments-error.ts
1293
+ import { AISDKError as AISDKError7, getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider";
1294
+ var name7 = "AI_InvalidToolArgumentsError";
1295
+ var marker7 = `vercel.ai.error.${name7}`;
1296
+ var symbol7 = Symbol.for(marker7);
1297
+ var _a7;
1298
+ var InvalidToolArgumentsError = class extends AISDKError7 {
1299
+ constructor({
1300
+ toolArgs,
1301
+ toolName,
1302
+ cause,
1303
+ message = `Invalid arguments for tool ${toolName}: ${getErrorMessage2(
1304
+ cause
1305
+ )}`
1306
+ }) {
1307
+ super({ name: name7, message, cause });
1308
+ this[_a7] = true;
1309
+ this.toolArgs = toolArgs;
1310
+ this.toolName = toolName;
910
1311
  }
911
- }
912
- function chunkToText() {
913
- const trimStartOfStream = trimStartOfStreamHelper();
914
- let isFunctionStreamingIn;
915
- return (json) => {
916
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
917
- if (isChatCompletionChunk(json)) {
918
- const delta = (_a = json.choices[0]) == null ? void 0 : _a.delta;
919
- if ((_b = delta.function_call) == null ? void 0 : _b.name) {
920
- isFunctionStreamingIn = true;
921
- return {
922
- isText: false,
923
- content: `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`
924
- };
925
- } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
926
- isFunctionStreamingIn = true;
927
- const toolCall = delta.tool_calls[0];
928
- if (toolCall.index === 0) {
929
- return {
930
- isText: false,
931
- content: `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`
932
- };
933
- } else {
934
- return {
935
- isText: false,
936
- content: `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`
937
- };
938
- }
939
- } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
940
- return {
941
- isText: false,
942
- content: cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments)
943
- };
944
- } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
945
- return {
946
- isText: false,
947
- content: cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments)
948
- };
949
- } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
950
- isFunctionStreamingIn = false;
951
- return {
952
- isText: false,
953
- content: '"}}'
954
- };
955
- } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
956
- isFunctionStreamingIn = false;
957
- return {
958
- isText: false,
959
- content: '"}}]}'
960
- };
961
- }
962
- }
963
- const text = trimStartOfStream(
964
- isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
965
- );
966
- return text;
967
- };
968
- function cleanupArguments(argumentChunk) {
969
- let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
970
- return `${escapedPartialJson}`;
1312
+ static isInstance(error) {
1313
+ return AISDKError7.hasMarker(error, marker7);
971
1314
  }
972
- }
973
- var __internal__OpenAIFnMessagesSymbol = Symbol(
974
- "internal_openai_fn_messages"
975
- );
976
- function isChatCompletionChunk(data) {
977
- return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
978
- }
979
- function isCompletion(data) {
980
- return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
981
- }
982
- function OpenAIStream(res, callbacks) {
983
- const cb = callbacks;
984
- let stream;
985
- if (Symbol.asyncIterator in res) {
986
- stream = readableFromAsyncIterable(streamable(res)).pipeThrough(
987
- createCallbacksTransformer(
988
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
989
- ...cb,
990
- onFinal: void 0
991
- } : {
992
- ...cb
993
- }
994
- )
995
- );
996
- } else {
997
- stream = AIStream(
998
- res,
999
- parseOpenAIStream(),
1000
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
1001
- ...cb,
1002
- onFinal: void 0
1003
- } : {
1004
- ...cb
1005
- }
1006
- );
1315
+ };
1316
+ _a7 = symbol7;
1317
+
1318
+ // errors/no-such-tool-error.ts
1319
+ import { AISDKError as AISDKError8 } from "@ai-sdk/provider";
1320
+ var name8 = "AI_NoSuchToolError";
1321
+ var marker8 = `vercel.ai.error.${name8}`;
1322
+ var symbol8 = Symbol.for(marker8);
1323
+ var _a8;
1324
+ var NoSuchToolError = class extends AISDKError8 {
1325
+ constructor({
1326
+ toolName,
1327
+ availableTools = void 0,
1328
+ message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
1329
+ }) {
1330
+ super({ name: name8, message });
1331
+ this[_a8] = true;
1332
+ this.toolName = toolName;
1333
+ this.availableTools = availableTools;
1007
1334
  }
1008
- if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
1009
- const functionCallTransformer = createFunctionCallTransformer(cb);
1010
- return stream.pipeThrough(functionCallTransformer);
1011
- } else {
1012
- return stream.pipeThrough(createStreamDataTransformer());
1335
+ static isInstance(error) {
1336
+ return AISDKError8.hasMarker(error, marker8);
1013
1337
  }
1338
+ };
1339
+ _a8 = symbol8;
1340
+
1341
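Both error classes above follow the marker-symbol pattern used throughout the SDK: rather than relying on instanceof (which can fail when more than one copy of the package ends up in a bundle), calling code is expected to branch on the static isInstance check. A hedged sketch, assuming some call that may reject with one of these errors:

    try {
      // ... a call that may reject with a tool error (hypothetical) ...
    } catch (error) {
      if (NoSuchToolError.isInstance(error)) {
        // the model called a tool that was never registered
        console.error(`Unknown tool: ${error.toolName}`, error.availableTools);
      } else if (InvalidToolArgumentsError.isInstance(error)) {
        // the tool exists but its generated arguments failed schema validation
        console.error(`Bad arguments for ${error.toolName}:`, error.toolArgs);
      } else {
        throw error;
      }
    }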
+ // util/is-async-generator.ts
1342
+ function isAsyncGenerator(value) {
1343
+ return value != null && typeof value === "object" && Symbol.asyncIterator in value;
1014
1344
  }
1015
- function createFunctionCallTransformer(callbacks) {
1016
- const textEncoder = new TextEncoder();
1017
- let isFirstChunk = true;
1018
- let aggregatedResponse = "";
1019
- let aggregatedFinalCompletionResponse = "";
1020
- let isFunctionStreamingIn = false;
1021
- let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
1022
- const decode = createChunkDecoder();
1023
- return new TransformStream({
1024
- async transform(chunk, controller) {
1025
- const message = decode(chunk);
1026
- aggregatedFinalCompletionResponse += message;
1027
- const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
1028
- if (shouldHandleAsFunction) {
1029
- isFunctionStreamingIn = true;
1030
- aggregatedResponse += message;
1031
- isFirstChunk = false;
1032
- return;
1033
- }
1034
- if (!isFunctionStreamingIn) {
1035
- controller.enqueue(
1036
- textEncoder.encode(formatStreamPart("text", message))
1037
- );
1038
- return;
1039
- } else {
1040
- aggregatedResponse += message;
1041
- }
1042
- },
1043
- async flush(controller) {
1044
- try {
1045
- if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
1046
- isFunctionStreamingIn = false;
1047
- const payload = JSON.parse(aggregatedResponse);
1048
- let newFunctionCallMessages = [
1049
- ...functionCallMessages
1050
- ];
1051
- let functionResponse = void 0;
1052
- if (callbacks.experimental_onFunctionCall) {
1053
- if (payload.function_call === void 0) {
1054
- console.warn(
1055
- "experimental_onFunctionCall should not be defined when using tools"
1056
- );
1057
- }
1058
- const argumentsPayload = JSON.parse(
1059
- payload.function_call.arguments
1060
- );
1061
- functionResponse = await callbacks.experimental_onFunctionCall(
1062
- {
1063
- name: payload.function_call.name,
1064
- arguments: argumentsPayload
1065
- },
1066
- (result) => {
1067
- newFunctionCallMessages = [
1068
- ...functionCallMessages,
1069
- {
1070
- role: "assistant",
1071
- content: "",
1072
- function_call: payload.function_call
1073
- },
1074
- {
1075
- role: "function",
1076
- name: payload.function_call.name,
1077
- content: JSON.stringify(result)
1078
- }
1079
- ];
1080
- return newFunctionCallMessages;
1081
- }
1082
- );
1083
- }
1084
- if (callbacks.experimental_onToolCall) {
1085
- const toolCalls = {
1086
- tools: []
1087
- };
1088
- for (const tool of payload.tool_calls) {
1089
- toolCalls.tools.push({
1090
- id: tool.id,
1091
- type: "function",
1092
- func: {
1093
- name: tool.function.name,
1094
- arguments: JSON.parse(tool.function.arguments)
1095
- }
1096
- });
1097
- }
1098
- let responseIndex = 0;
1099
- try {
1100
- functionResponse = await callbacks.experimental_onToolCall(
1101
- toolCalls,
1102
- (result) => {
1103
- if (result) {
1104
- const { tool_call_id, function_name, tool_call_result } = result;
1105
- newFunctionCallMessages = [
1106
- ...newFunctionCallMessages,
1107
- // Only append the assistant message if it's the first response
1108
- ...responseIndex === 0 ? [
1109
- {
1110
- role: "assistant",
1111
- content: "",
1112
- tool_calls: payload.tool_calls.map(
1113
- (tc) => ({
1114
- id: tc.id,
1115
- type: "function",
1116
- function: {
1117
- name: tc.function.name,
1118
- // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
1119
- arguments: JSON.stringify(
1120
- tc.function.arguments
1121
- )
1122
- }
1123
- })
1124
- )
1125
- }
1126
- ] : [],
1127
- // Append the function call result message
1128
- {
1129
- role: "tool",
1130
- tool_call_id,
1131
- name: function_name,
1132
- content: JSON.stringify(tool_call_result)
1133
- }
1134
- ];
1135
- responseIndex++;
1136
- }
1137
- return newFunctionCallMessages;
1138
- }
1139
- );
1140
- } catch (e) {
1141
- console.error("Error calling experimental_onToolCall:", e);
1142
- }
1143
- }
1144
- if (!functionResponse) {
1145
- controller.enqueue(
1146
- textEncoder.encode(
1147
- formatStreamPart(
1148
- payload.function_call ? "function_call" : "tool_calls",
1149
- // parse to prevent double-encoding:
1150
- JSON.parse(aggregatedResponse)
1151
- )
1152
- )
1153
- );
1154
- return;
1155
- } else if (typeof functionResponse === "string") {
1156
- controller.enqueue(
1157
- textEncoder.encode(formatStreamPart("text", functionResponse))
1158
- );
1159
- aggregatedFinalCompletionResponse = functionResponse;
1160
- return;
1161
- }
1162
- const filteredCallbacks = {
1163
- ...callbacks,
1164
- onStart: void 0
1165
- };
1166
- callbacks.onFinal = void 0;
1167
- const openAIStream = OpenAIStream(functionResponse, {
1168
- ...filteredCallbacks,
1169
- [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
1170
- });
1171
- const reader = openAIStream.getReader();
1172
- while (true) {
1173
- const { done, value } = await reader.read();
1174
- if (done) {
1175
- break;
1176
- }
1177
- controller.enqueue(value);
1178
- }
1179
- }
1180
- } finally {
1181
- if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
1182
- await callbacks.onFinal(aggregatedFinalCompletionResponse);
1183
- }
1184
- }
1185
- }
1186
- });
1345
+
1346
+ // util/is-generator.ts
1347
+ function isGenerator(value) {
1348
+ return value != null && typeof value === "object" && Symbol.iterator in value;
1187
1349
  }
1188
1350
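isAsyncGenerator and isGenerator are internal duck-type checks; streamUI below uses them to decide whether a tool's generate callback returned a (sync or async) generator that should be drained step by step, or a plain value/promise. Roughly, with illustrative values only:

    async function* streamNodes() { yield "a"; }
    function* syncNodes() { yield "a"; }

    isAsyncGenerator(streamNodes()); // true  – the object carries Symbol.asyncIterator
    isGenerator(syncNodes());        // true  – the object carries Symbol.iterator
    isGenerator([1, 2, 3]);          // also true – the check is loose: any iterable object passes
    isGenerator("abc");              // false – strings are not typeof "object"
    isAsyncGenerator(null);          // false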
 
1189
- // rsc/constants.ts
1190
- var STREAMABLE_VALUE_TYPE = Symbol.for("ui.streamable.value");
1191
- var DEV_DEFAULT_STREAMABLE_WARNING_TIME = 15 * 1e3;
1351
+ // util/constants.ts
1352
+ var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
1192
1353
 
1193
- // rsc/streamable.tsx
1354
+ // rsc/streamable-ui/create-suspended-chunk.tsx
1355
+ import { Suspense } from "react";
1356
+ import { Fragment, jsx as jsx2, jsxs } from "react/jsx-runtime";
1357
+ var R = [
1358
+ async ({
1359
+ c: current,
1360
+ n: next
1361
+ }) => {
1362
+ const chunk = await next;
1363
+ if (chunk.done) {
1364
+ return chunk.value;
1365
+ }
1366
+ if (chunk.append) {
1367
+ return /* @__PURE__ */ jsxs(Fragment, { children: [
1368
+ current,
1369
+ /* @__PURE__ */ jsx2(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx2(R, { c: chunk.value, n: chunk.next }) })
1370
+ ] });
1371
+ }
1372
+ return /* @__PURE__ */ jsx2(Suspense, { fallback: chunk.value, children: /* @__PURE__ */ jsx2(R, { c: chunk.value, n: chunk.next }) });
1373
+ }
1374
+ ][0];
1375
+ function createSuspendedChunk(initialValue) {
1376
+ const { promise, resolve, reject } = createResolvablePromise();
1377
+ return {
1378
+ row: /* @__PURE__ */ jsx2(Suspense, { fallback: initialValue, children: /* @__PURE__ */ jsx2(R, { c: initialValue, n: promise }) }),
1379
+ resolve,
1380
+ reject
1381
+ };
1382
+ }
1383
+
1384
+ // rsc/streamable-ui/create-streamable-ui.tsx
1194
1385
  function createStreamableUI(initialValue) {
1195
1386
  let currentValue = initialValue;
1196
1387
  let closed = false;
1197
- let { row, resolve, reject } = createSuspensedChunk(initialValue);
1388
+ let { row, resolve, reject } = createSuspendedChunk(initialValue);
1198
1389
  function assertStream(method) {
1199
1390
  if (closed) {
1200
1391
  throw new Error(method + ": UI stream is already closed.");
@@ -1210,23 +1401,17 @@ function createStreamableUI(initialValue) {
1210
1401
  console.warn(
1211
1402
  "The streamable UI has been slow to update. This may be a bug or a performance issue or you forgot to call `.done()`."
1212
1403
  );
1213
- }, DEV_DEFAULT_STREAMABLE_WARNING_TIME);
1404
+ }, HANGING_STREAM_WARNING_TIME_MS);
1214
1405
  }
1215
1406
  }
1216
1407
  warnUnclosedStream();
1217
- const streamable2 = {
1218
- /**
1219
- * The value of the streamable UI. This can be returned from a Server Action and received by the client.
1220
- */
1408
+ const streamable = {
1221
1409
  value: row,
1222
- /**
1223
- * This method updates the current UI node. It takes a new UI node and replaces the old one.
1224
- */
1225
1410
  update(value) {
1226
1411
  assertStream(".update()");
1227
1412
  if (value === currentValue) {
1228
1413
  warnUnclosedStream();
1229
- return streamable2;
1414
+ return streamable;
1230
1415
  }
1231
1416
  const resolvable = createResolvablePromise();
1232
1417
  currentValue = value;
@@ -1234,24 +1419,8 @@ function createStreamableUI(initialValue) {
1234
1419
  resolve = resolvable.resolve;
1235
1420
  reject = resolvable.reject;
1236
1421
  warnUnclosedStream();
1237
- return streamable2;
1422
+ return streamable;
1238
1423
  },
1239
- /**
1240
- * This method is used to append a new UI node to the end of the old one.
1241
- * Once appended a new UI node, the previous UI node cannot be updated anymore.
1242
- *
1243
- * @example
1244
- * ```jsx
1245
- * const ui = createStreamableUI(<div>hello</div>)
1246
- * ui.append(<div>world</div>)
1247
- *
1248
- * // The UI node will be:
1249
- * // <>
1250
- * // <div>hello</div>
1251
- * // <div>world</div>
1252
- * // </>
1253
- * ```
1254
- */
1255
1424
  append(value) {
1256
1425
  assertStream(".append()");
1257
1426
  const resolvable = createResolvablePromise();
@@ -1260,12 +1429,8 @@ function createStreamableUI(initialValue) {
1260
1429
  resolve = resolvable.resolve;
1261
1430
  reject = resolvable.reject;
1262
1431
  warnUnclosedStream();
1263
- return streamable2;
1432
+ return streamable;
1264
1433
  },
1265
- /**
1266
- * This method is used to signal that there is an error in the UI stream.
1267
- * It will be thrown on the client side and caught by the nearest error boundary component.
1268
- */
1269
1434
  error(error) {
1270
1435
  assertStream(".error()");
1271
1436
  if (warningTimeout) {
@@ -1273,30 +1438,238 @@ function createStreamableUI(initialValue) {
1273
1438
  }
1274
1439
  closed = true;
1275
1440
  reject(error);
1276
- return streamable2;
1441
+ return streamable;
1277
1442
  },
1278
- /**
1279
- * This method marks the UI node as finalized. You can either call it without any parameters or with a new UI node as the final state.
1280
- * Once called, the UI node cannot be updated or appended anymore.
1281
- *
1282
- * This method is always **required** to be called, otherwise the response will be stuck in a loading state.
1283
- */
1284
1443
  done(...args) {
1285
1444
  assertStream(".done()");
1286
1445
  if (warningTimeout) {
1287
1446
  clearTimeout(warningTimeout);
1288
1447
  }
1289
- closed = true;
1290
- if (args.length) {
1291
- resolve({ value: args[0], done: true });
1292
- return streamable2;
1448
+ closed = true;
1449
+ if (args.length) {
1450
+ resolve({ value: args[0], done: true });
1451
+ return streamable;
1452
+ }
1453
+ resolve({ value: currentValue, done: true });
1454
+ return streamable;
1455
+ }
1456
+ };
1457
+ return streamable;
1458
+ }
1459
+
1460
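createStreamableUI is the streamable UI helper this file builds on. A hedged usage sketch from a server action (the fetchWeather helper and WeatherCard component are invented); note that .done() must always be called, otherwise the warning timer above fires and the client stays stuck in a loading state:

    async function showWeather() {
      "use server";
      const ui = createStreamableUI(<div>Loading weather…</div>);

      (async () => {
        const weather = await fetchWeather("Berlin");   // hypothetical data fetch
        ui.update(<WeatherCard data={weather} />);      // replaces the current node
        ui.append(<small>Updated just now</small>);     // appends after the last node
        ui.done();                                      // finalizes the stream
      })();

      return ui.value; // can be returned to the client right away
    }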
+ // rsc/stream-ui/stream-ui.tsx
1461
+ var defaultTextRenderer = ({ content }) => content;
1462
+ async function streamUI({
1463
+ model,
1464
+ tools,
1465
+ toolChoice,
1466
+ system,
1467
+ prompt,
1468
+ messages,
1469
+ maxRetries,
1470
+ abortSignal,
1471
+ headers,
1472
+ initial,
1473
+ text,
1474
+ experimental_providerMetadata: providerMetadata,
1475
+ onFinish,
1476
+ ...settings
1477
+ }) {
1478
+ if (typeof model === "string") {
1479
+ throw new Error(
1480
+ "`model` cannot be a string in `streamUI`. Use the actual model instance instead."
1481
+ );
1482
+ }
1483
+ if ("functions" in settings) {
1484
+ throw new Error(
1485
+ "`functions` is not supported in `streamUI`, use `tools` instead."
1486
+ );
1487
+ }
1488
+ if ("provider" in settings) {
1489
+ throw new Error(
1490
+ "`provider` is no longer needed in `streamUI`. Use `model` instead."
1491
+ );
1492
+ }
1493
+ if (tools) {
1494
+ for (const [name9, tool] of Object.entries(tools)) {
1495
+ if ("render" in tool) {
1496
+ throw new Error(
1497
+ "Tool definition in `streamUI` should not have `render` property. Use `generate` instead. Found in tool: " + name9
1498
+ );
1499
+ }
1500
+ }
1501
+ }
1502
+ const ui = createStreamableUI(initial);
1503
+ const textRender = text || defaultTextRenderer;
1504
+ let finished;
1505
+ let finishEvent = null;
1506
+ async function render({
1507
+ args,
1508
+ renderer,
1509
+ streamableUI,
1510
+ isLastCall = false
1511
+ }) {
1512
+ if (!renderer)
1513
+ return;
1514
+ const renderFinished = createResolvablePromise();
1515
+ finished = finished ? finished.then(() => renderFinished.promise) : renderFinished.promise;
1516
+ const rendererResult = renderer(...args);
1517
+ if (isAsyncGenerator(rendererResult) || isGenerator(rendererResult)) {
1518
+ while (true) {
1519
+ const { done, value } = await rendererResult.next();
1520
+ const node = await value;
1521
+ if (isLastCall && done) {
1522
+ streamableUI.done(node);
1523
+ } else {
1524
+ streamableUI.update(node);
1525
+ }
1526
+ if (done)
1527
+ break;
1528
+ }
1529
+ } else {
1530
+ const node = await rendererResult;
1531
+ if (isLastCall) {
1532
+ streamableUI.done(node);
1533
+ } else {
1534
+ streamableUI.update(node);
1535
+ }
1536
+ }
1537
+ renderFinished.resolve(void 0);
1538
+ }
1539
+ const { retry } = prepareRetries({ maxRetries });
1540
+ const validatedPrompt = standardizePrompt({
1541
+ prompt: { system, prompt, messages },
1542
+ tools: void 0
1543
+ // streamUI tools don't support multi-modal tool result conversion
1544
+ });
1545
+ const result = await retry(
1546
+ async () => model.doStream({
1547
+ mode: {
1548
+ type: "regular",
1549
+ ...prepareToolsAndToolChoice({
1550
+ tools,
1551
+ toolChoice,
1552
+ activeTools: void 0
1553
+ })
1554
+ },
1555
+ ...prepareCallSettings(settings),
1556
+ inputFormat: validatedPrompt.type,
1557
+ prompt: await convertToLanguageModelPrompt({
1558
+ prompt: validatedPrompt,
1559
+ modelSupportsImageUrls: model.supportsImageUrls,
1560
+ modelSupportsUrl: model.supportsUrl
1561
+ }),
1562
+ providerMetadata,
1563
+ abortSignal,
1564
+ headers
1565
+ })
1566
+ );
1567
+ const [stream, forkedStream] = result.stream.tee();
1568
+ (async () => {
1569
+ try {
1570
+ let content = "";
1571
+ let hasToolCall = false;
1572
+ const reader = forkedStream.getReader();
1573
+ while (true) {
1574
+ const { done, value } = await reader.read();
1575
+ if (done)
1576
+ break;
1577
+ switch (value.type) {
1578
+ case "text-delta": {
1579
+ content += value.textDelta;
1580
+ render({
1581
+ renderer: textRender,
1582
+ args: [{ content, done: false, delta: value.textDelta }],
1583
+ streamableUI: ui
1584
+ });
1585
+ break;
1586
+ }
1587
+ case "tool-call-delta": {
1588
+ hasToolCall = true;
1589
+ break;
1590
+ }
1591
+ case "tool-call": {
1592
+ const toolName = value.toolName;
1593
+ if (!tools) {
1594
+ throw new NoSuchToolError({ toolName });
1595
+ }
1596
+ const tool = tools[toolName];
1597
+ if (!tool) {
1598
+ throw new NoSuchToolError({
1599
+ toolName,
1600
+ availableTools: Object.keys(tools)
1601
+ });
1602
+ }
1603
+ hasToolCall = true;
1604
+ const parseResult = safeParseJSON({
1605
+ text: value.args,
1606
+ schema: tool.parameters
1607
+ });
1608
+ if (parseResult.success === false) {
1609
+ throw new InvalidToolArgumentsError({
1610
+ toolName,
1611
+ toolArgs: value.args,
1612
+ cause: parseResult.error
1613
+ });
1614
+ }
1615
+ render({
1616
+ renderer: tool.generate,
1617
+ args: [
1618
+ parseResult.value,
1619
+ {
1620
+ toolName,
1621
+ toolCallId: value.toolCallId
1622
+ }
1623
+ ],
1624
+ streamableUI: ui,
1625
+ isLastCall: true
1626
+ });
1627
+ break;
1628
+ }
1629
+ case "error": {
1630
+ throw value.error;
1631
+ }
1632
+ case "finish": {
1633
+ finishEvent = {
1634
+ finishReason: value.finishReason,
1635
+ usage: calculateLanguageModelUsage(value.usage),
1636
+ warnings: result.warnings,
1637
+ rawResponse: result.rawResponse
1638
+ };
1639
+ break;
1640
+ }
1641
+ }
1642
+ }
1643
+ if (!hasToolCall) {
1644
+ render({
1645
+ renderer: textRender,
1646
+ args: [{ content, done: true }],
1647
+ streamableUI: ui,
1648
+ isLastCall: true
1649
+ });
1293
1650
  }
1294
- resolve({ value: currentValue, done: true });
1295
- return streamable2;
1651
+ await finished;
1652
+ if (finishEvent && onFinish) {
1653
+ await onFinish({
1654
+ ...finishEvent,
1655
+ value: ui.value
1656
+ });
1657
+ }
1658
+ } catch (error) {
1659
+ ui.error(error);
1296
1660
  }
1661
+ })();
1662
+ return {
1663
+ ...result,
1664
+ stream,
1665
+ value: ui.value
1297
1666
  };
1298
- return streamable2;
1299
1667
  }
1668
+
1669
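Compared with the removed render() and the older streamUI shown further down in this diff, the reworked streamUI above takes a model instance (never a string), renders text through the text callback and tool calls through each tool's generate callback, and now reports usage and finish reason via onFinish. A usage sketch, assuming the @ai-sdk/openai provider and zod for the tool schema; the weather helper and card component are placeholders:

    import { streamUI } from "ai/rsc";
    import { openai } from "@ai-sdk/openai";
    import { z } from "zod";

    export async function askWeather() {
      "use server";
      const result = await streamUI({
        model: openai("gpt-4o-mini"),            // must be a model instance, not a string
        prompt: "What is the weather in Berlin?",
        text: ({ content, done }) => <p>{content}</p>,
        tools: {
          getWeather: {
            description: "Get the weather for a city",
            parameters: z.object({ city: z.string() }),
            generate: async function* ({ city }) {
              yield <div>Checking the weather for {city}…</div>;
              const data = await fetchWeather(city); // hypothetical helper
              return <WeatherCard data={data} />;
            },
          },
        },
        onFinish: ({ usage, finishReason }) => {
          console.log(finishReason, usage.totalTokens);
        },
      });

      return result.value; // streamable React node, updated as the model responds
    }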
+ // rsc/streamable-value/streamable-value.ts
1670
+ var STREAMABLE_VALUE_TYPE = Symbol.for("ui.streamable.value");
1671
+
1672
+ // rsc/streamable-value/create-streamable-value.ts
1300
1673
  var STREAMABLE_VALUE_INTERNAL_LOCK = Symbol("streamable.value.lock");
1301
1674
  function createStreamableValue(initialValue) {
1302
1675
  const isReadableStream = initialValue instanceof ReadableStream || typeof initialValue === "object" && initialValue !== null && "getReader" in initialValue && typeof initialValue.getReader === "function" && "locked" in initialValue && typeof initialValue.locked === "boolean";
@@ -1356,9 +1729,9 @@ function createStreamableValueImpl(initialValue) {
1356
1729
  }
1357
1730
  warningTimeout = setTimeout(() => {
1358
1731
  console.warn(
1359
- "The streamable UI has been slow to update. This may be a bug or a performance issue or you forgot to call `.done()`."
1732
+ "The streamable value has been slow to update. This may be a bug or a performance issue or you forgot to call `.done()`."
1360
1733
  );
1361
- }, DEV_DEFAULT_STREAMABLE_WARNING_TIME);
1734
+ }, HANGING_STREAM_WARNING_TIME_MS);
1362
1735
  }
1363
1736
  }
1364
1737
  warnUnclosedStream();
@@ -1392,25 +1765,13 @@ function createStreamableValueImpl(initialValue) {
1392
1765
  }
1393
1766
  currentValue = value;
1394
1767
  }
1395
- const streamable2 = {
1396
- /**
1397
- * @internal This is an internal lock to prevent the value from being
1398
- * updated by the user.
1399
- */
1768
+ const streamable = {
1400
1769
  set [STREAMABLE_VALUE_INTERNAL_LOCK](state) {
1401
1770
  locked = state;
1402
1771
  },
1403
- /**
1404
- * The value of the streamable. This can be returned from a Server Action and
1405
- * received by the client. To read the streamed values, use the
1406
- * `readStreamableValue` or `useStreamableValue` APIs.
1407
- */
1408
1772
  get value() {
1409
1773
  return createWrapped(true);
1410
1774
  },
1411
- /**
1412
- * This method updates the current value with a new one.
1413
- */
1414
1775
  update(value) {
1415
1776
  assertStream(".update()");
1416
1777
  const resolvePrevious = resolvable.resolve;
@@ -1419,20 +1780,8 @@ function createStreamableValueImpl(initialValue) {
1419
1780
  currentPromise = resolvable.promise;
1420
1781
  resolvePrevious(createWrapped());
1421
1782
  warnUnclosedStream();
1422
- return streamable2;
1783
+ return streamable;
1423
1784
  },
1424
- /**
1425
- * This method is used to append a delta string to the current value. It
1426
- * requires the current value of the streamable to be a string.
1427
- *
1428
- * @example
1429
- * ```jsx
1430
- * const streamable = createStreamableValue('hello');
1431
- * streamable.append(' world');
1432
- *
1433
- * // The value will be 'hello world'
1434
- * ```
1435
- */
1436
1785
  append(value) {
1437
1786
  assertStream(".append()");
1438
1787
  if (typeof currentValue !== "string" && typeof currentValue !== "undefined") {
@@ -1457,13 +1806,8 @@ function createStreamableValueImpl(initialValue) {
1457
1806
  currentPromise = resolvable.promise;
1458
1807
  resolvePrevious(createWrapped());
1459
1808
  warnUnclosedStream();
1460
- return streamable2;
1809
+ return streamable;
1461
1810
  },
1462
- /**
1463
- * This method is used to signal that there is an error in the value stream.
1464
- * It will be thrown on the client side when consumed via
1465
- * `readStreamableValue` or `useStreamableValue`.
1466
- */
1467
1811
  error(error) {
1468
1812
  assertStream(".error()");
1469
1813
  if (warningTimeout) {
@@ -1473,16 +1817,8 @@ function createStreamableValueImpl(initialValue) {
1473
1817
  currentError = error;
1474
1818
  currentPromise = void 0;
1475
1819
  resolvable.resolve({ error });
1476
- return streamable2;
1820
+ return streamable;
1477
1821
  },
1478
- /**
1479
- * This method marks the value as finalized. You can either call it without
1480
- * any parameters or with a new value as the final state.
1481
- * Once called, the value cannot be updated or appended anymore.
1482
- *
1483
- * This method is always **required** to be called, otherwise the response
1484
- * will be stuck in a loading state.
1485
- */
1486
1822
  done(...args) {
1487
1823
  assertStream(".done()");
1488
1824
  if (warningTimeout) {
@@ -1493,400 +1829,13 @@ function createStreamableValueImpl(initialValue) {
1493
1829
  if (args.length) {
1494
1830
  updateValueStates(args[0]);
1495
1831
  resolvable.resolve(createWrapped());
1496
- return streamable2;
1832
+ return streamable;
1497
1833
  }
1498
1834
  resolvable.resolve({});
1499
- return streamable2;
1500
- }
1501
- };
1502
- return streamable2;
1503
- }
1504
- function render(options) {
1505
- const ui = createStreamableUI(options.initial);
1506
- const text = options.text ? options.text : ({ content }) => content;
1507
- const functions = options.functions ? Object.entries(options.functions).map(
1508
- ([name, { description, parameters }]) => {
1509
- return {
1510
- name,
1511
- description,
1512
- parameters: zodToJsonSchema2(parameters)
1513
- };
1514
- }
1515
- ) : void 0;
1516
- const tools = options.tools ? Object.entries(options.tools).map(
1517
- ([name, { description, parameters }]) => {
1518
- return {
1519
- type: "function",
1520
- function: {
1521
- name,
1522
- description,
1523
- parameters: zodToJsonSchema2(parameters)
1524
- }
1525
- };
1526
- }
1527
- ) : void 0;
1528
- if (functions && tools) {
1529
- throw new Error(
1530
- "You can't have both functions and tools defined. Please choose one or the other."
1531
- );
1532
- }
1533
- let finished;
1534
- async function handleRender(args, renderer, res) {
1535
- if (!renderer)
1536
- return;
1537
- const resolvable = createResolvablePromise();
1538
- if (finished) {
1539
- finished = finished.then(() => resolvable.promise);
1540
- } else {
1541
- finished = resolvable.promise;
1542
- }
1543
- const value = renderer(args);
1544
- if (value instanceof Promise || value && typeof value === "object" && "then" in value && typeof value.then === "function") {
1545
- const node = await value;
1546
- res.update(node);
1547
- resolvable.resolve(void 0);
1548
- } else if (value && typeof value === "object" && Symbol.asyncIterator in value) {
1549
- const it = value;
1550
- while (true) {
1551
- const { done, value: value2 } = await it.next();
1552
- res.update(value2);
1553
- if (done)
1554
- break;
1555
- }
1556
- resolvable.resolve(void 0);
1557
- } else if (value && typeof value === "object" && Symbol.iterator in value) {
1558
- const it = value;
1559
- while (true) {
1560
- const { done, value: value2 } = it.next();
1561
- res.update(value2);
1562
- if (done)
1563
- break;
1564
- }
1565
- resolvable.resolve(void 0);
1566
- } else {
1567
- res.update(value);
1568
- resolvable.resolve(void 0);
1569
- }
1570
- }
1571
- (async () => {
1572
- let hasFunction = false;
1573
- let content = "";
1574
- consumeStream(
1575
- OpenAIStream(
1576
- await options.provider.chat.completions.create({
1577
- model: options.model,
1578
- messages: options.messages,
1579
- temperature: options.temperature,
1580
- stream: true,
1581
- ...functions ? {
1582
- functions
1583
- } : {},
1584
- ...tools ? {
1585
- tools
1586
- } : {}
1587
- }),
1588
- {
1589
- ...functions ? {
1590
- async experimental_onFunctionCall(functionCallPayload) {
1591
- var _a, _b;
1592
- hasFunction = true;
1593
- handleRender(
1594
- functionCallPayload.arguments,
1595
- (_b = (_a = options.functions) == null ? void 0 : _a[functionCallPayload.name]) == null ? void 0 : _b.render,
1596
- ui
1597
- );
1598
- }
1599
- } : {},
1600
- ...tools ? {
1601
- async experimental_onToolCall(toolCallPayload) {
1602
- var _a, _b;
1603
- hasFunction = true;
1604
- for (const tool of toolCallPayload.tools) {
1605
- handleRender(
1606
- tool.func.arguments,
1607
- (_b = (_a = options.tools) == null ? void 0 : _a[tool.func.name]) == null ? void 0 : _b.render,
1608
- ui
1609
- );
1610
- }
1611
- }
1612
- } : {},
1613
- onText(chunk) {
1614
- content += chunk;
1615
- handleRender({ content, done: false, delta: chunk }, text, ui);
1616
- },
1617
- async onFinal() {
1618
- if (hasFunction) {
1619
- await finished;
1620
- ui.done();
1621
- return;
1622
- }
1623
- handleRender({ content, done: true }, text, ui);
1624
- await finished;
1625
- ui.done();
1626
- }
1627
- }
1628
- )
1629
- );
1630
- })();
1631
- return ui.value;
1632
- }
1633
-
1634
- // rsc/stream-ui/stream-ui.tsx
1635
- import {
1636
- InvalidToolArgumentsError,
1637
- NoSuchToolError
1638
- } from "@ai-sdk/provider";
1639
- import { safeParseJSON } from "@ai-sdk/provider-utils";
1640
- var defaultTextRenderer = ({ content }) => content;
1641
- async function streamUI({
1642
- model,
1643
- tools,
1644
- system,
1645
- prompt,
1646
- messages,
1647
- maxRetries,
1648
- abortSignal,
1649
- initial,
1650
- text,
1651
- ...settings
1652
- }) {
1653
- if (typeof model === "string") {
1654
- throw new Error(
1655
- "`model` cannot be a string in `streamUI`. Use the actual model instance instead."
1656
- );
1657
- }
1658
- if ("functions" in settings) {
1659
- throw new Error(
1660
- "`functions` is not supported in `streamUI`, use `tools` instead."
1661
- );
1662
- }
1663
- if ("provider" in settings) {
1664
- throw new Error(
1665
- "`provider` is no longer needed in `streamUI`. Use `model` instead."
1666
- );
1667
- }
1668
- if (tools) {
1669
- for (const [name, tool] of Object.entries(tools)) {
1670
- if ("render" in tool) {
1671
- throw new Error(
1672
- "Tool definition in `streamUI` should not have `render` property. Use `generate` instead. Found in tool: " + name
1673
- );
1674
- }
1675
- }
1676
- }
1677
- const ui = createStreamableUI(initial);
1678
- const textRender = text || defaultTextRenderer;
1679
- let finished;
1680
- async function handleRender(args, renderer, res) {
1681
- if (!renderer)
1682
- return;
1683
- const resolvable = createResolvablePromise();
1684
- if (finished) {
1685
- finished = finished.then(() => resolvable.promise);
1686
- } else {
1687
- finished = resolvable.promise;
1688
- }
1689
- const value = renderer(...args);
1690
- if (value instanceof Promise || value && typeof value === "object" && "then" in value && typeof value.then === "function") {
1691
- const node = await value;
1692
- res.update(node);
1693
- resolvable.resolve(void 0);
1694
- } else if (value && typeof value === "object" && Symbol.asyncIterator in value) {
1695
- const it = value;
1696
- while (true) {
1697
- const { done, value: value2 } = await it.next();
1698
- res.update(value2);
1699
- if (done)
1700
- break;
1701
- }
1702
- resolvable.resolve(void 0);
1703
- } else if (value && typeof value === "object" && Symbol.iterator in value) {
1704
- const it = value;
1705
- while (true) {
1706
- const { done, value: value2 } = it.next();
1707
- res.update(value2);
1708
- if (done)
1709
- break;
1710
- }
1711
- resolvable.resolve(void 0);
1712
- } else {
1713
- res.update(value);
1714
- resolvable.resolve(void 0);
1715
- }
1716
- }
1717
- const retry = retryWithExponentialBackoff({ maxRetries });
1718
- const validatedPrompt = getValidatedPrompt({ system, prompt, messages });
1719
- const result = await retry(
1720
- () => model.doStream({
1721
- mode: {
1722
- type: "regular",
1723
- tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool]) => ({
1724
- type: "function",
1725
- name,
1726
- description: tool.description,
1727
- parameters: convertZodToJSONSchema(tool.parameters)
1728
- }))
1729
- },
1730
- ...prepareCallSettings(settings),
1731
- inputFormat: validatedPrompt.type,
1732
- prompt: convertToLanguageModelPrompt(validatedPrompt),
1733
- abortSignal
1734
- })
1735
- );
1736
- const [stream, forkedStream] = result.stream.tee();
1737
- (async () => {
1738
- try {
1739
- let content = "";
1740
- let hasToolCall = false;
1741
- const reader = forkedStream.getReader();
1742
- while (true) {
1743
- const { done, value } = await reader.read();
1744
- if (done)
1745
- break;
1746
- switch (value.type) {
1747
- case "text-delta": {
1748
- content += value.textDelta;
1749
- handleRender(
1750
- [{ content, done: false, delta: value.textDelta }],
1751
- textRender,
1752
- ui
1753
- );
1754
- break;
1755
- }
1756
- case "tool-call-delta": {
1757
- hasToolCall = true;
1758
- break;
1759
- }
1760
- case "tool-call": {
1761
- const toolName = value.toolName;
1762
- if (!tools) {
1763
- throw new NoSuchToolError({ toolName });
1764
- }
1765
- const tool = tools[toolName];
1766
- if (!tool) {
1767
- throw new NoSuchToolError({
1768
- toolName,
1769
- availableTools: Object.keys(tools)
1770
- });
1771
- }
1772
- const parseResult = safeParseJSON({
1773
- text: value.args,
1774
- schema: tool.parameters
1775
- });
1776
- if (parseResult.success === false) {
1777
- throw new InvalidToolArgumentsError({
1778
- toolName,
1779
- toolArgs: value.args,
1780
- cause: parseResult.error
1781
- });
1782
- }
1783
- handleRender(
1784
- [
1785
- parseResult.value,
1786
- {
1787
- toolName,
1788
- toolCallId: value.toolCallId
1789
- }
1790
- ],
1791
- tool.generate,
1792
- ui
1793
- );
1794
- break;
1795
- }
1796
- case "error": {
1797
- throw value.error;
1798
- }
1799
- case "finish": {
1800
- }
1801
- }
1802
- }
1803
- if (hasToolCall) {
1804
- await finished;
1805
- ui.done();
1806
- } else {
1807
- handleRender([{ content, done: true }], textRender, ui);
1808
- await finished;
1809
- ui.done();
1810
- }
1811
- } catch (error) {
1812
- ui.error(error);
1813
- }
1814
- })();
1815
- return {
1816
- ...result,
1817
- stream,
1818
- value: ui.value
1819
- };
1820
- }
1821
-
1822
- // rsc/provider.tsx
1823
- import * as React2 from "react";
1824
- import { InternalAIProvider } from "./rsc-shared.mjs";
1825
- import { jsx as jsx2 } from "react/jsx-runtime";
1826
- async function innerAction({
1827
- action,
1828
- options
1829
- }, state, ...args) {
1830
- "use server";
1831
- return await withAIState(
1832
- {
1833
- state,
1834
- options
1835
- },
1836
- async () => {
1837
- const result = await action(...args);
1838
- sealMutableAIState();
1839
- return [getAIStateDeltaPromise(), result];
1840
- }
1841
- );
1842
- }
1843
- function wrapAction(action, options) {
1844
- return innerAction.bind(null, { action, options });
1845
- }
1846
- function createAI({
1847
- actions,
1848
- initialAIState,
1849
- initialUIState,
1850
- onSetAIState,
1851
- onGetUIState
1852
- }) {
1853
- const wrappedActions = {};
1854
- for (const name in actions) {
1855
- wrappedActions[name] = wrapAction(actions[name], {
1856
- onSetAIState
1857
- });
1858
- }
1859
- const wrappedSyncUIState = onGetUIState ? wrapAction(onGetUIState, {}) : void 0;
1860
- const AI = async (props) => {
1861
- var _a, _b;
1862
- if ("useState" in React2) {
1863
- throw new Error(
1864
- "This component can only be used inside Server Components."
1865
- );
1866
- }
1867
- let uiState = (_a = props.initialUIState) != null ? _a : initialUIState;
1868
- let aiState = (_b = props.initialAIState) != null ? _b : initialAIState;
1869
- let aiStateDelta = void 0;
1870
- if (wrappedSyncUIState) {
1871
- const [newAIStateDelta, newUIState] = await wrappedSyncUIState(aiState);
1872
- if (newUIState !== void 0) {
1873
- aiStateDelta = newAIStateDelta;
1874
- uiState = newUIState;
1875
- }
1835
+ return streamable;
1876
1836
  }
1877
- return /* @__PURE__ */ jsx2(
1878
- InternalAIProvider,
1879
- {
1880
- wrappedActions,
1881
- wrappedSyncUIState,
1882
- initialUIState: uiState,
1883
- initialAIState: aiState,
1884
- initialAIStatePatch: aiStateDelta,
1885
- children: props.children
1886
- }
1887
- );
1888
1837
  };
1889
- return AI;
1838
+ return streamable;
1890
1839
  }
1891
1840
  export {
1892
1841
  createAI,
@@ -1894,7 +1843,6 @@ export {
1894
1843
  createStreamableValue,
1895
1844
  getAIState,
1896
1845
  getMutableAIState,
1897
- render,
1898
1846
  streamUI
1899
1847
  };
1900
1848
  //# sourceMappingURL=rsc-server.mjs.map