ai 5.0.0-alpha.4 → 5.0.0-alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -47,6 +47,7 @@ __export(src_exports, {
  NoSuchToolError: () => NoSuchToolError,
  Output: () => output_exports,
  RetryError: () => RetryError,
+ SerialJobExecutor: () => SerialJobExecutor,
  TextStreamChatTransport: () => TextStreamChatTransport,
  ToolCallRepairError: () => ToolCallRepairError,
  ToolExecutionError: () => ToolExecutionError,
@@ -72,7 +73,7 @@ __export(src_exports, {
  createUIMessageStream: () => createUIMessageStream,
  createUIMessageStreamResponse: () => createUIMessageStreamResponse,
  customProvider: () => customProvider,
- defaultChatStore: () => defaultChatStore,
+ defaultChatStoreOptions: () => defaultChatStoreOptions,
  defaultSettingsMiddleware: () => defaultSettingsMiddleware,
  embed: () => embed,
  embedMany: () => embedMany,
@@ -1048,12 +1049,12 @@ function getToolInvocations(message) {
  // src/ui/process-ui-message-stream.ts
  function createStreamingUIMessageState({
  lastMessage,
- newMessageId = "no-id"
+ newMessageId = ""
  } = {}) {
  var _a17;
  const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
  const step = isContinuation ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
- const message = isContinuation ? structuredClone(lastMessage) : {
+ const message = isContinuation ? lastMessage : {
  id: newMessageId,
  metadata: {},
  role: "assistant",
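
Note: two behavioral changes land in the hunk above. The "no-id" placeholder default becomes an empty string (callers such as consumeUIMessageStream and ChatStore.triggerRequest now always pass a generated id), and dropping structuredClone means a continuation now mutates the caller's lastMessage in place — callers that need isolation must clone before calling, which is exactly what the consumeUIMessageStream and chat.snapshot hunks below do.
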
@@ -1433,7 +1434,7 @@ async function consumeUIMessageStream({
  messageMetadataSchema
  }) {
  const state = createStreamingUIMessageState({
- lastMessage,
+ lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
  newMessageId: generateId3()
  });
  const runUpdateMessageJob = async (job) => {
@@ -1634,38 +1635,6 @@ async function callCompletionApi({
  // src/ui/chat-store.ts
  var import_provider_utils5 = require("@ai-sdk/provider-utils");

- // src/util/serial-job-executor.ts
- var SerialJobExecutor = class {
- constructor() {
- this.queue = [];
- this.isProcessing = false;
- }
- async processQueue() {
- if (this.isProcessing) {
- return;
- }
- this.isProcessing = true;
- while (this.queue.length > 0) {
- await this.queue[0]();
- this.queue.shift();
- }
- this.isProcessing = false;
- }
- async run(job) {
- return new Promise((resolve, reject) => {
- this.queue.push(async () => {
- try {
- await job();
- resolve();
- } catch (error) {
- reject(error);
- }
- });
- void this.processQueue();
- });
- }
- };
-
  // src/ui/should-resubmit-messages.ts
  function shouldResubmitMessages({
  originalMaxToolInvocationStep,
@@ -1723,18 +1692,14 @@ var ChatStore = class {
  transport,
  maxSteps = 1,
  messageMetadataSchema,
- dataPartSchemas
+ dataPartSchemas,
+ createChat
  }) {
+ this.createChat = createChat;
  this.chats = new Map(
- Object.entries(chats).map(([id, state]) => [
+ Object.entries(chats).map(([id, chat]) => [
  id,
- {
- messages: [...state.messages],
- status: "ready",
- activeResponse: void 0,
- error: void 0,
- jobExecutor: new SerialJobExecutor()
- }
+ this.createChat({ messages: chat.messages })
  ])
  );
  this.maxSteps = maxSteps;
@@ -1748,11 +1713,7 @@ var ChatStore = class {
  return this.chats.has(id);
  }
  addChat(id, messages) {
- this.chats.set(id, {
- messages,
- status: "ready",
- jobExecutor: new SerialJobExecutor()
- });
+ this.chats.set(id, this.createChat({ messages }));
  }
  getChats() {
  return Array.from(this.chats.entries());
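
Note: ChatStore no longer builds chat state inline; both the constructor and addChat delegate to an injected createChat factory. A minimal sketch of a factory satisfying the interface this diff exercises — the method names (setStatus, setError, setMessages, pushMessage, popMessage, replaceMessage, setActiveResponse, the optional snapshot) and fields (messages, status, error, activeResponse, jobExecutor) are taken from call sites in the hunks below; everything else is an assumption:

    function createSimpleChat({ messages = [] } = {}) {
      const state = {
        messages: [...messages],
        status: "ready",
        error: void 0,
        activeResponse: void 0,
        jobExecutor: new SerialJobExecutor(),
        setStatus: (status) => { state.status = status; },
        setError: (error) => { state.error = error; },
        setMessages: (messages2) => { state.messages = [...messages2]; },
        pushMessage: (message) => { state.messages.push(message); },
        popMessage: () => { state.messages.pop(); },
        replaceMessage: (index, message) => { state.messages[index] = message; },
        setActiveResponse: (response) => { state.activeResponse = response; },
        snapshot: (value) => structuredClone(value)
      };
      return state;
    }
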
@@ -1761,28 +1722,28 @@ var ChatStore = class {
  return this.chats.size;
  }
  getStatus(id) {
- return this.getChat(id).status;
+ return this.getChatState(id).status;
  }
  setStatus({
  id,
  status,
  error
  }) {
- const chat = this.getChat(id);
- if (chat.status === status)
+ const state = this.getChatState(id);
+ if (state.status === status)
  return;
- chat.status = status;
- chat.error = error;
+ state.setStatus(status);
+ state.setError(error);
  this.emit({ type: "chat-status-changed", chatId: id, error });
  }
  getError(id) {
- return this.getChat(id).error;
+ return this.getChatState(id).error;
  }
  getMessages(id) {
- return this.getChat(id).messages;
+ return this.getChatState(id).messages;
  }
  getLastMessage(id) {
- const chat = this.getChat(id);
+ const chat = this.getChatState(id);
  return chat.messages[chat.messages.length - 1];
  }
  subscribe(subscriber) {
@@ -1793,11 +1754,11 @@ var ChatStore = class {
  id,
  messages
  }) {
- this.getChat(id).messages = [...messages];
+ this.getChatState(id).setMessages(messages);
  this.emit({ type: "chat-messages-changed", chatId: id });
  }
  removeAssistantResponse(id) {
- const chat = this.getChat(id);
+ const chat = this.getChatState(id);
  const lastMessage = chat.messages[chat.messages.length - 1];
  if (lastMessage == null) {
  throw new Error("Cannot remove assistant response from empty chat");
@@ -1805,7 +1766,8 @@ var ChatStore = class {
  if (lastMessage.role !== "assistant") {
  throw new Error("Last message is not an assistant message");
  }
- this.setMessages({ id, messages: chat.messages.slice(0, -1) });
+ chat.popMessage();
+ this.emit({ type: "chat-messages-changed", chatId: id });
  }
  async submitMessage({
  chatId,
@@ -1817,14 +1779,14 @@ var ChatStore = class {
  onFinish
  }) {
  var _a17;
- const chat = this.getChat(chatId);
- const currentMessages = chat.messages;
+ const state = this.getChatState(chatId);
+ state.pushMessage({ ...message, id: (_a17 = message.id) != null ? _a17 : this.generateId() });
+ this.emit({
+ type: "chat-messages-changed",
+ chatId
+ });
  await this.triggerRequest({
  chatId,
- messages: currentMessages.concat({
- ...message,
- id: (_a17 = message.id) != null ? _a17 : this.generateId()
- }),
  headers,
  body,
  requestType: "generate",
@@ -1841,15 +1803,20 @@ var ChatStore = class {
  onToolCall,
  onFinish
  }) {
- const messages = this.getChat(chatId).messages;
- const messagesToSubmit = messages[messages.length - 1].role === "assistant" ? messages.slice(0, -1) : messages;
- if (messagesToSubmit.length === 0) {
+ const chat = this.getChatState(chatId);
+ if (chat.messages[chat.messages.length - 1].role === "assistant") {
+ chat.popMessage();
+ this.emit({
+ type: "chat-messages-changed",
+ chatId
+ });
+ }
+ if (chat.messages.length === 0) {
  return;
  }
  return this.triggerRequest({
  chatId,
  requestType: "generate",
- messages: messagesToSubmit,
  headers,
  body,
  onError,
@@ -1865,11 +1832,8 @@ var ChatStore = class {
  onToolCall,
  onFinish
  }) {
- const chat = this.getChat(chatId);
- const currentMessages = chat.messages;
  return this.triggerRequest({
  chatId,
- messages: currentMessages,
  requestType: "resume",
  headers,
  body,
@@ -1883,22 +1847,23 @@ var ChatStore = class {
  toolCallId,
  result
  }) {
- const chat = this.getChat(chatId);
+ const chat = this.getChatState(chatId);
  chat.jobExecutor.run(async () => {
- const currentMessages = chat.messages;
  updateToolCallResult({
- messages: currentMessages,
+ messages: chat.messages,
  toolCallId,
  toolResult: result
  });
- this.setMessages({ id: chatId, messages: currentMessages });
+ this.setMessages({
+ id: chatId,
+ messages: chat.messages
+ });
  if (chat.status === "submitted" || chat.status === "streaming") {
  return;
  }
- const lastMessage = currentMessages[currentMessages.length - 1];
+ const lastMessage = chat.messages[chat.messages.length - 1];
  if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
- await this.triggerRequest({
- messages: currentMessages,
+ this.triggerRequest({
  requestType: "generate",
  chatId
  });
@@ -1907,7 +1872,7 @@ var ChatStore = class {
  }
  async stopStream({ chatId }) {
  var _a17;
- const chat = this.getChat(chatId);
+ const chat = this.getChatState(chatId);
  if (chat.status !== "streaming" && chat.status !== "submitted")
  return;
  if ((_a17 = chat.activeResponse) == null ? void 0 : _a17.abortController) {
@@ -1920,15 +1885,14 @@ var ChatStore = class {
  subscriber.onChatChanged(event);
  }
  }
- getChat(id) {
+ getChatState(id) {
  if (!this.hasChat(id)) {
- throw new Error(`chat '${id}' not found`);
+ this.addChat(id, []);
  }
  return this.chats.get(id);
  }
  async triggerRequest({
  chatId,
- messages: chatMessages,
  requestType,
  headers,
  body,
@@ -1936,26 +1900,25 @@ var ChatStore = class {
  onToolCall,
  onFinish
  }) {
- const self = this;
- const chat = this.getChat(chatId);
- this.setMessages({ id: chatId, messages: chatMessages });
+ const chat = this.getChatState(chatId);
  this.setStatus({ id: chatId, status: "submitted", error: void 0 });
- const messageCount = chatMessages.length;
+ const messageCount = chat.messages.length;
  const maxStep = extractMaxToolInvocationStep(
- getToolInvocations(chatMessages[chatMessages.length - 1])
+ getToolInvocations(chat.messages[chat.messages.length - 1])
  );
  try {
+ const lastMessage = chat.messages[chat.messages.length - 1];
  const activeResponse = {
  state: createStreamingUIMessageState({
- lastMessage: chatMessages[chatMessages.length - 1],
- newMessageId: self.generateId()
+ lastMessage: chat.snapshot ? chat.snapshot(lastMessage) : lastMessage,
+ newMessageId: this.generateId()
  }),
  abortController: new AbortController()
  };
- chat.activeResponse = activeResponse;
- const stream = await self.transport.submitMessages({
+ chat.setActiveResponse(activeResponse);
+ const stream = await this.transport.submitMessages({
  chatId,
- messages: chatMessages,
+ messages: chat.messages,
  body,
  headers,
  abortController: activeResponse.abortController,
@@ -1967,15 +1930,19 @@ var ChatStore = class {
  () => job({
  state: activeResponse.state,
  write: () => {
- self.setStatus({ id: chatId, status: "streaming" });
- const replaceLastMessage = activeResponse.state.message.id === chatMessages[chatMessages.length - 1].id;
- const newMessages = [
- ...replaceLastMessage ? chatMessages.slice(0, chatMessages.length - 1) : chatMessages,
- activeResponse.state.message
- ];
- self.setMessages({
- id: chatId,
- messages: newMessages
+ this.setStatus({ id: chatId, status: "streaming" });
+ const replaceLastMessage = activeResponse.state.message.id === chat.messages[chat.messages.length - 1].id;
+ if (replaceLastMessage) {
+ chat.replaceMessage(
+ chat.messages.length - 1,
+ activeResponse.state.message
+ );
+ } else {
+ chat.pushMessage(activeResponse.state.message);
+ }
+ this.emit({
+ type: "chat-messages-changed",
+ chatId
  });
  }
  })
@@ -1985,8 +1952,8 @@ var ChatStore = class {
  stream: processUIMessageStream({
  stream,
  onToolCall,
- messageMetadataSchema: self.messageMetadataSchema,
- dataPartSchemas: self.dataPartSchemas,
+ messageMetadataSchema: this.messageMetadataSchema,
+ dataPartSchemas: this.dataPartSchemas,
  runUpdateMessageJob
  }),
  onError: (error) => {
@@ -2005,24 +1972,22 @@ var ChatStore = class {
  }
  this.setStatus({ id: chatId, status: "error", error: err });
  } finally {
- chat.activeResponse = void 0;
+ chat.setActiveResponse(void 0);
  }
- const currentMessages = self.getMessages(chatId);
  if (shouldResubmitMessages({
  originalMaxToolInvocationStep: maxStep,
  originalMessageCount: messageCount,
- maxSteps: self.maxSteps,
- messages: currentMessages
+ maxSteps: this.maxSteps,
+ messages: chat.messages
  })) {
- await self.triggerRequest({
+ await this.triggerRequest({
  chatId,
  requestType,
  onError,
  onToolCall,
  onFinish,
  headers,
- body,
- messages: currentMessages
+ body
  });
  }
  }
@@ -2314,22 +2279,22 @@ function convertToModelMessages(messages, options) {
  }
  var convertToCoreMessages = convertToModelMessages;

- // src/ui/default-chat-store.ts
+ // src/ui/default-chat-store-options.ts
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
- function defaultChatStore({
- api,
+ function defaultChatStoreOptions({
+ api = "/api/chat",
  fetch: fetch2,
  credentials,
  headers,
  body,
  prepareRequestBody,
  generateId: generateId3 = import_provider_utils6.generateId,
- dataPartSchemas,
  messageMetadataSchema,
  maxSteps = 1,
+ dataPartSchemas,
  chats
  }) {
- return new ChatStore({
+ return () => ({
  transport: new DefaultChatTransport({
  api,
  fetch: fetch2,
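
Note: the rename is also a signature change. defaultChatStore returned a ChatStore instance; defaultChatStoreOptions returns a zero-argument factory of ChatStore options, and api now defaults to "/api/chat". A migration sketch — the consumer-side wiring is assumed, since this hunk only shows the factory itself:

    // before (alpha.4): const store = defaultChatStore({ api: "/api/chat" });
    // after (alpha.6):
    const createOptions = defaultChatStoreOptions({}); // api defaults to "/api/chat"
    const store = new ChatStore(createOptions()); // construction site assumed, not shown here
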
@@ -2346,11 +2311,57 @@ function defaultChatStore({
  });
  }

+ // src/ui-message-stream/handle-ui-message-stream-finish.ts
+ function handleUIMessageStreamFinish({
+ newMessageId,
+ originalMessages = [],
+ onFinish,
+ stream
+ }) {
+ if (onFinish == null) {
+ return stream;
+ }
+ const lastMessage = originalMessages[originalMessages.length - 1];
+ const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
+ const messageId = isContinuation ? lastMessage.id : newMessageId;
+ const state = createStreamingUIMessageState({
+ lastMessage: structuredClone(lastMessage),
+ newMessageId: messageId
+ });
+ const runUpdateMessageJob = async (job) => {
+ await job({ state, write: () => {
+ } });
+ };
+ return processUIMessageStream({
+ stream,
+ runUpdateMessageJob
+ }).pipeThrough(
+ new TransformStream({
+ transform(chunk, controller) {
+ controller.enqueue(chunk);
+ },
+ flush() {
+ const isContinuation2 = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
+ onFinish({
+ isContinuation: isContinuation2,
+ responseMessage: state.message,
+ messages: [
+ ...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
+ state.message
+ ]
+ });
+ }
+ })
+ );
+ }
+
  // src/ui-message-stream/create-ui-message-stream.ts
  function createUIMessageStream({
  execute,
- onError = () => "An error occurred."
+ onError = () => "An error occurred.",
  // mask error messages for safety by default
+ originalMessages,
+ onFinish
  }) {
  let controller;
  const ongoingStreamPromises = [];
@@ -2367,25 +2378,27 @@ function createUIMessageStream({
  }
  try {
  const result = execute({
- write(part) {
- safeEnqueue(part);
- },
- merge(streamArg) {
- ongoingStreamPromises.push(
- (async () => {
- const reader = streamArg.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done)
- break;
- safeEnqueue(value);
- }
- })().catch((error) => {
- safeEnqueue({ type: "error", errorText: onError(error) });
- })
- );
- },
- onError
+ writer: {
+ write(part) {
+ safeEnqueue(part);
+ },
+ merge(streamArg) {
+ ongoingStreamPromises.push(
+ (async () => {
+ const reader = streamArg.getReader();
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done)
+ break;
+ safeEnqueue(value);
+ }
+ })().catch((error) => {
+ safeEnqueue({ type: "error", errorText: onError(error) });
+ })
+ );
+ },
+ onError
+ }
  });
  if (result) {
  ongoingStreamPromises.push(
@@ -2409,7 +2422,12 @@ function createUIMessageStream({
  } catch (error) {
  }
  });
- return stream;
+ return handleUIMessageStreamFinish({
+ stream,
+ newMessageId: "",
+ originalMessages,
+ onFinish
+ });
  }

  // src/ui-message-stream/ui-message-stream-headers.ts
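
Note: the three hunks above change createUIMessageStream in two visible ways — execute now receives its helpers under a writer property instead of as top-level arguments, and the new originalMessages/onFinish options route the finished stream through handleUIMessageStreamFinish. A usage sketch; the part shape and the persistence helper are assumptions, not shown in this diff:

    const stream = createUIMessageStream({
      originalMessages, // prior UIMessages, e.g. parsed from the request body
      execute({ writer }) { // was: execute({ write, merge, onError })
        writer.write({ type: "text", text: "Hello!" }); // part shape assumed
      },
      onFinish({ messages, responseMessage, isContinuation }) {
        saveChat({ messages }); // hypothetical persistence helper
      }
    });
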
@@ -2474,6 +2492,32 @@ function pipeUIMessageStreamToResponse({
  });
  }

+ // src/util/cosine-similarity.ts
+ function cosineSimilarity(vector1, vector2) {
+ if (vector1.length !== vector2.length) {
+ throw new InvalidArgumentError({
+ parameter: "vector1,vector2",
+ value: { vector1Length: vector1.length, vector2Length: vector2.length },
+ message: `Vectors must have the same length`
+ });
+ }
+ const n = vector1.length;
+ if (n === 0) {
+ return 0;
+ }
+ let magnitudeSquared1 = 0;
+ let magnitudeSquared2 = 0;
+ let dotProduct = 0;
+ for (let i = 0; i < n; i++) {
+ const value1 = vector1[i];
+ const value2 = vector2[i];
+ magnitudeSquared1 += value1 * value1;
+ magnitudeSquared2 += value2 * value2;
+ dotProduct += value1 * value2;
+ }
+ return magnitudeSquared1 === 0 || magnitudeSquared2 === 0 ? 0 : dotProduct / (Math.sqrt(magnitudeSquared1) * Math.sqrt(magnitudeSquared2));
+ }
+
  // src/util/data-url.ts
  function getTextFromDataUrl(dataUrl) {
  const [header, base64Content] = dataUrl.split(",");
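
Note: cosineSimilarity is only relocated (its removal from the old position follows in the next hunk); behavior is unchanged. For reference, a usage sketch:

    const { cosineSimilarity } = require("ai");
    cosineSimilarity([1, 0], [1, 0]); // 1 (parallel)
    cosineSimilarity([1, 0], [0, 1]); // 0 (orthogonal)
    cosineSimilarity([], []);         // 0 (empty and zero-magnitude vectors yield 0)
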
@@ -2523,31 +2567,37 @@ function isDeepEqualData(obj1, obj2) {
  return true;
  }

- // src/util/cosine-similarity.ts
- function cosineSimilarity(vector1, vector2) {
- if (vector1.length !== vector2.length) {
- throw new InvalidArgumentError({
- parameter: "vector1,vector2",
- value: { vector1Length: vector1.length, vector2Length: vector2.length },
- message: `Vectors must have the same length`
- });
+ // src/util/serial-job-executor.ts
+ var SerialJobExecutor = class {
+ constructor() {
+ this.queue = [];
+ this.isProcessing = false;
  }
- const n = vector1.length;
- if (n === 0) {
- return 0;
+ async processQueue() {
+ if (this.isProcessing) {
+ return;
+ }
+ this.isProcessing = true;
+ while (this.queue.length > 0) {
+ await this.queue[0]();
+ this.queue.shift();
+ }
+ this.isProcessing = false;
  }
- let magnitudeSquared1 = 0;
- let magnitudeSquared2 = 0;
- let dotProduct = 0;
- for (let i = 0; i < n; i++) {
- const value1 = vector1[i];
- const value2 = vector2[i];
- magnitudeSquared1 += value1 * value1;
- magnitudeSquared2 += value2 * value2;
- dotProduct += value1 * value2;
+ async run(job) {
+ return new Promise((resolve, reject) => {
+ this.queue.push(async () => {
+ try {
+ await job();
+ resolve();
+ } catch (error) {
+ reject(error);
+ }
+ });
+ void this.processQueue();
+ });
  }
- return magnitudeSquared1 === 0 || magnitudeSquared2 === 0 ? 0 : dotProduct / (Math.sqrt(magnitudeSquared1) * Math.sqrt(magnitudeSquared2));
- }
+ };

  // src/util/simulate-readable-stream.ts
  var import_provider_utils7 = require("@ai-sdk/provider-utils");
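
Note: SerialJobExecutor moves out of the chat-store section and, per the export hunks at the top and bottom of this diff, joins the public API. A usage sketch:

    const { SerialJobExecutor } = require("ai");
    const executor = new SerialJobExecutor();
    // run() resolves when its job settles; jobs execute strictly one at a
    // time, in submission order, even when enqueued concurrently:
    await Promise.all([
      executor.run(async () => { /* first job */ }),
      executor.run(async () => { /* starts only after the first job settles */ })
    ]);
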
@@ -3541,6 +3591,15 @@ function convertToLanguageModelV2DataContent(content) {
  }
  return { data: content, mediaType: void 0 };
  }
+ function convertDataContentToBase64String(content) {
+ if (typeof content === "string") {
+ return content;
+ }
+ if (content instanceof ArrayBuffer) {
+ return (0, import_provider_utils11.convertUint8ArrayToBase64)(new Uint8Array(content));
+ }
+ return (0, import_provider_utils11.convertUint8ArrayToBase64)(content);
+ }
  function convertDataContentToUint8Array(content) {
  if (content instanceof Uint8Array) {
  return content;
@@ -4058,6 +4117,21 @@ async function standardizePrompt(prompt) {
  };
  }

+ // core/telemetry/stringify-for-telemetry.ts
+ function stringifyForTelemetry(prompt) {
+ return JSON.stringify(
+ prompt.map((message) => ({
+ ...message,
+ content: typeof message.content === "string" ? message.content : message.content.map(
+ (part) => part.type === "file" ? {
+ ...part,
+ data: part.data instanceof Uint8Array ? convertDataContentToBase64String(part.data) : part.data
+ } : part
+ )
+ }))
+ );
+ }
+
  // core/generate-object/output-strategy.ts
  var import_provider20 = require("@ai-sdk/provider");
  var import_provider_utils14 = require("@ai-sdk/provider-utils");
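
Note: the "ai.prompt.messages" telemetry hunks below switch from JSON.stringify to this helper so binary file parts serialize compactly. A sketch of the difference; the prompt shape is assumed:

    const prompt = [{
      role: "user",
      content: [{ type: "file", mediaType: "image/png", data: new Uint8Array([137, 80]) }]
    }];
    // JSON.stringify(prompt) renders data as {"0":137,"1":80};
    // stringifyForTelemetry(prompt) renders it as the base64 string "iVA=".
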
@@ -4451,11 +4525,17 @@ function validateObjectGenerationInput({
  }
  }

+ // core/prompt/resolve-language-model.ts
+ var import_gateway = require("@ai-sdk/gateway");
+ function resolveLanguageModel(model) {
+ return typeof model === "string" ? import_gateway.gateway.languageModel(model) : model;
+ }
+
  // core/generate-object/generate-object.ts
  var originalGenerateId = (0, import_provider_utils15.createIdGenerator)({ prefix: "aiobj", size: 24 });
  async function generateObject(options) {
  const {
- model,
+ model: modelArg,
  output = "object",
  system,
  prompt,
@@ -4472,6 +4552,7 @@ async function generateObject(options) {
  } = {},
  ...settings
  } = options;
+ const model = resolveLanguageModel(modelArg);
  const enumValues = "enum" in options ? options.enum : void 0;
  const {
  schema: inputSchema,
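
Note: with resolveLanguageModel in place, generateObject — and, per the matching hunks below, streamObject, generateText, and streamText — accepts a plain string model id, resolved through the @ai-sdk/gateway provider. A sketch; the model id is illustrative:

    const { generateText } = require("ai");
    const { text } = await generateText({
      model: "openai/gpt-4o", // string → gateway.languageModel("openai/gpt-4o")
      prompt: "Say hello."
    });
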
@@ -4550,7 +4631,7 @@ async function generateObject(options) {
  }),
  ...baseTelemetryAttributes,
  "ai.prompt.messages": {
- input: () => JSON.stringify(promptMessages)
+ input: () => stringifyForTelemetry(promptMessages)
  },
  // standardized gen-ai llm span attributes:
  "gen_ai.system": model.provider,
@@ -4931,7 +5012,7 @@ function streamObject(options) {
  }
  var DefaultStreamObjectResult = class {
  constructor({
- model,
+ model: modelArg,
  headers,
  telemetry,
  settings,
@@ -4956,6 +5037,7 @@ var DefaultStreamObjectResult = class {
  this._warnings = new DelayedPromise();
  this._request = new DelayedPromise();
  this._response = new DelayedPromise();
+ const model = resolveLanguageModel(modelArg);
  const { maxRetries, retry } = prepareRetries({
  maxRetries: maxRetriesArg
  });
@@ -5052,7 +5134,7 @@ var DefaultStreamObjectResult = class {
  }),
  ...baseTelemetryAttributes,
  "ai.prompt.messages": {
- input: () => JSON.stringify(callOptions.prompt)
+ input: () => stringifyForTelemetry(callOptions.prompt)
  },
  // standardized gen-ai llm span attributes:
  "gen_ai.system": model.provider,
@@ -5769,7 +5851,7 @@ var originalGenerateId3 = (0, import_provider_utils19.createIdGenerator)({
  size: 24
  });
  async function generateText({
- model,
+ model: modelArg,
  tools,
  toolChoice,
  system,
@@ -5794,6 +5876,7 @@ async function generateText({
  onStepFinish,
  ...settings
  }) {
+ const model = resolveLanguageModel(modelArg);
  const stopConditions = asArray(stopWhen);
  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
  const callSettings = prepareCallSettings(settings);
@@ -5830,7 +5913,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a17, _b, _c, _d;
+ var _a17, _b, _c, _d, _e;
  const callSettings2 = prepareCallSettings(settings);
  let currentModelResponse;
  let currentToolCalls = [];
@@ -5849,16 +5932,18 @@ async function generateText({
  }));
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
- system: initialPrompt.system,
+ system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
  messages: stepInputMessages
  },
  supportedUrls: await model.supportedUrls
  });
- const stepModel = (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model;
+ const stepModel = resolveLanguageModel(
+ (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+ );
  const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
  tools,
- toolChoice: (_b = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _b : toolChoice,
- activeTools: (_c = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _c : activeTools
+ toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+ activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
  });
  currentModelResponse = await retry(
  () => {
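
Note: this hunk (mirrored for streamText further down) additionally lets the per-step preparation callback override the system prompt, alongside the existing model/toolChoice/activeTools overrides, and resolves a per-step string model id through resolveLanguageModel. A sketch — the option name prepareStep and its callback arguments are inferred from prepareStepResult, not shown in this diff:

    const result = await generateText({
      model: "openai/gpt-4o",
      prompt: "...",
      prepareStep: ({ stepNumber }) => stepNumber === 0
        ? { system: "Plan first.", model: "openai/gpt-4o-mini" } // per-step overrides
        : void 0 // fall back to the top-level settings
    });
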
@@ -5878,7 +5963,7 @@ async function generateText({
  "ai.model.id": stepModel.modelId,
  // prompt:
  "ai.prompt.messages": {
- input: () => JSON.stringify(promptMessages)
+ input: () => stringifyForTelemetry(promptMessages)
  },
  "ai.prompt.tools": {
  // convert the language model level tools:
@@ -5901,7 +5986,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span2) => {
- var _a19, _b2, _c2, _d2, _e, _f, _g, _h;
+ var _a19, _b2, _c2, _d2, _e2, _f, _g, _h;
  const result = await stepModel.doGenerate({
  ...callSettings2,
  tools: stepTools,
@@ -5915,7 +6000,7 @@ async function generateText({
  const responseData = {
  id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
- modelId: (_f = (_e = result.response) == null ? void 0 : _e.modelId) != null ? _f : stepModel.modelId,
+ modelId: (_f = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f : stepModel.modelId,
  headers: (_g = result.response) == null ? void 0 : _g.headers,
  body: (_h = result.response) == null ? void 0 : _h.body
  };
@@ -5991,7 +6076,7 @@ async function generateText({
  usage: currentModelResponse.usage,
  warnings: currentModelResponse.warnings,
  providerMetadata: currentModelResponse.providerMetadata,
- request: (_d = currentModelResponse.request) != null ? _d : {},
+ request: (_e = currentModelResponse.request) != null ? _e : {},
  response: {
  ...currentModelResponse.response,
  // deep clone msgs to avoid mutating past messages in multi-step:
@@ -6583,7 +6668,7 @@ function streamText({
  ...settings
  }) {
  return new DefaultStreamTextResult({
- model,
+ model: resolveLanguageModel(model),
  telemetry,
  headers,
  settings,
@@ -6898,7 +6983,7 @@ var DefaultStreamTextResult = class {
  responseMessages,
  usage
  }) {
- var _a17, _b, _c;
+ var _a17, _b, _c, _d;
  stepFinish = new DelayedPromise();
  const initialPrompt = await standardizePrompt({
  system,
@@ -6916,16 +7001,18 @@ var DefaultStreamTextResult = class {
  }));
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
- system: initialPrompt.system,
+ system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
  messages: stepInputMessages
  },
  supportedUrls: await model.supportedUrls
  });
- const stepModel = (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model;
+ const stepModel = resolveLanguageModel(
+ (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+ );
  const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
  tools,
- toolChoice: (_b = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _b : toolChoice,
- activeTools: (_c = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _c : activeTools
+ toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+ activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
  });
  const {
  result: { stream: stream2, response, request },
@@ -6947,7 +7034,7 @@ var DefaultStreamTextResult = class {
  "ai.model.id": stepModel.modelId,
  // prompt:
  "ai.prompt.messages": {
- input: () => JSON.stringify(promptMessages)
+ input: () => stringifyForTelemetry(promptMessages)
  },
  "ai.prompt.tools": {
  // convert the language model level tools:
@@ -7031,7 +7118,7 @@ var DefaultStreamTextResult = class {
  streamWithToolResults.pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var _a18, _b2, _c2, _d;
+ var _a18, _b2, _c2, _d2;
  if (chunk.type === "stream-start") {
  warnings = chunk.warnings;
  return;
@@ -7108,7 +7195,7 @@ var DefaultStreamTextResult = class {
  doStreamSpan.addEvent("ai.stream.finish");
  doStreamSpan.setAttributes({
  "ai.response.msToFinish": msToFinish,
- "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d = stepUsage.outputTokens) != null ? _d : 0) / msToFinish
+ "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
  });
  break;
  }
@@ -7358,14 +7445,14 @@ var DefaultStreamTextResult = class {
  messageMetadata,
  sendReasoning = false,
  sendSources = false,
- experimental_sendStart = true,
- experimental_sendFinish = true,
+ sendStart = true,
+ sendFinish = true,
  onError = () => "An error occurred."
  // mask error messages for safety by default
  } = {}) {
  const lastMessage = originalMessages[originalMessages.length - 1];
  const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
- const messageId = isContinuation ? lastMessage.id : newMessageId;
+ const messageId = isContinuation ? lastMessage.id : newMessageId != null ? newMessageId : this.generateId();
  const baseStream = this.fullStream.pipeThrough(
  new TransformStream({
  transform: async (part, controller) => {
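
Note: the experimental_ prefix is dropped from the start/finish chunk toggles here and in the three related hunks below. A migration sketch; result is a streamText result and the option values are illustrative:

    result.pipeUIMessageStreamToResponse(response, {
      sendStart: true,  // was experimental_sendStart
      sendFinish: false // was experimental_sendFinish
    });
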
@@ -7471,7 +7558,7 @@ var DefaultStreamTextResult = class {
  break;
  }
  case "start": {
- if (experimental_sendStart) {
+ if (sendStart) {
  const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "start",
@@ -7482,7 +7569,7 @@ var DefaultStreamTextResult = class {
  break;
  }
  case "finish": {
- if (experimental_sendFinish) {
+ if (sendFinish) {
  const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "finish",
@@ -7499,38 +7586,12 @@ var DefaultStreamTextResult = class {
  }
  })
  );
- if (onFinish == null) {
- return baseStream;
- }
- const state = createStreamingUIMessageState({
- lastMessage,
- newMessageId: messageId != null ? messageId : this.generateId()
- });
- const runUpdateMessageJob = async (job) => {
- await job({ state, write: () => {
- } });
- };
- return processUIMessageStream({
+ return handleUIMessageStreamFinish({
  stream: baseStream,
- runUpdateMessageJob
- }).pipeThrough(
- new TransformStream({
- transform(chunk, controller) {
- controller.enqueue(chunk);
- },
- flush() {
- const isContinuation2 = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
- onFinish({
- isContinuation: isContinuation2,
- responseMessage: state.message,
- messages: [
- ...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
- state.message
- ]
- });
- }
- })
- );
+ newMessageId: messageId,
+ originalMessages,
+ onFinish
+ });
  }
  pipeUIMessageStreamToResponse(response, {
  newMessageId,
@@ -7539,8 +7600,8 @@ var DefaultStreamTextResult = class {
  messageMetadata,
  sendReasoning,
  sendSources,
- experimental_sendFinish,
- experimental_sendStart,
+ sendFinish,
+ sendStart,
  onError,
  ...init
  } = {}) {
@@ -7553,8 +7614,8 @@ var DefaultStreamTextResult = class {
  messageMetadata,
  sendReasoning,
  sendSources,
- experimental_sendFinish,
- experimental_sendStart,
+ sendFinish,
+ sendStart,
  onError
  }),
  ...init
@@ -7574,8 +7635,8 @@ var DefaultStreamTextResult = class {
  messageMetadata,
  sendReasoning,
  sendSources,
- experimental_sendFinish,
- experimental_sendStart,
+ sendFinish,
+ sendStart,
  onError,
  ...init
  } = {}) {
@@ -7587,8 +7648,8 @@ var DefaultStreamTextResult = class {
  messageMetadata,
  sendReasoning,
  sendSources,
- experimental_sendFinish,
- experimental_sendStart,
+ sendFinish,
+ sendStart,
  onError
  }),
  ...init
@@ -8636,6 +8697,7 @@ var DefaultTranscriptionResult = class {
  NoSuchToolError,
  Output,
  RetryError,
+ SerialJobExecutor,
  TextStreamChatTransport,
  ToolCallRepairError,
  ToolExecutionError,
@@ -8661,7 +8723,7 @@ var DefaultTranscriptionResult = class {
  createUIMessageStream,
  createUIMessageStreamResponse,
  customProvider,
- defaultChatStore,
+ defaultChatStoreOptions,
  defaultSettingsMiddleware,
  embed,
  embedMany,