ai 4.0.36 → 4.0.37

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,12 @@
  # ai
 
+ ## 4.0.37
+
+ ### Patch Changes
+
+ - 8304ed8: feat (ai/core): Add option `throwErrorForEmptyVectors` to cosineSimilarity
+ - ed28182: feat (ai/ui): add appendResponseMessages helper
+
  ## 4.0.36
 
  ### Patch Changes
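
The two patch entries above add one option and one helper. A minimal sketch of how they might be called, based only on the type declarations further down in this diff (the vector values and chat messages are illustrative, not SDK output):

```ts
import {
  appendResponseMessages,
  cosineSimilarity,
  type CoreMessage,
  type Message,
} from 'ai';

// cosineSimilarity now takes an options object. By default empty or zero
// vectors yield 0; with throwErrorForEmptyVectors they throw instead.
const similarity = cosineSimilarity([1, 2, 3], [2, 4, 6], {
  throwErrorForEmptyVectors: true,
});

// appendResponseMessages merges CoreMessages from a model response into a
// useChat-style Message[] history, generating ids and timestamps for them.
const history: Message[] = [{ id: 'msg-1', role: 'user', content: 'Hello!' }];
const responseMessages: CoreMessage[] = [{ role: 'assistant', content: 'Hi!' }];
const updated: Message[] = appendResponseMessages({
  messages: history,
  responseMessages,
});
```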
package/dist/index.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { DataStreamString, ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
+ import { DataStreamString, ToolInvocation, Attachment, Schema, DeepPartial, Message, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
  export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DataStreamPart, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema, ToolInvocation, UseAssistantOptions, formatAssistantStreamPart, formatDataStreamPart, jsonSchema, parseAssistantStreamPart, parseDataStreamPart, processDataStream, processTextStream } from '@ai-sdk/ui-utils';
  export { ToolCall as CoreToolCall, ToolResult as CoreToolResult, generateId } from '@ai-sdk/provider-utils';
  import { JSONValue, EmbeddingModelV1, EmbeddingModelV1Embedding, ImageModelV1, ImageModelV1CallWarning, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, LanguageModelV1CallOptions, AISDKError, LanguageModelV1FunctionToolCall, JSONSchema7, NoSuchModelError } from '@ai-sdk/provider';
@@ -1404,6 +1404,18 @@ declare function tool<PARAMETERS extends Parameters, RESULT>(tool: CoreTool<PARA
  execute: undefined;
  };
 
+ /**
+ * Appends the CoreMessage[] from the response to a Message[] (for useChat).
+ * The messages are converted to Messages before being appended.
+ * Timestamps and IDs are generated for the new messages.
+ *
+ * @returns A new Message[] with the response messages appended.
+ */
+ declare function appendResponseMessages({ messages, responseMessages, }: {
+ messages: Message[];
+ responseMessages: CoreMessage[];
+ }): Message[];
+
  /**
  Converts an array of messages from useChat into an array of CoreMessages that can be used
  with the AI core functions (e.g. `streamText`).
@@ -2312,11 +2324,17 @@ declare function experimental_createProviderRegistry(providers: Record<string, P
  *
  * @param vector1 - The first vector.
  * @param vector2 - The second vector.
+ * @param options - Optional configuration.
+ * @param options.throwErrorForEmptyVectors - If true, throws an error for empty vectors. Default: false.
  *
  * @returns The cosine similarity between vector1 and vector2.
+ * @returns 0 if either vector is the zero vector.
+ * @throws {InvalidArgumentError} If throwErrorForEmptyVectors is true and vectors are empty.
  * @throws {Error} If the vectors do not have the same length.
  */
- declare function cosineSimilarity(vector1: number[], vector2: number[]): number;
+ declare function cosineSimilarity(vector1: number[], vector2: number[], options?: {
+ throwErrorForEmptyVectors?: boolean;
+ }): number;
 
  /**
  * Creates a ReadableStream that emits the provided values with an optional delay between each value.
@@ -2623,4 +2641,4 @@ declare namespace llamaindexAdapter {
  };
  }
 
- export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DataStreamWriter, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, GenerateImageResult as Experimental_GenerateImageResult, GeneratedImage as Experimental_GeneratedImage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamTextTransform, TextPart, TextStreamPart, ToolCallPart, ToolCallRepairError, ToolCallRepairFunction, ToolContent, ToolExecutionError, ToolExecutionOptions, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createDataStream, createDataStreamResponse, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, experimental_wrapLanguageModel, generateObject, generateText, pipeDataStreamToResponse, simulateReadableStream, smoothStream, streamObject, streamText, tool };
+ export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DataStreamWriter, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, GenerateImageResult as Experimental_GenerateImageResult, GeneratedImage as Experimental_GeneratedImage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamTextTransform, TextPart, TextStreamPart, ToolCallPart, ToolCallRepairError, ToolCallRepairFunction, ToolContent, ToolExecutionError, ToolExecutionOptions, ToolResultPart, UserContent, appendResponseMessages, convertToCoreMessages, cosineSimilarity, createDataStream, createDataStreamResponse, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, experimental_wrapLanguageModel, generateObject, generateText, pipeDataStreamToResponse, simulateReadableStream, smoothStream, streamObject, streamText, tool };
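
The `appendResponseMessages` declaration above targets server-side persistence of `useChat` histories. A hedged sketch of that flow, assuming `generateText` exposes the generated `CoreMessage[]` as `result.response.messages` in this release; the `@ai-sdk/openai` model is only an example and any `LanguageModel` would do:

```ts
import {
  appendResponseMessages,
  convertToCoreMessages,
  generateText,
  type Message,
} from 'ai';
import { openai } from '@ai-sdk/openai'; // example provider, not part of this diff

async function continueChat(previousMessages: Message[]): Promise<Message[]> {
  const result = await generateText({
    model: openai('gpt-4o'),
    messages: convertToCoreMessages(previousMessages),
  });

  // Returns a new Message[]; ids and createdAt timestamps are generated for
  // the appended assistant/tool messages (see the implementation further down).
  return appendResponseMessages({
    messages: previousMessages,
    responseMessages: result.response.messages,
  });
}
```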
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { DataStreamString, ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
+ import { DataStreamString, ToolInvocation, Attachment, Schema, DeepPartial, Message, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
  export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DataStreamPart, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema, ToolInvocation, UseAssistantOptions, formatAssistantStreamPart, formatDataStreamPart, jsonSchema, parseAssistantStreamPart, parseDataStreamPart, processDataStream, processTextStream } from '@ai-sdk/ui-utils';
  export { ToolCall as CoreToolCall, ToolResult as CoreToolResult, generateId } from '@ai-sdk/provider-utils';
  import { JSONValue, EmbeddingModelV1, EmbeddingModelV1Embedding, ImageModelV1, ImageModelV1CallWarning, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, LanguageModelV1CallOptions, AISDKError, LanguageModelV1FunctionToolCall, JSONSchema7, NoSuchModelError } from '@ai-sdk/provider';
@@ -1404,6 +1404,18 @@ declare function tool<PARAMETERS extends Parameters, RESULT>(tool: CoreTool<PARA
  execute: undefined;
  };
 
+ /**
+ * Appends the CoreMessage[] from the response to a Message[] (for useChat).
+ * The messages are converted to Messages before being appended.
+ * Timestamps and IDs are generated for the new messages.
+ *
+ * @returns A new Message[] with the response messages appended.
+ */
+ declare function appendResponseMessages({ messages, responseMessages, }: {
+ messages: Message[];
+ responseMessages: CoreMessage[];
+ }): Message[];
+
  /**
  Converts an array of messages from useChat into an array of CoreMessages that can be used
  with the AI core functions (e.g. `streamText`).
@@ -2312,11 +2324,17 @@ declare function experimental_createProviderRegistry(providers: Record<string, P
  *
  * @param vector1 - The first vector.
  * @param vector2 - The second vector.
+ * @param options - Optional configuration.
+ * @param options.throwErrorForEmptyVectors - If true, throws an error for empty vectors. Default: false.
  *
  * @returns The cosine similarity between vector1 and vector2.
+ * @returns 0 if either vector is the zero vector.
+ * @throws {InvalidArgumentError} If throwErrorForEmptyVectors is true and vectors are empty.
  * @throws {Error} If the vectors do not have the same length.
  */
- declare function cosineSimilarity(vector1: number[], vector2: number[]): number;
+ declare function cosineSimilarity(vector1: number[], vector2: number[], options?: {
+ throwErrorForEmptyVectors?: boolean;
+ }): number;
 
  /**
  * Creates a ReadableStream that emits the provided values with an optional delay between each value.
@@ -2623,4 +2641,4 @@ declare namespace llamaindexAdapter {
  };
  }
 
- export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DataStreamWriter, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, GenerateImageResult as Experimental_GenerateImageResult, GeneratedImage as Experimental_GeneratedImage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamTextTransform, TextPart, TextStreamPart, ToolCallPart, ToolCallRepairError, ToolCallRepairFunction, ToolContent, ToolExecutionError, ToolExecutionOptions, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createDataStream, createDataStreamResponse, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, experimental_wrapLanguageModel, generateObject, generateText, pipeDataStreamToResponse, simulateReadableStream, smoothStream, streamObject, streamText, tool };
+ export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DataStreamWriter, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, GenerateImageResult as Experimental_GenerateImageResult, GeneratedImage as Experimental_GeneratedImage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamTextTransform, TextPart, TextStreamPart, ToolCallPart, ToolCallRepairError, ToolCallRepairFunction, ToolContent, ToolExecutionError, ToolExecutionOptions, ToolResultPart, UserContent, appendResponseMessages, convertToCoreMessages, cosineSimilarity, createDataStream, createDataStreamResponse, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, experimental_wrapLanguageModel, generateObject, generateText, pipeDataStreamToResponse, simulateReadableStream, smoothStream, streamObject, streamText, tool };
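
`package/dist/index.d.ts` mirrors the `.d.mts` changes above. For `cosineSimilarity` specifically, the updated contract could be exercised like this (the embedding values are made up; `embed`/`embedMany` would normally supply them):

```ts
import { cosineSimilarity, InvalidArgumentError } from 'ai';

// Default behaviour: an empty or zero vector now yields 0 instead of NaN.
console.log(cosineSimilarity([0.1, 0.2, 0.3], [0.2, 0.1, 0.0]));
console.log(cosineSimilarity([], [])); // 0

// Opt-in strictness: empty vectors are rejected with InvalidArgumentError.
try {
  cosineSimilarity([], [], { throwErrorForEmptyVectors: true });
} catch (error) {
  if (error instanceof InvalidArgumentError) {
    console.error('empty vectors rejected:', error.message);
  }
}
```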
package/dist/index.js CHANGED
@@ -49,6 +49,7 @@ __export(streams_exports, {
  ToolExecutionError: () => ToolExecutionError,
  TypeValidationError: () => import_provider16.TypeValidationError,
  UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
+ appendResponseMessages: () => appendResponseMessages,
  convertToCoreMessages: () => convertToCoreMessages,
  cosineSimilarity: () => cosineSimilarity,
  createDataStream: () => createDataStream,
@@ -61,7 +62,7 @@ __export(streams_exports, {
  experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
  formatAssistantStreamPart: () => import_ui_utils14.formatAssistantStreamPart,
  formatDataStreamPart: () => import_ui_utils14.formatDataStreamPart,
- generateId: () => import_provider_utils13.generateId,
+ generateId: () => import_provider_utils14.generateId,
  generateObject: () => generateObject,
  generateText: () => generateText,
  jsonSchema: () => import_ui_utils9.jsonSchema,
@@ -78,7 +79,7 @@ __export(streams_exports, {
  });
  module.exports = __toCommonJS(streams_exports);
  var import_ui_utils14 = require("@ai-sdk/ui-utils");
- var import_provider_utils13 = require("@ai-sdk/provider-utils");
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
 
  // core/index.ts
  var import_ui_utils9 = require("@ai-sdk/ui-utils");
@@ -2279,7 +2280,7 @@ async function generateObject({
  experimental_telemetry: telemetry,
  experimental_providerMetadata: providerMetadata,
  _internal: {
- generateId: generateId3 = originalGenerateId,
+ generateId: generateId4 = originalGenerateId,
  currentDate = () => /* @__PURE__ */ new Date()
  } = {},
  ...settings
@@ -2409,7 +2410,7 @@ async function generateObject({
  headers
  });
  const responseData = {
- id: (_b2 = (_a15 = result2.response) == null ? void 0 : _a15.id) != null ? _b2 : generateId3(),
+ id: (_b2 = (_a15 = result2.response) == null ? void 0 : _a15.id) != null ? _b2 : generateId4(),
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
  };
@@ -2517,7 +2518,7 @@ async function generateObject({
  });
  const objectText = (_b2 = (_a15 = result2.toolCalls) == null ? void 0 : _a15[0]) == null ? void 0 : _b2.args;
  const responseData = {
- id: (_d = (_c = result2.response) == null ? void 0 : _c.id) != null ? _d : generateId3(),
+ id: (_d = (_c = result2.response) == null ? void 0 : _c.id) != null ? _d : generateId4(),
  timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
  modelId: (_h = (_g = result2.response) == null ? void 0 : _g.modelId) != null ? _h : model.modelId
  };
@@ -2815,7 +2816,7 @@ function streamObject({
  experimental_providerMetadata: providerMetadata,
  onFinish,
  _internal: {
- generateId: generateId3 = originalGenerateId2,
+ generateId: generateId4 = originalGenerateId2,
  currentDate = () => /* @__PURE__ */ new Date(),
  now: now2 = now
  } = {},
@@ -2848,7 +2849,7 @@ function streamObject({
  inputProviderMetadata: providerMetadata,
  mode,
  onFinish,
- generateId: generateId3,
+ generateId: generateId4,
  currentDate,
  now: now2
  });
@@ -2870,7 +2871,7 @@ var DefaultStreamObjectResult = class {
  inputProviderMetadata,
  mode,
  onFinish,
- generateId: generateId3,
+ generateId: generateId4,
  currentDate,
  now: now2
  }) {
@@ -3071,7 +3072,7 @@ var DefaultStreamObjectResult = class {
  let accumulatedText = "";
  let textDelta = "";
  let response = {
- id: generateId3(),
+ id: generateId4(),
  timestamp: currentDate(),
  modelId: model.modelId
  };
@@ -3664,7 +3665,7 @@ async function generateText({
  experimental_activeTools: activeTools,
  experimental_repairToolCall: repairToolCall,
  _internal: {
- generateId: generateId3 = originalGenerateId3,
+ generateId: generateId4 = originalGenerateId3,
  currentDate = () => /* @__PURE__ */ new Date()
  } = {},
  onStepFinish,
@@ -3798,7 +3799,7 @@ async function generateText({
  headers
  });
  const responseData = {
- id: (_b2 = (_a16 = result.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId3(),
+ id: (_b2 = (_a16 = result.response) == null ? void 0 : _a16.id) != null ? _b2 : generateId4(),
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
  };
@@ -4499,7 +4500,7 @@ function streamText({
  onStepFinish,
  _internal: {
  now: now2 = now,
- generateId: generateId3 = originalGenerateId4,
+ generateId: generateId4 = originalGenerateId4,
  currentDate = () => /* @__PURE__ */ new Date()
  } = {},
  ...settings
@@ -4529,7 +4530,7 @@ function streamText({
  onStepFinish,
  now: now2,
  currentDate,
- generateId: generateId3
+ generateId: generateId4
  });
  }
  function createOutputTransformStream(output) {
@@ -4609,7 +4610,7 @@ var DefaultStreamTextResult = class {
  onStepFinish,
  now: now2,
  currentDate,
- generateId: generateId3
+ generateId: generateId4
  }) {
  this.warningsPromise = new DelayedPromise();
  this.usagePromise = new DelayedPromise();
@@ -4634,7 +4635,7 @@ var DefaultStreamTextResult = class {
  let recordedContinuationText = "";
  let recordedFullText = "";
  const recordedResponse = {
- id: generateId3(),
+ id: generateId4(),
  timestamp: currentDate(),
  modelId: model.modelId,
  messages: []
@@ -4952,7 +4953,7 @@ var DefaultStreamTextResult = class {
  let fullStepText = stepType2 === "continue" ? previousStepText : "";
  let stepLogProbs;
  let stepResponse = {
- id: generateId3(),
+ id: generateId4(),
  timestamp: currentDate(),
  modelId: model.modelId
  };
@@ -5498,6 +5499,66 @@ var experimental_wrapLanguageModel = ({
  };
  };
 
+ // core/prompt/append-response-messages.ts
+ var import_provider_utils12 = require("@ai-sdk/provider-utils");
+ function appendResponseMessages({
+ messages,
+ responseMessages
+ }) {
+ var _a14;
+ const clonedMessages = structuredClone(messages);
+ for (const message of responseMessages) {
+ const role = message.role;
+ switch (role) {
+ case "system":
+ case "user": {
+ throw new Error(
+ "AI response must not contain system or user messages: " + role
+ );
+ }
+ case "assistant": {
+ clonedMessages.push({
+ role: "assistant",
+ id: (0, import_provider_utils12.generateId)(),
+ // generate an id for the message, will be overridden by the client
+ createdAt: /* @__PURE__ */ new Date(),
+ // generate a createdAt date for the message, will be overridden by the client
+ // only include text in the content:
+ content: typeof message.content === "string" ? message.content : message.content.filter((part) => part.type === "text").map((part) => part.text).join(""),
+ // separate tool calls from the content:
+ toolInvocations: (typeof message.content === "string" ? [] : message.content.filter((part) => part.type === "tool-call")).map((call) => ({
+ state: "call",
+ ...call
+ }))
+ });
+ break;
+ }
+ case "tool": {
+ const previousMessage = clonedMessages[clonedMessages.length - 1];
+ (_a14 = previousMessage.toolInvocations) != null ? _a14 : previousMessage.toolInvocations = [];
+ if (previousMessage.role !== "assistant") {
+ throw new Error(
+ `Tool result must follow an assistant message: ${previousMessage.role}`
+ );
+ }
+ for (const part of message.content) {
+ const toolCall = previousMessage.toolInvocations.find(
+ (call) => call.toolCallId === part.toolCallId
+ );
+ if (!toolCall) {
+ throw new Error("Tool call not found in previous message");
+ }
+ toolCall.state = "result";
+ const toolResult = toolCall;
+ toolResult.result = part.result;
+ }
+ break;
+ }
+ }
+ }
+ return clonedMessages;
+ }
+
  // core/registry/custom-provider.ts
  var import_provider18 = require("@ai-sdk/provider");
  function experimental_customProvider({
@@ -5627,13 +5688,28 @@ function tool(tool2) {
  }
 
  // core/util/cosine-similarity.ts
- function cosineSimilarity(vector1, vector2) {
+ function cosineSimilarity(vector1, vector2, options = {
+ throwErrorForEmptyVectors: false
+ }) {
+ const { throwErrorForEmptyVectors } = options;
  if (vector1.length !== vector2.length) {
  throw new Error(
  `Vectors must have the same length (vector1: ${vector1.length} elements, vector2: ${vector2.length} elements)`
  );
  }
- return dotProduct(vector1, vector2) / (magnitude(vector1) * magnitude(vector2));
+ if (throwErrorForEmptyVectors && vector1.length === 0) {
+ throw new InvalidArgumentError({
+ parameter: "vector1",
+ value: vector1,
+ message: "Vectors cannot be empty"
+ });
+ }
+ const magnitude1 = magnitude(vector1);
+ const magnitude2 = magnitude(vector2);
+ if (magnitude1 === 0 || magnitude2 === 0) {
+ return 0;
+ }
+ return dotProduct(vector1, vector2) / (magnitude1 * magnitude2);
  }
  function dotProduct(vector1, vector2) {
  return vector1.reduce(
@@ -5877,11 +5953,11 @@ __export(llamaindex_adapter_exports, {
  toDataStream: () => toDataStream2,
  toDataStreamResponse: () => toDataStreamResponse2
  });
- var import_provider_utils12 = require("@ai-sdk/provider-utils");
+ var import_provider_utils13 = require("@ai-sdk/provider-utils");
  var import_ui_utils12 = require("@ai-sdk/ui-utils");
  function toDataStreamInternal2(stream, callbacks) {
  const trimStart = trimStartOfStream();
- return (0, import_provider_utils12.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
+ return (0, import_provider_utils13.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
  new TransformStream({
  async transform(message, controller) {
  controller.enqueue(trimStart(message.delta));
@@ -6030,6 +6106,7 @@ var StreamData = class {
  ToolExecutionError,
  TypeValidationError,
  UnsupportedFunctionalityError,
+ appendResponseMessages,
  convertToCoreMessages,
  cosineSimilarity,
  createDataStream,
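
In the implementation above, the `case "tool"` branch looks up the matching tool invocation on the preceding assistant message and upgrades its state from `call` to `result`. A sketch of that path with illustrative tool name, call id, and payloads:

```ts
import { appendResponseMessages, type CoreMessage, type Message } from 'ai';

const messages: Message[] = [
  { id: 'u1', role: 'user', content: 'Weather in Berlin?' },
];

const responseMessages: CoreMessage[] = [
  {
    role: 'assistant',
    content: [
      { type: 'tool-call', toolCallId: 'call-1', toolName: 'getWeather', args: { city: 'Berlin' } },
    ],
  },
  {
    role: 'tool',
    content: [
      { type: 'tool-result', toolCallId: 'call-1', toolName: 'getWeather', result: { temperature: 21 } },
    ],
  },
];

const appended = appendResponseMessages({ messages, responseMessages });
const assistant = appended[appended.length - 1];
// The appended assistant Message carries an empty text content here and one
// toolInvocations entry whose state is 'result' with the tool result attached.
console.log(assistant.toolInvocations);
```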