ai 3.1.8 → 3.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -102,6 +102,60 @@ declare class EmbedResult<VALUE> {
  });
  }
 
+ /**
+ Embed several values using an embedding model. The type of the value is defined
+ by the embedding model.
+
+ `embedMany` automatically splits large requests into smaller chunks if the model
+ has a limit on how many embeddings can be generated in a single call.
+
+ @param model - The embedding model to use.
+ @param values - The values that should be embedded.
+
+ @param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.
+ @param abortSignal - An optional abort signal that can be used to cancel the call.
+
+ @returns A result object that contains the embeddings, the value, and additional information.
+ */
+ declare function embedMany<VALUE>({ model, values, maxRetries, abortSignal, }: {
+ /**
+ The embedding model to use.
+ */
+ model: EmbeddingModel<VALUE>;
+ /**
+ The values that should be embedded.
+ */
+ values: Array<VALUE>;
+ /**
+ Maximum number of retries per embedding model call. Set to 0 to disable retries.
+
+ @default 2
+ */
+ maxRetries?: number;
+ /**
+ Abort signal.
+ */
+ abortSignal?: AbortSignal;
+ }): Promise<EmbedManyResult<VALUE>>;
+ /**
+ The result of a `embedMany` call.
+ It contains the embeddings, the values, and additional information.
+ */
+ declare class EmbedManyResult<VALUE> {
+ /**
+ The values that were embedded.
+ */
+ readonly values: Array<VALUE>;
+ /**
+ The embeddings. They are in the same order as the values.
+ */
+ readonly embeddings: Array<Embedding>;
+ constructor(options: {
+ values: Array<VALUE>;
+ embeddings: Array<Embedding>;
+ });
+ }
+
  type TokenUsage = {
  promptTokens: number;
  completionTokens: number;
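
For context, a minimal usage sketch of the `embedMany` API added above. The `@ai-sdk/openai` provider import and the `text-embedding-3-small` model id are assumptions for illustration; only the `embedMany` signature and result shape come from this diff.

import { embedMany } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package, not part of this diff

async function main() {
  const { values, embeddings } = await embedMany({
    model: openai.embedding('text-embedding-3-small'), // illustrative model id
    values: ['sunny day at the beach', 'rainy night in the city'],
    maxRetries: 2, // the documented default; set to 0 to disable retries
  });

  // Embeddings are returned in the same order as the input values.
  console.log(embeddings.length === values.length); // true
}

main().catch(console.error);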
@@ -1023,6 +1077,38 @@ declare function convertToCoreMessages(messages: Array<{
  toolInvocations?: Array<ToolResult<string, unknown, unknown>>;
  }>): CoreMessage[];
 
+ type AssistantStatus = 'in_progress' | 'awaiting_message';
+ type UseAssistantOptions = {
+ /**
+ * The API endpoint that accepts a `{ threadId: string | null; message: string; }` object and returns an `AssistantResponse` stream.
+ * The threadId refers to an existing thread with messages (or is `null` to create a new thread).
+ * The message is the next message that should be appended to the thread and sent to the assistant.
+ */
+ api: string;
+ /**
+ * An optional string that represents the ID of an existing thread.
+ * If not provided, a new thread will be created.
+ */
+ threadId?: string;
+ /**
+ * An optional literal that sets the mode of credentials to be used on the request.
+ * Defaults to "same-origin".
+ */
+ credentials?: RequestCredentials;
+ /**
+ * An optional object of headers to be passed to the API endpoint.
+ */
+ headers?: Record<string, string> | Headers;
+ /**
+ * An optional, additional body object to be passed to the API endpoint.
+ */
+ body?: object;
+ /**
+ * An optional callback that will be called when the assistant encounters an error.
+ */
+ onError?: (error: Error) => void;
+ };
+
  interface FunctionCall$1 {
  /**
  * The arguments to call the function with, as generated by the model in JSON
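
For orientation, a sketch of how the new `UseAssistantOptions` shape might be filled in on the client. The `/api/assistant` route, header, and body fields are made-up examples; only the option names and types come from this diff.

import type { UseAssistantOptions } from 'ai';

// Hypothetical configuration for an assistant-backed chat UI.
const assistantOptions: UseAssistantOptions = {
  api: '/api/assistant', // endpoint that returns an AssistantResponse stream (example path)
  // threadId omitted: a new thread will be created with the first message
  credentials: 'same-origin', // the documented default
  headers: { 'x-example-header': 'demo' }, // illustrative only
  body: { userId: 'user_123' }, // extra fields sent to the endpoint (example)
  onError: (error) => console.error('assistant error:', error),
};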
@@ -2184,4 +2270,4 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse,
  status?: number;
  }): void;
 
- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, CallWarning, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolMessage, CoreUserMessage, CreateMessage, DataContent, DataMessage, DeepPartial, EmbedResult, Embedding, EmbeddingModel, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, FinishReason, Function, FunctionCall$1 as FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LogProbs, Message$1 as Message, MistralStream, ObjectStreamPart, ObjectStreamPartInput, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolInvocation, ToolResultPart, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, convertToCoreMessages, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, embed, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, generateObject, generateText, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, AssistantStatus, CallWarning, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolMessage, CoreUserMessage, CreateMessage, DataContent, DataMessage, DeepPartial, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, FinishReason, Function, FunctionCall$1 as FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LogProbs, Message$1 as Message, MistralStream, ObjectStreamPart, ObjectStreamPartInput, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolInvocation, ToolResultPart, UseAssistantOptions, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, convertToCoreMessages, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, generateObject, generateText, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
package/dist/index.d.ts CHANGED
@@ -102,6 +102,60 @@ declare class EmbedResult<VALUE> {
  });
  }
 
+ /**
+ Embed several values using an embedding model. The type of the value is defined
+ by the embedding model.
+
+ `embedMany` automatically splits large requests into smaller chunks if the model
+ has a limit on how many embeddings can be generated in a single call.
+
+ @param model - The embedding model to use.
+ @param values - The values that should be embedded.
+
+ @param maxRetries - Maximum number of retries. Set to 0 to disable retries. Default: 2.
+ @param abortSignal - An optional abort signal that can be used to cancel the call.
+
+ @returns A result object that contains the embeddings, the value, and additional information.
+ */
+ declare function embedMany<VALUE>({ model, values, maxRetries, abortSignal, }: {
+ /**
+ The embedding model to use.
+ */
+ model: EmbeddingModel<VALUE>;
+ /**
+ The values that should be embedded.
+ */
+ values: Array<VALUE>;
+ /**
+ Maximum number of retries per embedding model call. Set to 0 to disable retries.
+
+ @default 2
+ */
+ maxRetries?: number;
+ /**
+ Abort signal.
+ */
+ abortSignal?: AbortSignal;
+ }): Promise<EmbedManyResult<VALUE>>;
+ /**
+ The result of a `embedMany` call.
+ It contains the embeddings, the values, and additional information.
+ */
+ declare class EmbedManyResult<VALUE> {
+ /**
+ The values that were embedded.
+ */
+ readonly values: Array<VALUE>;
+ /**
+ The embeddings. They are in the same order as the values.
+ */
+ readonly embeddings: Array<Embedding>;
+ constructor(options: {
+ values: Array<VALUE>;
+ embeddings: Array<Embedding>;
+ });
+ }
+
  type TokenUsage = {
  promptTokens: number;
  completionTokens: number;
@@ -1023,6 +1077,38 @@ declare function convertToCoreMessages(messages: Array<{
  toolInvocations?: Array<ToolResult<string, unknown, unknown>>;
  }>): CoreMessage[];
 
+ type AssistantStatus = 'in_progress' | 'awaiting_message';
+ type UseAssistantOptions = {
+ /**
+ * The API endpoint that accepts a `{ threadId: string | null; message: string; }` object and returns an `AssistantResponse` stream.
+ * The threadId refers to an existing thread with messages (or is `null` to create a new thread).
+ * The message is the next message that should be appended to the thread and sent to the assistant.
+ */
+ api: string;
+ /**
+ * An optional string that represents the ID of an existing thread.
+ * If not provided, a new thread will be created.
+ */
+ threadId?: string;
+ /**
+ * An optional literal that sets the mode of credentials to be used on the request.
+ * Defaults to "same-origin".
+ */
+ credentials?: RequestCredentials;
+ /**
+ * An optional object of headers to be passed to the API endpoint.
+ */
+ headers?: Record<string, string> | Headers;
+ /**
+ * An optional, additional body object to be passed to the API endpoint.
+ */
+ body?: object;
+ /**
+ * An optional callback that will be called when the assistant encounters an error.
+ */
+ onError?: (error: Error) => void;
+ };
+
  interface FunctionCall$1 {
  /**
  * The arguments to call the function with, as generated by the model in JSON
@@ -2184,4 +2270,4 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse,
  status?: number;
  }): void;
 
- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, CallWarning, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolMessage, CoreUserMessage, CreateMessage, DataContent, DataMessage, DeepPartial, EmbedResult, Embedding, EmbeddingModel, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, FinishReason, Function, FunctionCall$1 as FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LogProbs, Message$1 as Message, MistralStream, ObjectStreamPart, ObjectStreamPartInput, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolInvocation, ToolResultPart, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, convertToCoreMessages, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, embed, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, generateObject, generateText, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, AssistantStatus, CallWarning, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolMessage, CoreUserMessage, CreateMessage, DataContent, DataMessage, DeepPartial, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, FinishReason, Function, FunctionCall$1 as FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LogProbs, Message$1 as Message, MistralStream, ObjectStreamPart, ObjectStreamPartInput, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolInvocation, ToolResultPart, UseAssistantOptions, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, convertToCoreMessages, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, generateObject, generateText, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
package/dist/index.js CHANGED
@@ -40,6 +40,7 @@ __export(streams_exports, {
  AnthropicStream: () => AnthropicStream,
  AssistantResponse: () => AssistantResponse,
  CohereStream: () => CohereStream,
+ EmbedManyResult: () => EmbedManyResult,
  EmbedResult: () => EmbedResult,
  EmptyResponseBodyError: () => import_provider8.EmptyResponseBodyError,
  GenerateObjectResult: () => GenerateObjectResult,
@@ -78,6 +79,7 @@ __export(streams_exports, {
  createEventStreamTransformer: () => createEventStreamTransformer,
  createStreamDataTransformer: () => createStreamDataTransformer,
  embed: () => embed,
+ embedMany: () => embedMany,
  experimental_AssistantResponse: () => experimental_AssistantResponse,
  experimental_StreamData: () => experimental_StreamData,
  experimental_StreamingReactResponse: () => experimental_StreamingReactResponse,
@@ -192,6 +194,53 @@ var EmbedResult = class {
  }
  };
 
+ // core/util/split-array.ts
+ function splitArray(array, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("chunkSize must be greater than 0");
+ }
+ const result = [];
+ for (let i = 0; i < array.length; i += chunkSize) {
+ result.push(array.slice(i, i + chunkSize));
+ }
+ return result;
+ }
+
+ // core/embed/embed-many.ts
+ async function embedMany({
+ model,
+ values,
+ maxRetries,
+ abortSignal
+ }) {
+ const retry = retryWithExponentialBackoff({ maxRetries });
+ const maxEmbeddingsPerCall = model.maxEmbeddingsPerCall;
+ if (maxEmbeddingsPerCall == null) {
+ const modelResponse = await retry(
+ () => model.doEmbed({ values, abortSignal })
+ );
+ return new EmbedManyResult({
+ values,
+ embeddings: modelResponse.embeddings
+ });
+ }
+ const valueChunks = splitArray(values, maxEmbeddingsPerCall);
+ const embeddings = [];
+ for (const chunk of valueChunks) {
+ const modelResponse = await retry(
+ () => model.doEmbed({ values: chunk, abortSignal })
+ );
+ embeddings.push(...modelResponse.embeddings);
+ }
+ return new EmbedManyResult({ values, embeddings });
+ }
+ var EmbedManyResult = class {
+ constructor(options) {
+ this.values = options.values;
+ this.embeddings = options.embeddings;
+ }
+ };
+
  // core/generate-object/generate-object.ts
  var import_provider5 = require("@ai-sdk/provider");
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
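
The chunking behavior added in this hunk is easy to see in isolation. Below is a standalone TypeScript sketch of the internal splitArray helper (it is not exported from the package; this re-implementation is for illustration only), followed by the shape of the chunks it produces.

// Standalone sketch of the internal splitArray helper.
function splitArray<T>(array: T[], chunkSize: number): T[][] {
  if (chunkSize <= 0) {
    throw new Error('chunkSize must be greater than 0');
  }
  const result: T[][] = [];
  for (let i = 0; i < array.length; i += chunkSize) {
    result.push(array.slice(i, i + chunkSize));
  }
  return result;
}

// With a model limit of 2 embeddings per call, 5 values become 3 requests:
console.log(splitArray(['a', 'b', 'c', 'd', 'e'], 2));
// [ [ 'a', 'b' ], [ 'c', 'd' ], [ 'e' ] ]

embedMany then calls model.doEmbed once per chunk (with retries) and concatenates the results, so the returned embeddings stay in the same order as the input values.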
@@ -1685,7 +1734,7 @@ var StreamTextResult = class {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
- const reader = this.textStream.pipeThrough(createCallbacksTransformer(void 0)).pipeThrough(createStreamDataTransformer()).getReader();
+ const reader = this.toAIStream().getReader();
  const read = async () => {
  try {
  while (true) {
@@ -1716,15 +1765,14 @@ var StreamTextResult = class {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
- const reader = this.textStream.getReader();
+ const reader = this.textStream.pipeThrough(new TextEncoderStream()).getReader();
  const read = async () => {
- const encoder = new TextEncoder();
  try {
  while (true) {
  const { done, value } = await reader.read();
  if (done)
  break;
- response.write(encoder.encode(value));
+ response.write(value);
  }
  } catch (error) {
  throw error;
@@ -1754,23 +1802,13 @@ var StreamTextResult = class {
  */
  toTextStreamResponse(init) {
  var _a;
- const encoder = new TextEncoder();
- return new Response(
- this.textStream.pipeThrough(
- new TransformStream({
- transform(chunk, controller) {
- controller.enqueue(encoder.encode(chunk));
- }
- })
- ),
- {
- status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
- headers: {
- "Content-Type": "text/plain; charset=utf-8",
- ...init == null ? void 0 : init.headers
- }
+ return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
+ status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ ...init == null ? void 0 : init.headers
  }
- );
+ });
  }
  };
  var experimental_streamText = streamText;
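
The hunks above simplify StreamTextResult's response helpers: one reuses toAIStream(), and the others replace hand-rolled TextEncoder/TransformStream plumbing with the platform's built-in TextEncoderStream, which performs the same string-to-UTF-8-byte conversion. A minimal, self-contained sketch of that equivalence (illustrative only; the variable names are made up, and it assumes a runtime with Web Streams such as Node 18+ or a browser):

// Two equivalent ways to turn a stream of strings into a stream of UTF-8 bytes.
const makeTextStream = () =>
  new ReadableStream<string>({
    start(controller) {
      controller.enqueue('Hello, ');
      controller.enqueue('world');
      controller.close();
    },
  });

// Before: manual encoding via TextEncoder inside a TransformStream.
const encoder = new TextEncoder();
const manual = makeTextStream().pipeThrough(
  new TransformStream<string, Uint8Array>({
    transform(chunk, controller) {
      controller.enqueue(encoder.encode(chunk));
    },
  }),
);

// After: TextEncoderStream does the same conversion with less code.
const builtIn = makeTextStream().pipeThrough(new TextEncoderStream());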
@@ -3322,6 +3360,7 @@ function streamToResponse(res, response, init) {
  AnthropicStream,
  AssistantResponse,
  CohereStream,
+ EmbedManyResult,
  EmbedResult,
  EmptyResponseBodyError,
  GenerateObjectResult,
@@ -3360,6 +3399,7 @@ function streamToResponse(res, response, init) {
  createEventStreamTransformer,
  createStreamDataTransformer,
  embed,
+ embedMany,
  experimental_AssistantResponse,
  experimental_StreamData,
  experimental_StreamingReactResponse,