ai 3.4.9 → 3.4.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/README.md +0 -109
- package/dist/index.d.mts +9 -2
- package/dist/index.d.ts +9 -2
- package/dist/index.js +27 -33
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +27 -33
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -5
- package/svelte/dist/index.d.mts +2 -0
- package/svelte/dist/index.d.ts +2 -0
- package/svelte/dist/index.js +7 -0
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +7 -0
- package/svelte/dist/index.mjs.map +1 -1
    
        package/CHANGELOG.md
    CHANGED
    
    | @@ -1,5 +1,23 @@ | |
| 1 1 | 
             
            # ai
         | 
| 2 2 |  | 
| 3 | 
            +
            ## 3.4.11
         | 
| 4 | 
            +
             | 
| 5 | 
            +
            ### Patch Changes
         | 
| 6 | 
            +
             | 
| 7 | 
            +
            - caedcda: feat (ai/ui): add setData helper to useChat
         | 
| 8 | 
            +
            - Updated dependencies [caedcda]
         | 
| 9 | 
            +
              - @ai-sdk/svelte@0.0.52
         | 
| 10 | 
            +
              - @ai-sdk/react@0.0.63
         | 
| 11 | 
            +
              - @ai-sdk/solid@0.0.50
         | 
| 12 | 
            +
              - @ai-sdk/vue@0.0.55
         | 
| 13 | 
            +
             | 
| 14 | 
            +
            ## 3.4.10
         | 
| 15 | 
            +
             | 
| 16 | 
            +
            ### Patch Changes
         | 
| 17 | 
            +
             | 
| 18 | 
            +
            - 0b557d7: feat (ai/core): add tracer option to telemetry settings
         | 
| 19 | 
            +
            - 44f6bc5: feat (ai/core): expose StepResult type
         | 
| 20 | 
            +
             | 
| 3 21 | 
             
            ## 3.4.9
         | 
| 4 22 |  | 
| 5 23 | 
             
            ### Patch Changes
         | 
    
        package/README.md
    CHANGED
    
    | @@ -101,115 +101,6 @@ export async function POST(req: Request) { | |
| 101 101 | 
             
            }
         | 
| 102 102 | 
             
            ```
         | 
| 103 103 |  | 
| 104 | 
            -
            ### AI SDK RSC
         | 
| 105 | 
            -
             | 
| 106 | 
            -
            The [AI SDK RSC](https://sdk.vercel.ai/docs/ai-sdk-rsc/overview) module provides an alternative API that also helps you build chatbots and generative user interfaces for frameworks that support [React Server Components](https://nextjs.org/docs/app/building-your-application/rendering/server-components) (RSC).
         | 
| 107 | 
            -
             | 
| 108 | 
            -
            This API leverages the benefits of [Streaming](https://nextjs.org/docs/app/building-your-application/rendering/server-components#streaming) and [Server Actions](https://nextjs.org/docs/app/building-your-application/data-fetching/server-actions-and-mutations) offered by RSC, thus improving the developer experience of managing states between server/client and building generative user interfaces.
         | 
| 109 | 
            -
             | 
| 110 | 
            -
            ###### @/app/actions.tsx (Next.js App Router)
         | 
| 111 | 
            -
             | 
| 112 | 
            -
            ```tsx
         | 
| 113 | 
            -
            import { streamUI } from 'ai/rsc';
         | 
| 114 | 
            -
            import { z } from 'zod';
         | 
| 115 | 
            -
             | 
| 116 | 
            -
            async function submitMessage() {
         | 
| 117 | 
            -
              'use server';
         | 
| 118 | 
            -
             | 
| 119 | 
            -
              const stream = await streamUI({
         | 
| 120 | 
            -
                model: openai('gpt-4-turbo'),
         | 
| 121 | 
            -
                messages: [
         | 
| 122 | 
            -
                  { role: 'system', content: 'You are a friendly bot!' },
         | 
| 123 | 
            -
                  { role: 'user', content: input },
         | 
| 124 | 
            -
                ],
         | 
| 125 | 
            -
                text: ({ content, done }) => {
         | 
| 126 | 
            -
                  return <div>{content}</div>;
         | 
| 127 | 
            -
                },
         | 
| 128 | 
            -
                tools: {
         | 
| 129 | 
            -
                  deploy: {
         | 
| 130 | 
            -
                    description: 'Deploy repository to vercel',
         | 
| 131 | 
            -
                    parameters: z.object({
         | 
| 132 | 
            -
                      repositoryName: z
         | 
| 133 | 
            -
                        .string()
         | 
| 134 | 
            -
                        .describe('The name of the repository, example: vercel/ai-chatbot'),
         | 
| 135 | 
            -
                    }),
         | 
| 136 | 
            -
                    generate: async function* ({ repositoryName }) {
         | 
| 137 | 
            -
                      yield <div>Cloning repository {repositoryName}...</div>;
         | 
| 138 | 
            -
                      await new Promise(resolve => setTimeout(resolve, 3000));
         | 
| 139 | 
            -
                      yield <div>Building repository {repositoryName}...</div>;
         | 
| 140 | 
            -
                      await new Promise(resolve => setTimeout(resolve, 2000));
         | 
| 141 | 
            -
                      return <div>{repositoryName} deployed!</div>;
         | 
| 142 | 
            -
                    },
         | 
| 143 | 
            -
                  },
         | 
| 144 | 
            -
                },
         | 
| 145 | 
            -
              });
         | 
| 146 | 
            -
             | 
| 147 | 
            -
              return {
         | 
| 148 | 
            -
                ui: stream.value,
         | 
| 149 | 
            -
              };
         | 
| 150 | 
            -
            }
         | 
| 151 | 
            -
            ```
         | 
| 152 | 
            -
             | 
| 153 | 
            -
            ###### @/app/ai.ts (Next.js App Router)
         | 
| 154 | 
            -
             | 
| 155 | 
            -
            ```tsx
         | 
| 156 | 
            -
            import { createAI } from 'ai/rsc';
         | 
| 157 | 
            -
            import { submitMessage } from '@/app/actions';
         | 
| 158 | 
            -
             | 
| 159 | 
            -
            export const AI = createAI({
         | 
| 160 | 
            -
              initialAIState: {},
         | 
| 161 | 
            -
              initialUIState: {},
         | 
| 162 | 
            -
              actions: {
         | 
| 163 | 
            -
                submitMessage,
         | 
| 164 | 
            -
              },
         | 
| 165 | 
            -
            });
         | 
| 166 | 
            -
            ```
         | 
| 167 | 
            -
             | 
| 168 | 
            -
            ###### @/app/layout.tsx (Next.js App Router)
         | 
| 169 | 
            -
             | 
| 170 | 
            -
            ```tsx
         | 
| 171 | 
            -
            import { ReactNode } from 'react';
         | 
| 172 | 
            -
            import { AI } from '@/app/ai';
         | 
| 173 | 
            -
             | 
| 174 | 
            -
            export default function Layout({ children }: { children: ReactNode }) {
         | 
| 175 | 
            -
              <AI>{children}</AI>;
         | 
| 176 | 
            -
            }
         | 
| 177 | 
            -
            ```
         | 
| 178 | 
            -
             | 
| 179 | 
            -
            ###### @/app/page.tsx (Next.js App Router)
         | 
| 180 | 
            -
             | 
| 181 | 
            -
            ```tsx
         | 
| 182 | 
            -
            'use client';
         | 
| 183 | 
            -
             | 
| 184 | 
            -
            import { useActions } from 'ai/rsc';
         | 
| 185 | 
            -
            import { ReactNode, useState } from 'react';
         | 
| 186 | 
            -
             | 
| 187 | 
            -
            export default function Page() {
         | 
| 188 | 
            -
              const [input, setInput] = useState('');
         | 
| 189 | 
            -
              const [messages, setMessages] = useState<ReactNode[]>([]);
         | 
| 190 | 
            -
              const { submitMessage } = useActions();
         | 
| 191 | 
            -
             | 
| 192 | 
            -
              return (
         | 
| 193 | 
            -
                <div>
         | 
| 194 | 
            -
                  <input
         | 
| 195 | 
            -
                    value={input}
         | 
| 196 | 
            -
                    onChange={event => {
         | 
| 197 | 
            -
                      setInput(event.target.value);
         | 
| 198 | 
            -
                    }}
         | 
| 199 | 
            -
                  />
         | 
| 200 | 
            -
                  <button
         | 
| 201 | 
            -
                    onClick={async () => {
         | 
| 202 | 
            -
                      const { ui } = await submitMessage(input);
         | 
| 203 | 
            -
                      setMessages(currentMessages => [...currentMessages, ui]);
         | 
| 204 | 
            -
                    }}
         | 
| 205 | 
            -
                  >
         | 
| 206 | 
            -
                    Submit
         | 
| 207 | 
            -
                  </button>
         | 
| 208 | 
            -
                </div>
         | 
| 209 | 
            -
              );
         | 
| 210 | 
            -
            }
         | 
| 211 | 
            -
            ```
         | 
| 212 | 
            -
             | 
| 213 104 | 
             
            ## Templates
         | 
| 214 105 |  | 
| 215 106 | 
             
            We've built [templates](https://vercel.com/templates?type=ai) that include AI SDK integrations for different use cases, providers, and frameworks. You can use these templates to get started with your AI-powered application.
         | 
    
        package/dist/index.d.mts
    CHANGED
    
    | @@ -1,6 +1,6 @@ | |
| 1 1 | 
             
            import { Schema, DeepPartial, ToolInvocation, Attachment, JSONValue as JSONValue$1, CreateMessage, FunctionCall as FunctionCall$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
         | 
| 2 2 | 
             
            export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, Function, FunctionCall, FunctionCallHandler, IdGenerator, JSONValue, Message, RequestOptions, Schema, StreamPart, Tool, ToolCall, ToolCallHandler, ToolChoice, ToolInvocation, UseAssistantOptions, formatStreamPart, jsonSchema, parseStreamPart, processDataProtocolResponse, readDataStream } from '@ai-sdk/ui-utils';
         | 
| 3 | 
            -
            import { AttributeValue } from '@opentelemetry/api';
         | 
| 3 | 
            +
            import { AttributeValue, Tracer } from '@opentelemetry/api';
         | 
| 4 4 | 
             
            import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
         | 
| 5 5 | 
             
            export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1Prompt, LanguageModelV1StreamPart, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
         | 
| 6 6 | 
             
            import { z } from 'zod';
         | 
| @@ -39,6 +39,10 @@ type TelemetrySettings = { | |
| 39 39 | 
             
                 * Additional information to include in the telemetry data.
         | 
| 40 40 | 
             
                 */
         | 
| 41 41 | 
             
                metadata?: Record<string, AttributeValue>;
         | 
| 42 | 
            +
                /**
         | 
| 43 | 
            +
                 * A custom tracer to use for the telemetry data.
         | 
| 44 | 
            +
                 */
         | 
| 45 | 
            +
                tracer?: Tracer;
         | 
| 42 46 | 
             
            };
         | 
| 43 47 |  | 
| 44 48 | 
             
            /**
         | 
| @@ -1334,6 +1338,9 @@ type ToToolResultObject<TOOLS extends Record<string, CoreTool>> = ValueOf<{ | |
| 1334 1338 | 
             
            type ToToolResult<TOOLS extends Record<string, CoreTool>> = ToToolResultObject<ToToolsWithDefinedExecute<ToToolsWithExecute<TOOLS>>>;
         | 
| 1335 1339 | 
             
            type ToToolResultArray<TOOLS extends Record<string, CoreTool>> = Array<ToToolResult<TOOLS>>;
         | 
| 1336 1340 |  | 
| 1341 | 
            +
            /**
         | 
| 1342 | 
            +
             * The result of a single step in the generation process.
         | 
| 1343 | 
            +
             */
         | 
| 1337 1344 | 
             
            type StepResult<TOOLS extends Record<string, CoreTool>> = {
         | 
| 1338 1345 | 
             
                /**
         | 
| 1339 1346 | 
             
              The generated text.
         | 
| @@ -3175,4 +3182,4 @@ declare const generateId: (size?: number) => string; | |
| 3175 3182 | 
             
             */
         | 
| 3176 3183 | 
             
            declare const nanoid: (size?: number) => string;
         | 
| 3177 3184 |  | 
| 3178 | 
            -
            export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantResponse, CallWarning, CohereStream, CompletionTokenUsage, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolChoice, CoreToolMessage, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, MistralStream, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, OpenAIStream, OpenAIStreamCallbacks, Provider, ProviderMetadata, ReplicateStream, RetryError, StreamData, StreamObjectResult, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolCallPayload, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createCallbacksTransformer, createEventStreamTransformer, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, experimental_StreamData, 
experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, experimental_wrapLanguageModel, generateId, generateObject, generateText, nanoid, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
         | 
| 3185 | 
            +
            export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantResponse, CallWarning, CohereStream, CompletionTokenUsage, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolChoice, CoreToolMessage, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, MistralStream, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, OpenAIStream, OpenAIStreamCallbacks, Provider, ProviderMetadata, ReplicateStream, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolCallPayload, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createCallbacksTransformer, createEventStreamTransformer, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, 
experimental_StreamData, experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, experimental_wrapLanguageModel, generateId, generateObject, generateText, nanoid, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
         | 
    
        package/dist/index.d.ts
    CHANGED
    
    | @@ -1,6 +1,6 @@ | |
| 1 1 | 
             
            import { Schema, DeepPartial, ToolInvocation, Attachment, JSONValue as JSONValue$1, CreateMessage, FunctionCall as FunctionCall$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
         | 
| 2 2 | 
             
            export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DeepPartial, Function, FunctionCall, FunctionCallHandler, IdGenerator, JSONValue, Message, RequestOptions, Schema, StreamPart, Tool, ToolCall, ToolCallHandler, ToolChoice, ToolInvocation, UseAssistantOptions, formatStreamPart, jsonSchema, parseStreamPart, processDataProtocolResponse, readDataStream } from '@ai-sdk/ui-utils';
         | 
| 3 | 
            -
            import { AttributeValue } from '@opentelemetry/api';
         | 
| 3 | 
            +
            import { AttributeValue, Tracer } from '@opentelemetry/api';
         | 
| 4 4 | 
             
            import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
         | 
| 5 5 | 
             
            export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1Prompt, LanguageModelV1StreamPart, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
         | 
| 6 6 | 
             
            import { z } from 'zod';
         | 
| @@ -39,6 +39,10 @@ type TelemetrySettings = { | |
| 39 39 | 
             
                 * Additional information to include in the telemetry data.
         | 
| 40 40 | 
             
                 */
         | 
| 41 41 | 
             
                metadata?: Record<string, AttributeValue>;
         | 
| 42 | 
            +
                /**
         | 
| 43 | 
            +
                 * A custom tracer to use for the telemetry data.
         | 
| 44 | 
            +
                 */
         | 
| 45 | 
            +
                tracer?: Tracer;
         | 
| 42 46 | 
             
            };
         | 
| 43 47 |  | 
| 44 48 | 
             
            /**
         | 
| @@ -1334,6 +1338,9 @@ type ToToolResultObject<TOOLS extends Record<string, CoreTool>> = ValueOf<{ | |
| 1334 1338 | 
             
            type ToToolResult<TOOLS extends Record<string, CoreTool>> = ToToolResultObject<ToToolsWithDefinedExecute<ToToolsWithExecute<TOOLS>>>;
         | 
| 1335 1339 | 
             
            type ToToolResultArray<TOOLS extends Record<string, CoreTool>> = Array<ToToolResult<TOOLS>>;
         | 
| 1336 1340 |  | 
| 1341 | 
            +
            /**
         | 
| 1342 | 
            +
             * The result of a single step in the generation process.
         | 
| 1343 | 
            +
             */
         | 
| 1337 1344 | 
             
            type StepResult<TOOLS extends Record<string, CoreTool>> = {
         | 
| 1338 1345 | 
             
                /**
         | 
| 1339 1346 | 
             
              The generated text.
         | 
| @@ -3175,4 +3182,4 @@ declare const generateId: (size?: number) => string; | |
| 3175 3182 | 
             
             */
         | 
| 3176 3183 | 
             
            declare const nanoid: (size?: number) => string;
         | 
| 3177 3184 |  | 
| 3178 | 
            -
            export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantResponse, CallWarning, CohereStream, CompletionTokenUsage, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolChoice, CoreToolMessage, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, MistralStream, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, OpenAIStream, OpenAIStreamCallbacks, Provider, ProviderMetadata, ReplicateStream, RetryError, StreamData, StreamObjectResult, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolCallPayload, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createCallbacksTransformer, createEventStreamTransformer, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, experimental_StreamData, 
experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, experimental_wrapLanguageModel, generateId, generateObject, generateText, nanoid, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
         | 
| 3185 | 
            +
            export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantResponse, CallWarning, CohereStream, CompletionTokenUsage, CompletionUsage, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, CoreToolChoice, CoreToolMessage, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, EmbeddingTokenUsage, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LangChainStream, LanguageModel, LanguageModelResponseMetadata, LanguageModelResponseMetadataWithHeaders, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, MistralStream, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, OpenAIStream, OpenAIStreamCallbacks, Provider, ProviderMetadata, ReplicateStream, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, TokenUsage, ToolCallPart, ToolCallPayload, ToolContent, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createCallbacksTransformer, createEventStreamTransformer, createStreamDataTransformer, embed, embedMany, experimental_AssistantResponse, experimental_ModelRegistry, experimental_Provider, experimental_ProviderRegistry, 
experimental_StreamData, experimental_createModelRegistry, experimental_createProviderRegistry, experimental_customProvider, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, experimental_wrapLanguageModel, generateId, generateObject, generateText, nanoid, readableFromAsyncIterable, streamObject, streamText, streamToResponse, tool, trimStartOfStreamHelper };
         | 
    
        package/dist/index.js
    CHANGED
    
    | @@ -316,13 +316,15 @@ var noopSpanContext = { | |
| 316 316 | 
             
            };
         | 
| 317 317 |  | 
| 318 318 | 
             
            // core/telemetry/get-tracer.ts
         | 
| 319 | 
            -
             | 
| 320 | 
            -
             | 
| 319 | 
            +
            function getTracer({
         | 
| 320 | 
            +
              isEnabled = false,
         | 
| 321 | 
            +
              tracer
         | 
| 322 | 
            +
            } = {}) {
         | 
| 321 323 | 
             
              if (!isEnabled) {
         | 
| 322 324 | 
             
                return noopTracer;
         | 
| 323 325 | 
             
              }
         | 
| 324 | 
            -
              if (testTracer) { | 
| 325 | 
            -
                return testTracer; | 
| 326 | 
            +
              if (tracer) {
         | 
| 327 | 
            +
                return tracer;
         | 
| 326 328 | 
             
              }
         | 
| 327 329 | 
             
              return import_api.trace.getTracer("ai");
         | 
| 328 330 | 
             
            }
         | 
| @@ -402,14 +404,13 @@ async function embed({ | |
| 402 404 | 
             
              headers,
         | 
| 403 405 | 
             
              experimental_telemetry: telemetry
         | 
| 404 406 | 
             
            }) {
         | 
| 405 | 
            -
              var _a11;
         | 
| 406 407 | 
             
              const baseTelemetryAttributes = getBaseTelemetryAttributes({
         | 
| 407 408 | 
             
                model,
         | 
| 408 409 | 
             
                telemetry,
         | 
| 409 410 | 
             
                headers,
         | 
| 410 411 | 
             
                settings: { maxRetries }
         | 
| 411 412 | 
             
              });
         | 
| 412 | 
            -
              const tracer = getTracer({ isEnabled: (_a11 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a11 : false }); | 
| 413 | 
            +
              const tracer = getTracer(telemetry);
         | 
| 413 414 | 
             
              return recordSpan({
         | 
| 414 415 | 
             
                name: "ai.embed",
         | 
| 415 416 | 
             
                attributes: selectTelemetryAttributes({
         | 
| @@ -442,14 +443,14 @@ async function embed({ | |
| 442 443 | 
             
                        }),
         | 
| 443 444 | 
             
                        tracer,
         | 
| 444 445 | 
             
                        fn: async (doEmbedSpan) => {
         | 
| 445 | 
            -
                          var _a12; | 
| 446 | 
            +
                          var _a11;
         | 
| 446 447 | 
             
                          const modelResponse = await model.doEmbed({
         | 
| 447 448 | 
             
                            values: [value],
         | 
| 448 449 | 
             
                            abortSignal,
         | 
| 449 450 | 
             
                            headers
         | 
| 450 451 | 
             
                          });
         | 
| 451 452 | 
             
                          const embedding2 = modelResponse.embeddings[0];
         | 
| 452 | 
            -
                          const usage2 = (_a12 = modelResponse.usage) != null ? _a12 : { tokens: NaN }; | 
| 453 | 
            +
                          const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
         | 
| 453 454 | 
             
                          doEmbedSpan.setAttributes(
         | 
| 454 455 | 
             
                            selectTelemetryAttributes({
         | 
| 455 456 | 
             
                              telemetry,
         | 
| @@ -515,14 +516,13 @@ async function embedMany({ | |
| 515 516 | 
             
              headers,
         | 
| 516 517 | 
             
              experimental_telemetry: telemetry
         | 
| 517 518 | 
             
            }) {
         | 
| 518 | 
            -
              var _a11;
         | 
| 519 519 | 
             
              const baseTelemetryAttributes = getBaseTelemetryAttributes({
         | 
| 520 520 | 
             
                model,
         | 
| 521 521 | 
             
                telemetry,
         | 
| 522 522 | 
             
                headers,
         | 
| 523 523 | 
             
                settings: { maxRetries }
         | 
| 524 524 | 
             
              });
         | 
| 525 | 
            -
              const tracer = getTracer( | 
| 525 | 
            +
              const tracer = getTracer(telemetry);
         | 
| 526 526 | 
             
              return recordSpan({
         | 
| 527 527 | 
             
                name: "ai.embedMany",
         | 
| 528 528 | 
             
                attributes: selectTelemetryAttributes({
         | 
| @@ -560,14 +560,14 @@ async function embedMany({ | |
| 560 560 | 
             
                        }),
         | 
| 561 561 | 
             
                        tracer,
         | 
| 562 562 | 
             
                        fn: async (doEmbedSpan) => {
         | 
| 563 | 
            -
                          var  | 
| 563 | 
            +
                          var _a11;
         | 
| 564 564 | 
             
                          const modelResponse = await model.doEmbed({
         | 
| 565 565 | 
             
                            values,
         | 
| 566 566 | 
             
                            abortSignal,
         | 
| 567 567 | 
             
                            headers
         | 
| 568 568 | 
             
                          });
         | 
| 569 569 | 
             
                          const embeddings3 = modelResponse.embeddings;
         | 
| 570 | 
            -
                          const usage2 = ( | 
| 570 | 
            +
                          const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
         | 
| 571 571 | 
             
                          doEmbedSpan.setAttributes(
         | 
| 572 572 | 
             
                            selectTelemetryAttributes({
         | 
| 573 573 | 
             
                              telemetry,
         | 
| @@ -619,14 +619,14 @@ async function embedMany({ | |
| 619 619 | 
             
                        }),
         | 
| 620 620 | 
             
                        tracer,
         | 
| 621 621 | 
             
                        fn: async (doEmbedSpan) => {
         | 
| 622 | 
            -
                          var  | 
| 622 | 
            +
                          var _a11;
         | 
| 623 623 | 
             
                          const modelResponse = await model.doEmbed({
         | 
| 624 624 | 
             
                            values: chunk,
         | 
| 625 625 | 
             
                            abortSignal,
         | 
| 626 626 | 
             
                            headers
         | 
| 627 627 | 
             
                          });
         | 
| 628 628 | 
             
                          const embeddings2 = modelResponse.embeddings;
         | 
| 629 | 
            -
                          const usage2 = ( | 
| 629 | 
            +
                          const usage2 = (_a11 = modelResponse.usage) != null ? _a11 : { tokens: NaN };
         | 
| 630 630 | 
             
                          doEmbedSpan.setAttributes(
         | 
| 631 631 | 
             
                            selectTelemetryAttributes({
         | 
| 632 632 | 
             
                              telemetry,
         | 
| @@ -1982,7 +1982,6 @@ async function generateObject({ | |
| 1982 1982 | 
             
              } = {},
         | 
| 1983 1983 | 
             
              ...settings
         | 
| 1984 1984 | 
             
            }) {
         | 
| 1985 | 
            -
              var _a11;
         | 
| 1986 1985 | 
             
              validateObjectGenerationInput({
         | 
| 1987 1986 | 
             
                output,
         | 
| 1988 1987 | 
             
                mode,
         | 
| @@ -2005,7 +2004,7 @@ async function generateObject({ | |
| 2005 2004 | 
             
                headers,
         | 
| 2006 2005 | 
             
                settings: { ...settings, maxRetries }
         | 
| 2007 2006 | 
             
              });
         | 
| 2008 | 
            -
              const tracer = getTracer( | 
| 2007 | 
            +
              const tracer = getTracer(telemetry);
         | 
| 2009 2008 | 
             
              return recordSpan({
         | 
| 2010 2009 | 
             
                name: "ai.generateObject",
         | 
| 2011 2010 | 
             
                attributes: selectTelemetryAttributes({
         | 
| @@ -2087,7 +2086,7 @@ async function generateObject({ | |
| 2087 2086 | 
             
                          }),
         | 
| 2088 2087 | 
             
                          tracer,
         | 
| 2089 2088 | 
             
                          fn: async (span2) => {
         | 
| 2090 | 
            -
                            var  | 
| 2089 | 
            +
                            var _a11, _b, _c, _d, _e, _f;
         | 
| 2091 2090 | 
             
                            const result2 = await model.doGenerate({
         | 
| 2092 2091 | 
             
                              mode: {
         | 
| 2093 2092 | 
             
                                type: "object-json",
         | 
| @@ -2106,7 +2105,7 @@ async function generateObject({ | |
| 2106 2105 | 
             
                              throw new NoObjectGeneratedError();
         | 
| 2107 2106 | 
             
                            }
         | 
| 2108 2107 | 
             
                            const responseData = {
         | 
| 2109 | 
            -
                              id: (_b = ( | 
| 2108 | 
            +
                              id: (_b = (_a11 = result2.response) == null ? void 0 : _a11.id) != null ? _b : generateId3(),
         | 
| 2110 2109 | 
             
                              timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
         | 
| 2111 2110 | 
             
                              modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
         | 
| 2112 2111 | 
             
                            };
         | 
| @@ -2189,7 +2188,7 @@ async function generateObject({ | |
| 2189 2188 | 
             
                          }),
         | 
| 2190 2189 | 
             
                          tracer,
         | 
| 2191 2190 | 
             
                          fn: async (span2) => {
         | 
| 2192 | 
            -
                            var  | 
| 2191 | 
            +
                            var _a11, _b, _c, _d, _e, _f, _g, _h;
         | 
| 2193 2192 | 
             
                            const result2 = await model.doGenerate({
         | 
| 2194 2193 | 
             
                              mode: {
         | 
| 2195 2194 | 
             
                                type: "object-tool",
         | 
| @@ -2207,7 +2206,7 @@ async function generateObject({ | |
| 2207 2206 | 
             
                              abortSignal,
         | 
| 2208 2207 | 
             
                              headers
         | 
| 2209 2208 | 
             
                            });
         | 
| 2210 | 
            -
                            const objectText = (_b = ( | 
| 2209 | 
            +
                            const objectText = (_b = (_a11 = result2.toolCalls) == null ? void 0 : _a11[0]) == null ? void 0 : _b.args;
         | 
| 2211 2210 | 
             
                            if (objectText === void 0) {
         | 
| 2212 2211 | 
             
                              throw new NoObjectGeneratedError();
         | 
| 2213 2212 | 
             
                            }
         | 
| @@ -2332,6 +2331,7 @@ var DefaultGenerateObjectResult = class { | |
| 2332 2331 | 
             
            var experimental_generateObject = generateObject;
         | 
| 2333 2332 |  | 
| 2334 2333 | 
             
            // core/generate-object/stream-object.ts
         | 
| 2334 | 
            +
            var import_provider_utils7 = require("@ai-sdk/provider-utils");
         | 
| 2335 2335 | 
             
            var import_ui_utils2 = require("@ai-sdk/ui-utils");
         | 
| 2336 2336 |  | 
| 2337 2337 | 
             
            // util/create-resolvable-promise.ts
         | 
| @@ -2393,9 +2393,6 @@ function now() { | |
| 2393 2393 | 
             
              return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
         | 
| 2394 2394 | 
             
            }
         | 
| 2395 2395 |  | 
| 2396 | 
            -
            // core/generate-object/stream-object.ts
         | 
| 2397 | 
            -
            var import_provider_utils7 = require("@ai-sdk/provider-utils");
         | 
| 2398 | 
            -
             | 
| 2399 2396 | 
             
            // core/util/prepare-outgoing-http-headers.ts
         | 
| 2400 2397 | 
             
            function prepareOutgoingHttpHeaders(init, {
         | 
| 2401 2398 | 
             
              contentType,
         | 
| @@ -2468,7 +2465,6 @@ async function streamObject({ | |
| 2468 2465 | 
             
              } = {},
         | 
| 2469 2466 | 
             
              ...settings
         | 
| 2470 2467 | 
             
            }) {
         | 
| 2471 | 
            -
              var _a11;
         | 
| 2472 2468 | 
             
              validateObjectGenerationInput({
         | 
| 2473 2469 | 
             
                output,
         | 
| 2474 2470 | 
             
                mode,
         | 
| @@ -2486,7 +2482,7 @@ async function streamObject({ | |
| 2486 2482 | 
             
                headers,
         | 
| 2487 2483 | 
             
                settings: { ...settings, maxRetries }
         | 
| 2488 2484 | 
             
              });
         | 
| 2489 | 
            -
              const tracer = getTracer( | 
| 2485 | 
            +
              const tracer = getTracer(telemetry);
         | 
| 2490 2486 | 
             
              const retry = retryWithExponentialBackoff({ maxRetries });
         | 
| 2491 2487 | 
             
              return recordSpan({
         | 
| 2492 2488 | 
             
                name: "ai.streamObject",
         | 
| @@ -3189,7 +3185,6 @@ async function generateText({ | |
| 3189 3185 | 
             
              onStepFinish,
         | 
| 3190 3186 | 
             
              ...settings
         | 
| 3191 3187 | 
             
            }) {
         | 
| 3192 | 
            -
              var _a11;
         | 
| 3193 3188 | 
             
              if (maxSteps < 1) {
         | 
| 3194 3189 | 
             
                throw new InvalidArgumentError({
         | 
| 3195 3190 | 
             
                  parameter: "maxSteps",
         | 
| @@ -3203,7 +3198,7 @@ async function generateText({ | |
| 3203 3198 | 
             
                headers,
         | 
| 3204 3199 | 
             
                settings: { ...settings, maxRetries }
         | 
| 3205 3200 | 
             
              });
         | 
| 3206 | 
            -
              const tracer = getTracer( | 
| 3201 | 
            +
              const tracer = getTracer(telemetry);
         | 
| 3207 3202 | 
             
              return recordSpan({
         | 
| 3208 3203 | 
             
                name: "ai.generateText",
         | 
| 3209 3204 | 
             
                attributes: selectTelemetryAttributes({
         | 
| @@ -3223,7 +3218,7 @@ async function generateText({ | |
| 3223 3218 | 
             
                }),
         | 
| 3224 3219 | 
             
                tracer,
         | 
| 3225 3220 | 
             
                fn: async (span) => {
         | 
| 3226 | 
            -
                  var  | 
| 3221 | 
            +
                  var _a11, _b, _c, _d, _e;
         | 
| 3227 3222 | 
             
                  const retry = retryWithExponentialBackoff({ maxRetries });
         | 
| 3228 3223 | 
             
                  const validatedPrompt = validatePrompt({
         | 
| 3229 3224 | 
             
                    system,
         | 
| @@ -3283,7 +3278,7 @@ async function generateText({ | |
| 3283 3278 | 
             
                        }),
         | 
| 3284 3279 | 
             
                        tracer,
         | 
| 3285 3280 | 
             
                        fn: async (span2) => {
         | 
| 3286 | 
            -
                          var  | 
| 3281 | 
            +
                          var _a12, _b2, _c2, _d2, _e2, _f;
         | 
| 3287 3282 | 
             
                          const result = await model.doGenerate({
         | 
| 3288 3283 | 
             
                            mode,
         | 
| 3289 3284 | 
             
                            ...callSettings,
         | 
| @@ -3294,7 +3289,7 @@ async function generateText({ | |
| 3294 3289 | 
             
                            headers
         | 
| 3295 3290 | 
             
                          });
         | 
| 3296 3291 | 
             
                          const responseData = {
         | 
| 3297 | 
            -
                            id: (_b2 = ( | 
| 3292 | 
            +
                            id: (_b2 = (_a12 = result.response) == null ? void 0 : _a12.id) != null ? _b2 : generateId3(),
         | 
| 3298 3293 | 
             
                            timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
         | 
| 3299 3294 | 
             
                            modelId: (_f = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f : model.modelId
         | 
| 3300 3295 | 
             
                          };
         | 
| @@ -3335,7 +3330,7 @@ async function generateText({ | |
| 3335 3330 | 
             
                        }
         | 
| 3336 3331 | 
             
                      })
         | 
| 3337 3332 | 
             
                    );
         | 
| 3338 | 
            -
                    currentToolCalls = (( | 
| 3333 | 
            +
                    currentToolCalls = ((_a11 = currentModelResponse.toolCalls) != null ? _a11 : []).map(
         | 
| 3339 3334 | 
             
                      (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
         | 
| 3340 3335 | 
             
                    );
         | 
| 3341 3336 | 
             
                    currentToolResults = tools == null ? [] : await executeTools({
         | 
| @@ -3918,7 +3913,6 @@ async function streamText({ | |
| 3918 3913 | 
             
              } = {},
         | 
| 3919 3914 | 
             
              ...settings
         | 
| 3920 3915 | 
             
            }) {
         | 
| 3921 | 
            -
              var _a11;
         | 
| 3922 3916 | 
             
              if (maxSteps < 1) {
         | 
| 3923 3917 | 
             
                throw new InvalidArgumentError({
         | 
| 3924 3918 | 
             
                  parameter: "maxSteps",
         | 
| @@ -3932,7 +3926,7 @@ async function streamText({ | |
| 3932 3926 | 
             
                headers,
         | 
| 3933 3927 | 
             
                settings: { ...settings, maxRetries }
         | 
| 3934 3928 | 
             
              });
         | 
| 3935 | 
            -
              const tracer = getTracer( | 
| 3929 | 
            +
              const tracer = getTracer(telemetry);
         | 
| 3936 3930 | 
             
              return recordSpan({
         | 
| 3937 3931 | 
             
                name: "ai.streamText",
         | 
| 3938 3932 | 
             
                attributes: selectTelemetryAttributes({
         |