ai 6.0.31 → 6.0.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +1 -1
- package/dist/index.mjs +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/docs/00-introduction/index.mdx +76 -0
- package/docs/02-foundations/01-overview.mdx +43 -0
- package/docs/02-foundations/02-providers-and-models.mdx +163 -0
- package/docs/02-foundations/03-prompts.mdx +620 -0
- package/docs/02-foundations/04-tools.mdx +160 -0
- package/docs/02-foundations/05-streaming.mdx +62 -0
- package/docs/02-foundations/index.mdx +43 -0
- package/docs/02-getting-started/00-choosing-a-provider.mdx +110 -0
- package/docs/02-getting-started/01-navigating-the-library.mdx +85 -0
- package/docs/02-getting-started/02-nextjs-app-router.mdx +556 -0
- package/docs/02-getting-started/03-nextjs-pages-router.mdx +542 -0
- package/docs/02-getting-started/04-svelte.mdx +627 -0
- package/docs/02-getting-started/05-nuxt.mdx +566 -0
- package/docs/02-getting-started/06-nodejs.mdx +512 -0
- package/docs/02-getting-started/07-expo.mdx +766 -0
- package/docs/02-getting-started/08-tanstack-start.mdx +583 -0
- package/docs/02-getting-started/index.mdx +44 -0
- package/docs/03-agents/01-overview.mdx +96 -0
- package/docs/03-agents/02-building-agents.mdx +367 -0
- package/docs/03-agents/03-workflows.mdx +370 -0
- package/docs/03-agents/04-loop-control.mdx +350 -0
- package/docs/03-agents/05-configuring-call-options.mdx +286 -0
- package/docs/03-agents/index.mdx +40 -0
- package/docs/03-ai-sdk-core/01-overview.mdx +33 -0
- package/docs/03-ai-sdk-core/05-generating-text.mdx +600 -0
- package/docs/03-ai-sdk-core/10-generating-structured-data.mdx +662 -0
- package/docs/03-ai-sdk-core/15-tools-and-tool-calling.mdx +1102 -0
- package/docs/03-ai-sdk-core/16-mcp-tools.mdx +375 -0
- package/docs/03-ai-sdk-core/20-prompt-engineering.mdx +144 -0
- package/docs/03-ai-sdk-core/25-settings.mdx +198 -0
- package/docs/03-ai-sdk-core/30-embeddings.mdx +247 -0
- package/docs/03-ai-sdk-core/31-reranking.mdx +218 -0
- package/docs/03-ai-sdk-core/35-image-generation.mdx +341 -0
- package/docs/03-ai-sdk-core/36-transcription.mdx +173 -0
- package/docs/03-ai-sdk-core/37-speech.mdx +167 -0
- package/docs/03-ai-sdk-core/40-middleware.mdx +480 -0
- package/docs/03-ai-sdk-core/45-provider-management.mdx +349 -0
- package/docs/03-ai-sdk-core/50-error-handling.mdx +149 -0
- package/docs/03-ai-sdk-core/55-testing.mdx +218 -0
- package/docs/03-ai-sdk-core/60-telemetry.mdx +313 -0
- package/docs/03-ai-sdk-core/65-devtools.mdx +107 -0
- package/docs/03-ai-sdk-core/index.mdx +88 -0
- package/docs/04-ai-sdk-ui/01-overview.mdx +44 -0
- package/docs/04-ai-sdk-ui/02-chatbot.mdx +1313 -0
- package/docs/04-ai-sdk-ui/03-chatbot-message-persistence.mdx +535 -0
- package/docs/04-ai-sdk-ui/03-chatbot-resume-streams.mdx +263 -0
- package/docs/04-ai-sdk-ui/03-chatbot-tool-usage.mdx +682 -0
- package/docs/04-ai-sdk-ui/04-generative-user-interfaces.mdx +389 -0
- package/docs/04-ai-sdk-ui/05-completion.mdx +186 -0
- package/docs/04-ai-sdk-ui/08-object-generation.mdx +344 -0
- package/docs/04-ai-sdk-ui/20-streaming-data.mdx +397 -0
- package/docs/04-ai-sdk-ui/21-error-handling.mdx +190 -0
- package/docs/04-ai-sdk-ui/21-transport.mdx +174 -0
- package/docs/04-ai-sdk-ui/24-reading-ui-message-streams.mdx +104 -0
- package/docs/04-ai-sdk-ui/25-message-metadata.mdx +152 -0
- package/docs/04-ai-sdk-ui/50-stream-protocol.mdx +477 -0
- package/docs/04-ai-sdk-ui/index.mdx +64 -0
- package/docs/05-ai-sdk-rsc/01-overview.mdx +45 -0
- package/docs/05-ai-sdk-rsc/02-streaming-react-components.mdx +209 -0
- package/docs/05-ai-sdk-rsc/03-generative-ui-state.mdx +279 -0
- package/docs/05-ai-sdk-rsc/03-saving-and-restoring-states.mdx +105 -0
- package/docs/05-ai-sdk-rsc/04-multistep-interfaces.mdx +282 -0
- package/docs/05-ai-sdk-rsc/05-streaming-values.mdx +158 -0
- package/docs/05-ai-sdk-rsc/06-loading-state.mdx +273 -0
- package/docs/05-ai-sdk-rsc/08-error-handling.mdx +96 -0
- package/docs/05-ai-sdk-rsc/09-authentication.mdx +42 -0
- package/docs/05-ai-sdk-rsc/10-migrating-to-ui.mdx +722 -0
- package/docs/05-ai-sdk-rsc/index.mdx +58 -0
- package/docs/06-advanced/01-prompt-engineering.mdx +96 -0
- package/docs/06-advanced/02-stopping-streams.mdx +184 -0
- package/docs/06-advanced/03-backpressure.mdx +173 -0
- package/docs/06-advanced/04-caching.mdx +169 -0
- package/docs/06-advanced/05-multiple-streamables.mdx +68 -0
- package/docs/06-advanced/06-rate-limiting.mdx +60 -0
- package/docs/06-advanced/07-rendering-ui-with-language-models.mdx +213 -0
- package/docs/06-advanced/08-model-as-router.mdx +120 -0
- package/docs/06-advanced/09-multistep-interfaces.mdx +115 -0
- package/docs/06-advanced/09-sequential-generations.mdx +55 -0
- package/docs/06-advanced/10-vercel-deployment-guide.mdx +117 -0
- package/docs/06-advanced/index.mdx +11 -0
- package/docs/07-reference/01-ai-sdk-core/01-generate-text.mdx +2142 -0
- package/docs/07-reference/01-ai-sdk-core/02-stream-text.mdx +3215 -0
- package/docs/07-reference/01-ai-sdk-core/03-generate-object.mdx +780 -0
- package/docs/07-reference/01-ai-sdk-core/04-stream-object.mdx +1140 -0
- package/docs/07-reference/01-ai-sdk-core/05-embed.mdx +190 -0
- package/docs/07-reference/01-ai-sdk-core/06-embed-many.mdx +171 -0
- package/docs/07-reference/01-ai-sdk-core/06-rerank.mdx +309 -0
- package/docs/07-reference/01-ai-sdk-core/10-generate-image.mdx +227 -0
- package/docs/07-reference/01-ai-sdk-core/11-transcribe.mdx +138 -0
- package/docs/07-reference/01-ai-sdk-core/12-generate-speech.mdx +214 -0
- package/docs/07-reference/01-ai-sdk-core/15-agent.mdx +203 -0
- package/docs/07-reference/01-ai-sdk-core/16-tool-loop-agent.mdx +449 -0
- package/docs/07-reference/01-ai-sdk-core/17-create-agent-ui-stream.mdx +148 -0
- package/docs/07-reference/01-ai-sdk-core/18-create-agent-ui-stream-response.mdx +168 -0
- package/docs/07-reference/01-ai-sdk-core/18-pipe-agent-ui-stream-to-response.mdx +144 -0
- package/docs/07-reference/01-ai-sdk-core/20-tool.mdx +196 -0
- package/docs/07-reference/01-ai-sdk-core/22-dynamic-tool.mdx +175 -0
- package/docs/07-reference/01-ai-sdk-core/23-create-mcp-client.mdx +410 -0
- package/docs/07-reference/01-ai-sdk-core/24-mcp-stdio-transport.mdx +68 -0
- package/docs/07-reference/01-ai-sdk-core/25-json-schema.mdx +94 -0
- package/docs/07-reference/01-ai-sdk-core/26-zod-schema.mdx +109 -0
- package/docs/07-reference/01-ai-sdk-core/27-valibot-schema.mdx +55 -0
- package/docs/07-reference/01-ai-sdk-core/28-output.mdx +342 -0
- package/docs/07-reference/01-ai-sdk-core/30-model-message.mdx +415 -0
- package/docs/07-reference/01-ai-sdk-core/31-ui-message.mdx +246 -0
- package/docs/07-reference/01-ai-sdk-core/32-validate-ui-messages.mdx +101 -0
- package/docs/07-reference/01-ai-sdk-core/33-safe-validate-ui-messages.mdx +113 -0
- package/docs/07-reference/01-ai-sdk-core/40-provider-registry.mdx +182 -0
- package/docs/07-reference/01-ai-sdk-core/42-custom-provider.mdx +121 -0
- package/docs/07-reference/01-ai-sdk-core/50-cosine-similarity.mdx +52 -0
- package/docs/07-reference/01-ai-sdk-core/60-wrap-language-model.mdx +59 -0
- package/docs/07-reference/01-ai-sdk-core/61-wrap-image-model.mdx +64 -0
- package/docs/07-reference/01-ai-sdk-core/65-language-model-v2-middleware.mdx +46 -0
- package/docs/07-reference/01-ai-sdk-core/66-extract-reasoning-middleware.mdx +68 -0
- package/docs/07-reference/01-ai-sdk-core/67-simulate-streaming-middleware.mdx +71 -0
- package/docs/07-reference/01-ai-sdk-core/68-default-settings-middleware.mdx +80 -0
- package/docs/07-reference/01-ai-sdk-core/69-add-tool-input-examples-middleware.mdx +155 -0
- package/docs/07-reference/01-ai-sdk-core/70-extract-json-middleware.mdx +147 -0
- package/docs/07-reference/01-ai-sdk-core/70-step-count-is.mdx +84 -0
- package/docs/07-reference/01-ai-sdk-core/71-has-tool-call.mdx +120 -0
- package/docs/07-reference/01-ai-sdk-core/75-simulate-readable-stream.mdx +94 -0
- package/docs/07-reference/01-ai-sdk-core/80-smooth-stream.mdx +145 -0
- package/docs/07-reference/01-ai-sdk-core/90-generate-id.mdx +43 -0
- package/docs/07-reference/01-ai-sdk-core/91-create-id-generator.mdx +89 -0
- package/docs/07-reference/01-ai-sdk-core/index.mdx +159 -0
- package/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx +446 -0
- package/docs/07-reference/02-ai-sdk-ui/02-use-completion.mdx +179 -0
- package/docs/07-reference/02-ai-sdk-ui/03-use-object.mdx +178 -0
- package/docs/07-reference/02-ai-sdk-ui/31-convert-to-model-messages.mdx +230 -0
- package/docs/07-reference/02-ai-sdk-ui/32-prune-messages.mdx +108 -0
- package/docs/07-reference/02-ai-sdk-ui/40-create-ui-message-stream.mdx +151 -0
- package/docs/07-reference/02-ai-sdk-ui/41-create-ui-message-stream-response.mdx +113 -0
- package/docs/07-reference/02-ai-sdk-ui/42-pipe-ui-message-stream-to-response.mdx +73 -0
- package/docs/07-reference/02-ai-sdk-ui/43-read-ui-message-stream.mdx +57 -0
- package/docs/07-reference/02-ai-sdk-ui/46-infer-ui-tools.mdx +99 -0
- package/docs/07-reference/02-ai-sdk-ui/47-infer-ui-tool.mdx +75 -0
- package/docs/07-reference/02-ai-sdk-ui/50-direct-chat-transport.mdx +333 -0
- package/docs/07-reference/02-ai-sdk-ui/index.mdx +89 -0
- package/docs/07-reference/03-ai-sdk-rsc/01-stream-ui.mdx +767 -0
- package/docs/07-reference/03-ai-sdk-rsc/02-create-ai.mdx +90 -0
- package/docs/07-reference/03-ai-sdk-rsc/03-create-streamable-ui.mdx +91 -0
- package/docs/07-reference/03-ai-sdk-rsc/04-create-streamable-value.mdx +48 -0
- package/docs/07-reference/03-ai-sdk-rsc/05-read-streamable-value.mdx +78 -0
- package/docs/07-reference/03-ai-sdk-rsc/06-get-ai-state.mdx +50 -0
- package/docs/07-reference/03-ai-sdk-rsc/07-get-mutable-ai-state.mdx +70 -0
- package/docs/07-reference/03-ai-sdk-rsc/08-use-ai-state.mdx +26 -0
- package/docs/07-reference/03-ai-sdk-rsc/09-use-actions.mdx +42 -0
- package/docs/07-reference/03-ai-sdk-rsc/10-use-ui-state.mdx +35 -0
- package/docs/07-reference/03-ai-sdk-rsc/11-use-streamable-value.mdx +46 -0
- package/docs/07-reference/03-ai-sdk-rsc/20-render.mdx +262 -0
- package/docs/07-reference/03-ai-sdk-rsc/index.mdx +67 -0
- package/docs/07-reference/04-stream-helpers/01-ai-stream.mdx +89 -0
- package/docs/07-reference/04-stream-helpers/02-streaming-text-response.mdx +79 -0
- package/docs/07-reference/04-stream-helpers/05-stream-to-response.mdx +108 -0
- package/docs/07-reference/04-stream-helpers/07-openai-stream.mdx +77 -0
- package/docs/07-reference/04-stream-helpers/08-anthropic-stream.mdx +79 -0
- package/docs/07-reference/04-stream-helpers/09-aws-bedrock-stream.mdx +91 -0
- package/docs/07-reference/04-stream-helpers/10-aws-bedrock-anthropic-stream.mdx +96 -0
- package/docs/07-reference/04-stream-helpers/10-aws-bedrock-messages-stream.mdx +96 -0
- package/docs/07-reference/04-stream-helpers/11-aws-bedrock-cohere-stream.mdx +93 -0
- package/docs/07-reference/04-stream-helpers/12-aws-bedrock-llama-2-stream.mdx +93 -0
- package/docs/07-reference/04-stream-helpers/13-cohere-stream.mdx +78 -0
- package/docs/07-reference/04-stream-helpers/14-google-generative-ai-stream.mdx +85 -0
- package/docs/07-reference/04-stream-helpers/15-hugging-face-stream.mdx +84 -0
- package/docs/07-reference/04-stream-helpers/16-langchain-adapter.mdx +98 -0
- package/docs/07-reference/04-stream-helpers/16-llamaindex-adapter.mdx +70 -0
- package/docs/07-reference/04-stream-helpers/17-mistral-stream.mdx +81 -0
- package/docs/07-reference/04-stream-helpers/18-replicate-stream.mdx +83 -0
- package/docs/07-reference/04-stream-helpers/19-inkeep-stream.mdx +80 -0
- package/docs/07-reference/04-stream-helpers/index.mdx +103 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-api-call-error.mdx +30 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-download-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-empty-response-body-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-argument-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-data-content-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-data-content.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-message-role-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-prompt-error.mdx +47 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-response-data-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-tool-approval-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-tool-input-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-json-parse-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-load-api-key-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-load-setting-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-message-conversion-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-content-generated-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-image-generated-error.mdx +36 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-object-generated-error.mdx +43 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-speech-generated-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-such-model-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-such-provider-error.mdx +28 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-such-tool-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-transcript-generated-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-retry-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-too-many-embedding-values-for-call-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-tool-call-not-found-for-approval-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-tool-call-repair-error.mdx +28 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-type-validation-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-unsupported-functionality-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/index.mdx +38 -0
- package/docs/07-reference/index.mdx +34 -0
- package/docs/08-migration-guides/00-versioning.mdx +46 -0
- package/docs/08-migration-guides/24-migration-guide-6-0.mdx +823 -0
- package/docs/08-migration-guides/25-migration-guide-5-0-data.mdx +882 -0
- package/docs/08-migration-guides/26-migration-guide-5-0.mdx +3427 -0
- package/docs/08-migration-guides/27-migration-guide-4-2.mdx +99 -0
- package/docs/08-migration-guides/28-migration-guide-4-1.mdx +14 -0
- package/docs/08-migration-guides/29-migration-guide-4-0.mdx +1157 -0
- package/docs/08-migration-guides/36-migration-guide-3-4.mdx +14 -0
- package/docs/08-migration-guides/37-migration-guide-3-3.mdx +64 -0
- package/docs/08-migration-guides/38-migration-guide-3-2.mdx +46 -0
- package/docs/08-migration-guides/39-migration-guide-3-1.mdx +168 -0
- package/docs/08-migration-guides/index.mdx +22 -0
- package/docs/09-troubleshooting/01-azure-stream-slow.mdx +33 -0
- package/docs/09-troubleshooting/02-client-side-function-calls-not-invoked.mdx +22 -0
- package/docs/09-troubleshooting/03-server-actions-in-client-components.mdx +40 -0
- package/docs/09-troubleshooting/04-strange-stream-output.mdx +36 -0
- package/docs/09-troubleshooting/05-streamable-ui-errors.mdx +16 -0
- package/docs/09-troubleshooting/05-tool-invocation-missing-result.mdx +106 -0
- package/docs/09-troubleshooting/06-streaming-not-working-when-deployed.mdx +31 -0
- package/docs/09-troubleshooting/06-streaming-not-working-when-proxied.mdx +31 -0
- package/docs/09-troubleshooting/06-timeout-on-vercel.mdx +60 -0
- package/docs/09-troubleshooting/07-unclosed-streams.mdx +34 -0
- package/docs/09-troubleshooting/08-use-chat-failed-to-parse-stream.mdx +26 -0
- package/docs/09-troubleshooting/09-client-stream-error.mdx +25 -0
- package/docs/09-troubleshooting/10-use-chat-tools-no-response.mdx +32 -0
- package/docs/09-troubleshooting/11-use-chat-custom-request-options.mdx +149 -0
- package/docs/09-troubleshooting/12-typescript-performance-zod.mdx +46 -0
- package/docs/09-troubleshooting/12-use-chat-an-error-occurred.mdx +59 -0
- package/docs/09-troubleshooting/13-repeated-assistant-messages.mdx +73 -0
- package/docs/09-troubleshooting/14-stream-abort-handling.mdx +73 -0
- package/docs/09-troubleshooting/14-tool-calling-with-structured-outputs.mdx +48 -0
- package/docs/09-troubleshooting/15-abort-breaks-resumable-streams.mdx +55 -0
- package/docs/09-troubleshooting/15-stream-text-not-working.mdx +33 -0
- package/docs/09-troubleshooting/16-streaming-status-delay.mdx +63 -0
- package/docs/09-troubleshooting/17-use-chat-stale-body-data.mdx +141 -0
- package/docs/09-troubleshooting/18-ontoolcall-type-narrowing.mdx +66 -0
- package/docs/09-troubleshooting/19-unsupported-model-version.mdx +50 -0
- package/docs/09-troubleshooting/20-no-object-generated-content-filter.mdx +72 -0
- package/docs/09-troubleshooting/30-model-is-not-assignable-to-type.mdx +21 -0
- package/docs/09-troubleshooting/40-typescript-cannot-find-namespace-jsx.mdx +24 -0
- package/docs/09-troubleshooting/50-react-maximum-update-depth-exceeded.mdx +39 -0
- package/docs/09-troubleshooting/60-jest-cannot-find-module-ai-rsc.mdx +22 -0
- package/docs/09-troubleshooting/index.mdx +11 -0
- package/package.json +8 -4
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: streamText fails silently
|
|
3
|
+
description: Troubleshooting errors related to the streamText function not working.
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# `streamText` is not working
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
I am using the [`streamText`](/docs/reference/ai-sdk-core/stream-text) function, and it does not work.
|
|
11
|
+
It does not throw any errors, and the stream contains only error parts.
|
|
12
|
+
|
|
13
|
+
## Background
|
|
14
|
+
|
|
15
|
+
`streamText` immediately starts streaming to enable sending data without waiting for the model.
|
|
16
|
+
Errors become part of the stream and are not thrown, to prevent issues such as server crashes.
|
|
17
|
+
|
|
18
|
+
## Solution
|
|
19
|
+
|
|
20
|
+
To log errors, you can provide an `onError` callback that is triggered when an error occurs.
|
|
21
|
+
|
|
22
|
+
```tsx highlight="6-8"
|
|
23
|
+
import { streamText } from 'ai';
|
|
24
|
+
__PROVIDER_IMPORT__;
|
|
25
|
+
|
|
26
|
+
const result = streamText({
|
|
27
|
+
model: __MODEL__,
|
|
28
|
+
prompt: 'Invent a new holiday and describe its traditions.',
|
|
29
|
+
onError({ error }) {
|
|
30
|
+
console.error(error); // your error logging logic here
|
|
31
|
+
},
|
|
32
|
+
});
|
|
33
|
+
```
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Streaming Status Shows But No Text Appears
|
|
3
|
+
description: Why useChat shows "streaming" status without any visible content
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Streaming Status Shows But No Text Appears
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
When using `useChat`, the status changes to "streaming" immediately, but no text appears for several seconds.
|
|
11
|
+
|
|
12
|
+
## Background
|
|
13
|
+
|
|
14
|
+
The status changes to "streaming" as soon as the connection to the server is established and streaming begins - this includes metadata streaming, not just the LLM's generated tokens.
|
|
15
|
+
|
|
16
|
+
## Solution
|
|
17
|
+
|
|
18
|
+
Create a custom loading state that checks if the last assistant message actually contains content:
|
|
19
|
+
|
|
20
|
+
```tsx
|
|
21
|
+
'use client';
|
|
22
|
+
|
|
23
|
+
import { useChat } from '@ai-sdk/react';
|
|
24
|
+
|
|
25
|
+
export default function Page() {
|
|
26
|
+
const { messages, status } = useChat();
|
|
27
|
+
|
|
28
|
+
const lastMessage = messages.at(-1);
|
|
29
|
+
|
|
30
|
+
const showLoader =
|
|
31
|
+
status === 'streaming' &&
|
|
32
|
+
lastMessage?.role === 'assistant' &&
|
|
33
|
+
lastMessage?.parts?.length === 0;
|
|
34
|
+
|
|
35
|
+
return (
|
|
36
|
+
<>
|
|
37
|
+
{messages.map(message => (
|
|
38
|
+
<div key={message.id}>
|
|
39
|
+
{message.role === 'user' ? 'User: ' : 'AI: '}
|
|
40
|
+
{message.parts.map((part, index) =>
|
|
41
|
+
part.type === 'text' ? <span key={index}>{part.text}</span> : null,
|
|
42
|
+
)}
|
|
43
|
+
</div>
|
|
44
|
+
))}
|
|
45
|
+
|
|
46
|
+
{showLoader && <div>Loading...</div>}
|
|
47
|
+
</>
|
|
48
|
+
);
|
|
49
|
+
}
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
You can also check for specific part types if you're waiting for something specific:
|
|
53
|
+
|
|
54
|
+
```tsx
|
|
55
|
+
const showLoader =
|
|
56
|
+
status === 'streaming' &&
|
|
57
|
+
lastMessage?.role === 'assistant' &&
|
|
58
|
+
!lastMessage?.parts?.some(part => part.type === 'text');
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
## Related Issues
|
|
62
|
+
|
|
63
|
+
- [GitHub Issue #7586](https://github.com/vercel/ai/issues/7586)
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Stale body values with useChat
|
|
3
|
+
description: Troubleshooting stale values when passing information via the body parameter of useChat
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Stale body values with useChat
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
When using `useChat` and passing dynamic information via the `body` parameter at the hook level, the data remains stale and only reflects the value from the initial component render. This occurs because the body configuration is captured once when the hook is initialized and doesn't update with subsequent component re-renders.
|
|
11
|
+
|
|
12
|
+
```tsx
|
|
13
|
+
// Problematic code - body data will be stale
|
|
14
|
+
export default function Chat() {
|
|
15
|
+
const [temperature, setTemperature] = useState(0.7);
|
|
16
|
+
const [userId, setUserId] = useState('user123');
|
|
17
|
+
|
|
18
|
+
// This body configuration is captured once and won't update
|
|
19
|
+
const { messages, sendMessage } = useChat({
|
|
20
|
+
transport: new DefaultChatTransport({
|
|
21
|
+
api: '/api/chat',
|
|
22
|
+
body: {
|
|
23
|
+
temperature, // Always the initial value (0.7)
|
|
24
|
+
userId, // Always the initial value ('user123')
|
|
25
|
+
},
|
|
26
|
+
}),
|
|
27
|
+
});
|
|
28
|
+
|
|
29
|
+
// Even if temperature or userId change, the body in requests will still use initial values
|
|
30
|
+
return (
|
|
31
|
+
<div>
|
|
32
|
+
<input
|
|
33
|
+
type="range"
|
|
34
|
+
value={temperature}
|
|
35
|
+
onChange={e => setTemperature(parseFloat(e.target.value))}
|
|
36
|
+
/>
|
|
37
|
+
{/* Chat UI */}
|
|
38
|
+
</div>
|
|
39
|
+
);
|
|
40
|
+
}
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
## Background
|
|
44
|
+
|
|
45
|
+
The hook-level body configuration is evaluated once during the initial render and doesn't re-evaluate when component state changes.
|
|
46
|
+
|
|
47
|
+
## Solution
|
|
48
|
+
|
|
49
|
+
Pass dynamic variables via the second argument of the `sendMessage` function instead of at the hook level. Request-level options are evaluated on each call and take precedence over hook-level options.
|
|
50
|
+
|
|
51
|
+
```tsx
|
|
52
|
+
export default function Chat() {
|
|
53
|
+
const [temperature, setTemperature] = useState(0.7);
|
|
54
|
+
const [userId, setUserId] = useState('user123');
|
|
55
|
+
const [input, setInput] = useState('');
|
|
56
|
+
|
|
57
|
+
const { messages, sendMessage } = useChat({
|
|
58
|
+
// Static configuration only
|
|
59
|
+
transport: new DefaultChatTransport({
|
|
60
|
+
api: '/api/chat',
|
|
61
|
+
}),
|
|
62
|
+
});
|
|
63
|
+
|
|
64
|
+
return (
|
|
65
|
+
<div>
|
|
66
|
+
<input
|
|
67
|
+
type="range"
|
|
68
|
+
value={temperature}
|
|
69
|
+
onChange={e => setTemperature(parseFloat(e.target.value))}
|
|
70
|
+
/>
|
|
71
|
+
|
|
72
|
+
<form
|
|
73
|
+
onSubmit={event => {
|
|
74
|
+
event.preventDefault();
|
|
75
|
+
if (input.trim()) {
|
|
76
|
+
// Pass dynamic values as request-level options
|
|
77
|
+
sendMessage(
|
|
78
|
+
{ text: input },
|
|
79
|
+
{
|
|
80
|
+
body: {
|
|
81
|
+
temperature, // Current value at request time
|
|
82
|
+
userId, // Current value at request time
|
|
83
|
+
},
|
|
84
|
+
},
|
|
85
|
+
);
|
|
86
|
+
setInput('');
|
|
87
|
+
}
|
|
88
|
+
}}
|
|
89
|
+
>
|
|
90
|
+
<input value={input} onChange={e => setInput(e.target.value)} />
|
|
91
|
+
</form>
|
|
92
|
+
</div>
|
|
93
|
+
);
|
|
94
|
+
}
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Alternative: Dynamic Hook-Level Configuration
|
|
98
|
+
|
|
99
|
+
If you need hook-level configuration that responds to changes, you can use functions that return configuration values. However, for component state, you'll need to use `useRef` to access current values:
|
|
100
|
+
|
|
101
|
+
```tsx
|
|
102
|
+
export default function Chat() {
|
|
103
|
+
const temperatureRef = useRef(0.7);
|
|
104
|
+
|
|
105
|
+
const { messages, sendMessage } = useChat({
|
|
106
|
+
transport: new DefaultChatTransport({
|
|
107
|
+
api: '/api/chat',
|
|
108
|
+
body: () => ({
|
|
109
|
+
temperature: temperatureRef.current, // Access via ref.current
|
|
110
|
+
sessionId: getCurrentSessionId(), // Function calls work directly
|
|
111
|
+
}),
|
|
112
|
+
}),
|
|
113
|
+
});
|
|
114
|
+
|
|
115
|
+
// ...
|
|
116
|
+
}
|
|
117
|
+
```
|
|
118
|
+
|
|
119
|
+
**Recommendation:** Request-level configuration is simpler and more reliable for component state. Use it whenever you need to pass dynamic values that change during the component lifecycle.
|
|
120
|
+
|
|
121
|
+
### Server-side handling
|
|
122
|
+
|
|
123
|
+
On your server side, retrieve the custom fields by destructuring the request body:
|
|
124
|
+
|
|
125
|
+
```tsx
|
|
126
|
+
// app/api/chat/route.ts
|
|
127
|
+
export async function POST(req: Request) {
|
|
128
|
+
const { messages, temperature, userId } = await req.json();
|
|
129
|
+
|
|
130
|
+
const result = streamText({
|
|
131
|
+
model: 'openai/gpt-5-mini',
|
|
132
|
+
messages: await convertToModelMessages(messages),
|
|
133
|
+
temperature, // Use the dynamic temperature from the request
|
|
134
|
+
// ... other configuration
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
return result.toUIMessageStreamResponse();
|
|
138
|
+
}
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
For more information, see [chatbot request configuration documentation](/docs/ai-sdk-ui/chatbot#request-configuration).
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Type Error with onToolCall
|
|
3
|
+
description: How to handle TypeScript type errors when using the onToolCall callback
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Type Error with onToolCall
|
|
7
|
+
|
|
8
|
+
When using the `onToolCall` callback with TypeScript, you may encounter type errors when trying to pass tool properties directly to `addToolOutput`.
|
|
9
|
+
|
|
10
|
+
## Problem
|
|
11
|
+
|
|
12
|
+
TypeScript cannot automatically narrow the type of `toolCall.toolName` when you have both static and dynamic tools, leading to type errors:
|
|
13
|
+
|
|
14
|
+
```tsx
|
|
15
|
+
// ❌ This causes a TypeScript error
|
|
16
|
+
const { messages, sendMessage, addToolOutput } = useChat({
|
|
17
|
+
async onToolCall({ toolCall }) {
|
|
18
|
+
addToolOutput({
|
|
19
|
+
tool: toolCall.toolName, // Type 'string' is not assignable to type '"yourTool" | "yourOtherTool"'
|
|
20
|
+
toolCallId: toolCall.toolCallId,
|
|
21
|
+
output: someOutput,
|
|
22
|
+
});
|
|
23
|
+
},
|
|
24
|
+
});
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
The error occurs because:
|
|
28
|
+
|
|
29
|
+
- Static tools have specific literal types for their names (e.g., `"getWeatherInformation"`)
|
|
30
|
+
- Dynamic tools have `toolName` as a generic `string`
|
|
31
|
+
- TypeScript can't guarantee that `toolCall.toolName` matches your specific tool names
|
|
32
|
+
|
|
33
|
+
## Solution
|
|
34
|
+
|
|
35
|
+
Check if the tool is dynamic first to enable proper type narrowing:
|
|
36
|
+
|
|
37
|
+
```tsx
|
|
38
|
+
// ✅ Correct approach with type narrowing
|
|
39
|
+
const { messages, sendMessage, addToolOutput } = useChat({
|
|
40
|
+
async onToolCall({ toolCall }) {
|
|
41
|
+
// Check if it's a dynamic tool first
|
|
42
|
+
if (toolCall.dynamic) {
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Now TypeScript knows this is a static tool with the correct type
|
|
47
|
+
addToolOutput({
|
|
48
|
+
tool: toolCall.toolName, // No type error!
|
|
49
|
+
toolCallId: toolCall.toolCallId,
|
|
50
|
+
output: someOutput,
|
|
51
|
+
});
|
|
52
|
+
},
|
|
53
|
+
});
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
<Note>
|
|
57
|
+
If you're still using the deprecated `addToolResult` method, this solution
|
|
58
|
+
applies the same way. Consider migrating to `addToolOutput` for consistency
|
|
59
|
+
with the latest API.
|
|
60
|
+
</Note>
|
|
61
|
+
|
|
62
|
+
## Related
|
|
63
|
+
|
|
64
|
+
- [Chatbot Tool Usage](/docs/ai-sdk-ui/chatbot-tool-usage)
|
|
65
|
+
- [Dynamic Tools](/docs/reference/ai-sdk-core/dynamic-tool)
|
|
66
|
+
- [useChat Reference](/docs/reference/ai-sdk-ui/use-chat)
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Unsupported model version error
|
|
3
|
+
description: Troubleshooting the AI_UnsupportedModelVersionError when migrating to AI SDK 5
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Unsupported model version error
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
When migrating to AI SDK 5, you might encounter an error stating that your model uses an unsupported version:
|
|
11
|
+
|
|
12
|
+
```
|
|
13
|
+
AI_UnsupportedModelVersionError: Unsupported model version v1 for provider "ollama.chat" and model "gemma3:4b".
|
|
14
|
+
AI SDK 5 only supports models that implement specification version "v2".
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
This error occurs because the version of the provider package you're using implements the older (v1) model specification.
|
|
18
|
+
|
|
19
|
+
## Background
|
|
20
|
+
|
|
21
|
+
AI SDK 5 requires all provider packages to implement specification version "v2". When you upgrade to AI SDK 5 but don't update your provider packages to compatible versions, they continue using the older "v1" specification, causing this error.
|
|
22
|
+
|
|
23
|
+
## Solution
|
|
24
|
+
|
|
25
|
+
### Update provider packages to AI SDK 5 compatible versions
|
|
26
|
+
|
|
27
|
+
Update all your `@ai-sdk/*` provider packages to version `2.0.0` or later. These versions implement the v2 specification required by AI SDK 5.
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
pnpm install ai@latest @ai-sdk/openai@latest @ai-sdk/anthropic@latest
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
For AI SDK 5 compatibility, you need:
|
|
34
|
+
|
|
35
|
+
- `ai` package: `5.0.0` or later
|
|
36
|
+
- `@ai-sdk/*` packages: `2.0.0` or later (for example, `@ai-sdk/openai`, `@ai-sdk/anthropic`, `@ai-sdk/google`)
|
|
37
|
+
- `@ai-sdk/provider` package: `2.0.0` or later
|
|
38
|
+
- `zod` package: `4.1.8` or later
|
|
39
|
+
|
|
40
|
+
### Check provider compatibility
|
|
41
|
+
|
|
42
|
+
If you're using a third-party or custom provider, verify that it has been updated to support AI SDK 5. Not all providers may have v2-compatible versions available yet.
|
|
43
|
+
|
|
44
|
+
To check if a provider supports AI SDK 5:
|
|
45
|
+
|
|
46
|
+
1. Check the provider's package.json for `@ai-sdk/provider` peer dependency version `2.0.0` or later
|
|
47
|
+
2. Review the provider's changelog or migration guide
|
|
48
|
+
3. Check the provider's repository for AI SDK 5 support
|
|
49
|
+
|
|
50
|
+
For more information on migrating to AI SDK 5, see the [AI SDK 5.0 migration guide](/docs/migration-guides/migration-guide-5-0).
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Object generation failed with OpenAI
|
|
3
|
+
description: Troubleshooting NoObjectGeneratedError with finish-reason content-filter caused by incompatible Zod schema types when using OpenAI structured outputs
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Object generation failed with OpenAI
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
When using `generateObject` or `streamObject` with OpenAI's structured output generation, you may encounter a `NoObjectGeneratedError` with the finish reason `content-filter`. This error occurs when your Zod schema contains incompatible types that OpenAI's structured output feature cannot process.
|
|
11
|
+
|
|
12
|
+
```typescript
|
|
13
|
+
// Problematic code - incompatible schema types
|
|
14
|
+
import { generateObject } from 'ai';
|
|
15
|
+
import { openai } from '@ai-sdk/openai';
|
|
16
|
+
import { z } from 'zod';
|
|
17
|
+
|
|
18
|
+
const result = await generateObject({
|
|
19
|
+
model: openai('gpt-4o-2024-08-06'),
|
|
20
|
+
schema: z.object({
|
|
21
|
+
name: z.string().nullish(), // ❌ .nullish() is not supported
|
|
22
|
+
email: z.string().optional(), // ❌ .optional() is not supported
|
|
23
|
+
age: z.number().nullable(), // ✅ .nullable() is supported
|
|
24
|
+
}),
|
|
25
|
+
prompt: 'Generate a user profile',
|
|
26
|
+
});
|
|
27
|
+
|
|
28
|
+
// Error: NoObjectGeneratedError: No object generated.
|
|
29
|
+
// Finish reason: content-filter
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
## Background
|
|
33
|
+
|
|
34
|
+
OpenAI's structured output generation uses JSON Schema under the hood and has specific requirements for schema compatibility. The Zod methods `.nullish()` and `.optional()` generate JSON Schema patterns that are incompatible with OpenAI's implementation, causing the model to reject the schema and return a content-filter finish reason.
|
|
35
|
+
|
|
36
|
+
## Solution
|
|
37
|
+
|
|
38
|
+
Replace `.nullish()` and `.optional()` with `.nullable()` in your Zod schemas when using structured output generation with OpenAI models.
|
|
39
|
+
|
|
40
|
+
```typescript
|
|
41
|
+
import { generateObject } from 'ai';
|
|
42
|
+
import { openai } from '@ai-sdk/openai';
|
|
43
|
+
import { z } from 'zod';
|
|
44
|
+
|
|
45
|
+
// Correct approach - use .nullable()
|
|
46
|
+
const result = await generateObject({
|
|
47
|
+
model: openai('gpt-4o-2024-08-06'),
|
|
48
|
+
schema: z.object({
|
|
49
|
+
name: z.string().nullable(), // ✅ Use .nullable() instead of .nullish()
|
|
50
|
+
email: z.string().nullable(), // ✅ Use .nullable() instead of .optional()
|
|
51
|
+
age: z.number().nullable(),
|
|
52
|
+
}),
|
|
53
|
+
prompt: 'Generate a user profile',
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
console.log(result.object);
|
|
57
|
+
// { name: "John Doe", email: "john@example.com", age: 30 }
|
|
58
|
+
// or { name: null, email: null, age: 25 }
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
### Schema Type Comparison
|
|
62
|
+
|
|
63
|
+
| Zod Type | Compatible | JSON Schema Behavior |
|
|
64
|
+
| ------------- | ---------- | ------------------------------------------------------ |
|
|
65
|
+
| `.nullable()` | ✅ Yes | Allows `null` or the specified type |
|
|
66
|
+
| `.optional()` | ❌ No | Field can be omitted (not supported) |
|
|
67
|
+
| `.nullish()` | ❌ No | Allows `null`, `undefined`, or omitted (not supported) |
|
|
68
|
+
|
|
69
|
+
## Related Information
|
|
70
|
+
|
|
71
|
+
- For more details on structured output generation, see [Generating Structured Data](/docs/ai-sdk-core/generating-structured-data)
|
|
72
|
+
- For OpenAI-specific structured output configuration, see [OpenAI Provider - Structured Outputs](/providers/ai-sdk-providers/openai#structured-outputs)
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Model is not assignable to type "LanguageModelV1"
|
|
3
|
+
description: Troubleshooting errors related to incompatible models.
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Model is not assignable to type "LanguageModelV1"
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
I have updated the AI SDK and now I get the following error: `Type 'SomeModel' is not assignable to type 'LanguageModelV1'.`
|
|
11
|
+
|
|
12
|
+
<Note>Similar errors can occur with `EmbeddingModelV3` as well.</Note>
|
|
13
|
+
|
|
14
|
+
## Background
|
|
15
|
+
|
|
16
|
+
Sometimes new features are being added to the model specification.
|
|
17
|
+
This can cause incompatibilities with older provider versions.
|
|
18
|
+
|
|
19
|
+
## Solution
|
|
20
|
+
|
|
21
|
+
Update your provider packages and the AI SDK to the latest version.
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: TypeScript error "Cannot find namespace 'JSX'"
|
|
3
|
+
description: Troubleshooting errors related to TypeScript and JSX.
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# TypeScript error "Cannot find namespace 'JSX'"
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
I am using the AI SDK in a project without React, e.g. a Hono server, and I get the following error:
|
|
11
|
+
`error TS2503: Cannot find namespace 'JSX'.`
|
|
12
|
+
|
|
13
|
+
## Background
|
|
14
|
+
|
|
15
|
+
The AI SDK has a dependency on `@types/react` which defines the `JSX` namespace.
|
|
16
|
+
It will be removed in the next major version of the AI SDK.
|
|
17
|
+
|
|
18
|
+
## Solution
|
|
19
|
+
|
|
20
|
+
You can install the `@types/react` package as a dependency to fix the error.
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
npm install @types/react
|
|
24
|
+
```
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: React error "Maximum update depth exceeded"
|
|
3
|
+
description: Troubleshooting errors related to the "Maximum update depth exceeded" error.
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# React error "Maximum update depth exceeded"
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
I am using the AI SDK in a React project with the `useChat` or `useCompletion` hooks
|
|
11
|
+
and I get the following error when AI responses stream in: `Maximum update depth exceeded`.
|
|
12
|
+
|
|
13
|
+
## Background
|
|
14
|
+
|
|
15
|
+
By default, the UI is re-rendered on every chunk that arrives.
|
|
16
|
+
This can overload the rendering, especially on slower devices or when complex components
|
|
17
|
+
need updating (e.g. Markdown). Throttling can mitigate this.
|
|
18
|
+
|
|
19
|
+
## Solution
|
|
20
|
+
|
|
21
|
+
Use the `experimental_throttle` option to throttle the UI updates:
|
|
22
|
+
|
|
23
|
+
### `useChat`
|
|
24
|
+
|
|
25
|
+
```tsx filename="page.tsx" highlight="2-3"
|
|
26
|
+
const { messages, ... } = useChat({
|
|
27
|
+
// Throttle the messages and data updates to 50ms:
|
|
28
|
+
experimental_throttle: 50
|
|
29
|
+
})
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
### `useCompletion`
|
|
33
|
+
|
|
34
|
+
```tsx filename="page.tsx" highlight="2-3"
|
|
35
|
+
const { completion, ... } = useCompletion({
|
|
36
|
+
// Throttle the completion and data updates to 50ms:
|
|
37
|
+
experimental_throttle: 50
|
|
38
|
+
})
|
|
39
|
+
```
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: "Jest: cannot find module '@ai-sdk/rsc'"
|
|
3
|
+
description: "Troubleshooting AI SDK errors related to the Jest: cannot find module '@ai-sdk/rsc' error"
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Jest: cannot find module '@ai-sdk/rsc'
|
|
7
|
+
|
|
8
|
+
## Issue
|
|
9
|
+
|
|
10
|
+
I am using AI SDK RSC and am writing tests for my RSC components with Jest.
|
|
11
|
+
|
|
12
|
+
I am getting the following error: `Cannot find module '@ai-sdk/rsc'`.
|
|
13
|
+
|
|
14
|
+
## Solution
|
|
15
|
+
|
|
16
|
+
Configure the module resolution via `moduleNameMapper` in your Jest config:
|
|
17
|
+
|
|
18
|
+
```json filename="jest.config.js"
|
|
19
|
+
"moduleNameMapper": {
|
|
20
|
+
"^@ai-sdk/rsc$": "<rootDir>/node_modules/@ai-sdk/rsc/dist"
|
|
21
|
+
}
|
|
22
|
+
```
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Troubleshooting
|
|
3
|
+
description: Troubleshooting information for common issues encountered with the AI SDK.
|
|
4
|
+
collapsed: true
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# Troubleshooting
|
|
8
|
+
|
|
9
|
+
This section is designed to help you quickly identify and resolve common issues encountered with the AI SDK, ensuring a smoother and more efficient development experience.
|
|
10
|
+
|
|
11
|
+
<Support />
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "ai",
|
|
3
|
-
"version": "6.0.
|
|
3
|
+
"version": "6.0.32",
|
|
4
4
|
"description": "AI SDK by Vercel - The AI Toolkit for TypeScript and JavaScript",
|
|
5
5
|
"license": "Apache-2.0",
|
|
6
6
|
"sideEffects": false,
|
|
@@ -9,11 +9,15 @@
|
|
|
9
9
|
"types": "./dist/index.d.ts",
|
|
10
10
|
"files": [
|
|
11
11
|
"dist/**/*",
|
|
12
|
+
"docs/**/*",
|
|
12
13
|
"CHANGELOG.md",
|
|
13
14
|
"internal.d.ts",
|
|
14
15
|
"README.md",
|
|
15
16
|
"test.d.ts"
|
|
16
17
|
],
|
|
18
|
+
"directories": {
|
|
19
|
+
"doc": "./docs"
|
|
20
|
+
},
|
|
17
21
|
"exports": {
|
|
18
22
|
"./package.json": "./package.json",
|
|
19
23
|
".": {
|
|
@@ -37,8 +41,8 @@
|
|
|
37
41
|
"dependencies": {
|
|
38
42
|
"@opentelemetry/api": "1.9.0",
|
|
39
43
|
"@ai-sdk/gateway": "3.0.13",
|
|
40
|
-
"@ai-sdk/provider
|
|
41
|
-
"@ai-sdk/provider": "
|
|
44
|
+
"@ai-sdk/provider": "3.0.2",
|
|
45
|
+
"@ai-sdk/provider-utils": "4.0.5"
|
|
42
46
|
},
|
|
43
47
|
"devDependencies": {
|
|
44
48
|
"@edge-runtime/vm": "^5.0.0",
|
|
@@ -92,7 +96,7 @@
|
|
|
92
96
|
"scripts": {
|
|
93
97
|
"build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
|
|
94
98
|
"build:watch": "pnpm clean && tsup --watch --tsconfig tsconfig.build.json",
|
|
95
|
-
"clean": "del-cli dist *.tsbuildinfo",
|
|
99
|
+
"clean": "del-cli dist docs *.tsbuildinfo",
|
|
96
100
|
"lint": "eslint \"./**/*.ts*\"",
|
|
97
101
|
"type-check": "tsc --build",
|
|
98
102
|
"prettier-check": "prettier --check \"./**/*.ts*\"",
|