ai 6.0.30 → 6.0.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.js +1 -1
- package/dist/index.mjs +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/docs/00-introduction/index.mdx +76 -0
- package/docs/02-foundations/01-overview.mdx +43 -0
- package/docs/02-foundations/02-providers-and-models.mdx +163 -0
- package/docs/02-foundations/03-prompts.mdx +620 -0
- package/docs/02-foundations/04-tools.mdx +160 -0
- package/docs/02-foundations/05-streaming.mdx +62 -0
- package/docs/02-foundations/index.mdx +43 -0
- package/docs/02-getting-started/00-choosing-a-provider.mdx +110 -0
- package/docs/02-getting-started/01-navigating-the-library.mdx +85 -0
- package/docs/02-getting-started/02-nextjs-app-router.mdx +556 -0
- package/docs/02-getting-started/03-nextjs-pages-router.mdx +542 -0
- package/docs/02-getting-started/04-svelte.mdx +627 -0
- package/docs/02-getting-started/05-nuxt.mdx +566 -0
- package/docs/02-getting-started/06-nodejs.mdx +512 -0
- package/docs/02-getting-started/07-expo.mdx +766 -0
- package/docs/02-getting-started/08-tanstack-start.mdx +583 -0
- package/docs/02-getting-started/index.mdx +44 -0
- package/docs/03-agents/01-overview.mdx +96 -0
- package/docs/03-agents/02-building-agents.mdx +367 -0
- package/docs/03-agents/03-workflows.mdx +370 -0
- package/docs/03-agents/04-loop-control.mdx +350 -0
- package/docs/03-agents/05-configuring-call-options.mdx +286 -0
- package/docs/03-agents/index.mdx +40 -0
- package/docs/03-ai-sdk-core/01-overview.mdx +33 -0
- package/docs/03-ai-sdk-core/05-generating-text.mdx +600 -0
- package/docs/03-ai-sdk-core/10-generating-structured-data.mdx +662 -0
- package/docs/03-ai-sdk-core/15-tools-and-tool-calling.mdx +1102 -0
- package/docs/03-ai-sdk-core/16-mcp-tools.mdx +375 -0
- package/docs/03-ai-sdk-core/20-prompt-engineering.mdx +144 -0
- package/docs/03-ai-sdk-core/25-settings.mdx +198 -0
- package/docs/03-ai-sdk-core/30-embeddings.mdx +247 -0
- package/docs/03-ai-sdk-core/31-reranking.mdx +218 -0
- package/docs/03-ai-sdk-core/35-image-generation.mdx +341 -0
- package/docs/03-ai-sdk-core/36-transcription.mdx +173 -0
- package/docs/03-ai-sdk-core/37-speech.mdx +167 -0
- package/docs/03-ai-sdk-core/40-middleware.mdx +480 -0
- package/docs/03-ai-sdk-core/45-provider-management.mdx +349 -0
- package/docs/03-ai-sdk-core/50-error-handling.mdx +149 -0
- package/docs/03-ai-sdk-core/55-testing.mdx +218 -0
- package/docs/03-ai-sdk-core/60-telemetry.mdx +313 -0
- package/docs/03-ai-sdk-core/65-devtools.mdx +107 -0
- package/docs/03-ai-sdk-core/index.mdx +88 -0
- package/docs/04-ai-sdk-ui/01-overview.mdx +44 -0
- package/docs/04-ai-sdk-ui/02-chatbot.mdx +1313 -0
- package/docs/04-ai-sdk-ui/03-chatbot-message-persistence.mdx +535 -0
- package/docs/04-ai-sdk-ui/03-chatbot-resume-streams.mdx +263 -0
- package/docs/04-ai-sdk-ui/03-chatbot-tool-usage.mdx +682 -0
- package/docs/04-ai-sdk-ui/04-generative-user-interfaces.mdx +389 -0
- package/docs/04-ai-sdk-ui/05-completion.mdx +186 -0
- package/docs/04-ai-sdk-ui/08-object-generation.mdx +344 -0
- package/docs/04-ai-sdk-ui/20-streaming-data.mdx +397 -0
- package/docs/04-ai-sdk-ui/21-error-handling.mdx +190 -0
- package/docs/04-ai-sdk-ui/21-transport.mdx +174 -0
- package/docs/04-ai-sdk-ui/24-reading-ui-message-streams.mdx +104 -0
- package/docs/04-ai-sdk-ui/25-message-metadata.mdx +152 -0
- package/docs/04-ai-sdk-ui/50-stream-protocol.mdx +477 -0
- package/docs/04-ai-sdk-ui/index.mdx +64 -0
- package/docs/05-ai-sdk-rsc/01-overview.mdx +45 -0
- package/docs/05-ai-sdk-rsc/02-streaming-react-components.mdx +209 -0
- package/docs/05-ai-sdk-rsc/03-generative-ui-state.mdx +279 -0
- package/docs/05-ai-sdk-rsc/03-saving-and-restoring-states.mdx +105 -0
- package/docs/05-ai-sdk-rsc/04-multistep-interfaces.mdx +282 -0
- package/docs/05-ai-sdk-rsc/05-streaming-values.mdx +158 -0
- package/docs/05-ai-sdk-rsc/06-loading-state.mdx +273 -0
- package/docs/05-ai-sdk-rsc/08-error-handling.mdx +96 -0
- package/docs/05-ai-sdk-rsc/09-authentication.mdx +42 -0
- package/docs/05-ai-sdk-rsc/10-migrating-to-ui.mdx +722 -0
- package/docs/05-ai-sdk-rsc/index.mdx +58 -0
- package/docs/06-advanced/01-prompt-engineering.mdx +96 -0
- package/docs/06-advanced/02-stopping-streams.mdx +184 -0
- package/docs/06-advanced/03-backpressure.mdx +173 -0
- package/docs/06-advanced/04-caching.mdx +169 -0
- package/docs/06-advanced/05-multiple-streamables.mdx +68 -0
- package/docs/06-advanced/06-rate-limiting.mdx +60 -0
- package/docs/06-advanced/07-rendering-ui-with-language-models.mdx +213 -0
- package/docs/06-advanced/08-model-as-router.mdx +120 -0
- package/docs/06-advanced/09-multistep-interfaces.mdx +115 -0
- package/docs/06-advanced/09-sequential-generations.mdx +55 -0
- package/docs/06-advanced/10-vercel-deployment-guide.mdx +117 -0
- package/docs/06-advanced/index.mdx +11 -0
- package/docs/07-reference/01-ai-sdk-core/01-generate-text.mdx +2142 -0
- package/docs/07-reference/01-ai-sdk-core/02-stream-text.mdx +3215 -0
- package/docs/07-reference/01-ai-sdk-core/03-generate-object.mdx +780 -0
- package/docs/07-reference/01-ai-sdk-core/04-stream-object.mdx +1140 -0
- package/docs/07-reference/01-ai-sdk-core/05-embed.mdx +190 -0
- package/docs/07-reference/01-ai-sdk-core/06-embed-many.mdx +171 -0
- package/docs/07-reference/01-ai-sdk-core/06-rerank.mdx +309 -0
- package/docs/07-reference/01-ai-sdk-core/10-generate-image.mdx +227 -0
- package/docs/07-reference/01-ai-sdk-core/11-transcribe.mdx +138 -0
- package/docs/07-reference/01-ai-sdk-core/12-generate-speech.mdx +214 -0
- package/docs/07-reference/01-ai-sdk-core/15-agent.mdx +203 -0
- package/docs/07-reference/01-ai-sdk-core/16-tool-loop-agent.mdx +449 -0
- package/docs/07-reference/01-ai-sdk-core/17-create-agent-ui-stream.mdx +148 -0
- package/docs/07-reference/01-ai-sdk-core/18-create-agent-ui-stream-response.mdx +168 -0
- package/docs/07-reference/01-ai-sdk-core/18-pipe-agent-ui-stream-to-response.mdx +144 -0
- package/docs/07-reference/01-ai-sdk-core/20-tool.mdx +196 -0
- package/docs/07-reference/01-ai-sdk-core/22-dynamic-tool.mdx +175 -0
- package/docs/07-reference/01-ai-sdk-core/23-create-mcp-client.mdx +410 -0
- package/docs/07-reference/01-ai-sdk-core/24-mcp-stdio-transport.mdx +68 -0
- package/docs/07-reference/01-ai-sdk-core/25-json-schema.mdx +94 -0
- package/docs/07-reference/01-ai-sdk-core/26-zod-schema.mdx +109 -0
- package/docs/07-reference/01-ai-sdk-core/27-valibot-schema.mdx +55 -0
- package/docs/07-reference/01-ai-sdk-core/28-output.mdx +342 -0
- package/docs/07-reference/01-ai-sdk-core/30-model-message.mdx +415 -0
- package/docs/07-reference/01-ai-sdk-core/31-ui-message.mdx +246 -0
- package/docs/07-reference/01-ai-sdk-core/32-validate-ui-messages.mdx +101 -0
- package/docs/07-reference/01-ai-sdk-core/33-safe-validate-ui-messages.mdx +113 -0
- package/docs/07-reference/01-ai-sdk-core/40-provider-registry.mdx +182 -0
- package/docs/07-reference/01-ai-sdk-core/42-custom-provider.mdx +121 -0
- package/docs/07-reference/01-ai-sdk-core/50-cosine-similarity.mdx +52 -0
- package/docs/07-reference/01-ai-sdk-core/60-wrap-language-model.mdx +59 -0
- package/docs/07-reference/01-ai-sdk-core/61-wrap-image-model.mdx +64 -0
- package/docs/07-reference/01-ai-sdk-core/65-language-model-v2-middleware.mdx +46 -0
- package/docs/07-reference/01-ai-sdk-core/66-extract-reasoning-middleware.mdx +68 -0
- package/docs/07-reference/01-ai-sdk-core/67-simulate-streaming-middleware.mdx +71 -0
- package/docs/07-reference/01-ai-sdk-core/68-default-settings-middleware.mdx +80 -0
- package/docs/07-reference/01-ai-sdk-core/69-add-tool-input-examples-middleware.mdx +155 -0
- package/docs/07-reference/01-ai-sdk-core/70-extract-json-middleware.mdx +147 -0
- package/docs/07-reference/01-ai-sdk-core/70-step-count-is.mdx +84 -0
- package/docs/07-reference/01-ai-sdk-core/71-has-tool-call.mdx +120 -0
- package/docs/07-reference/01-ai-sdk-core/75-simulate-readable-stream.mdx +94 -0
- package/docs/07-reference/01-ai-sdk-core/80-smooth-stream.mdx +145 -0
- package/docs/07-reference/01-ai-sdk-core/90-generate-id.mdx +43 -0
- package/docs/07-reference/01-ai-sdk-core/91-create-id-generator.mdx +89 -0
- package/docs/07-reference/01-ai-sdk-core/index.mdx +159 -0
- package/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx +446 -0
- package/docs/07-reference/02-ai-sdk-ui/02-use-completion.mdx +179 -0
- package/docs/07-reference/02-ai-sdk-ui/03-use-object.mdx +178 -0
- package/docs/07-reference/02-ai-sdk-ui/31-convert-to-model-messages.mdx +230 -0
- package/docs/07-reference/02-ai-sdk-ui/32-prune-messages.mdx +108 -0
- package/docs/07-reference/02-ai-sdk-ui/40-create-ui-message-stream.mdx +151 -0
- package/docs/07-reference/02-ai-sdk-ui/41-create-ui-message-stream-response.mdx +113 -0
- package/docs/07-reference/02-ai-sdk-ui/42-pipe-ui-message-stream-to-response.mdx +73 -0
- package/docs/07-reference/02-ai-sdk-ui/43-read-ui-message-stream.mdx +57 -0
- package/docs/07-reference/02-ai-sdk-ui/46-infer-ui-tools.mdx +99 -0
- package/docs/07-reference/02-ai-sdk-ui/47-infer-ui-tool.mdx +75 -0
- package/docs/07-reference/02-ai-sdk-ui/50-direct-chat-transport.mdx +333 -0
- package/docs/07-reference/02-ai-sdk-ui/index.mdx +89 -0
- package/docs/07-reference/03-ai-sdk-rsc/01-stream-ui.mdx +767 -0
- package/docs/07-reference/03-ai-sdk-rsc/02-create-ai.mdx +90 -0
- package/docs/07-reference/03-ai-sdk-rsc/03-create-streamable-ui.mdx +91 -0
- package/docs/07-reference/03-ai-sdk-rsc/04-create-streamable-value.mdx +48 -0
- package/docs/07-reference/03-ai-sdk-rsc/05-read-streamable-value.mdx +78 -0
- package/docs/07-reference/03-ai-sdk-rsc/06-get-ai-state.mdx +50 -0
- package/docs/07-reference/03-ai-sdk-rsc/07-get-mutable-ai-state.mdx +70 -0
- package/docs/07-reference/03-ai-sdk-rsc/08-use-ai-state.mdx +26 -0
- package/docs/07-reference/03-ai-sdk-rsc/09-use-actions.mdx +42 -0
- package/docs/07-reference/03-ai-sdk-rsc/10-use-ui-state.mdx +35 -0
- package/docs/07-reference/03-ai-sdk-rsc/11-use-streamable-value.mdx +46 -0
- package/docs/07-reference/03-ai-sdk-rsc/20-render.mdx +262 -0
- package/docs/07-reference/03-ai-sdk-rsc/index.mdx +67 -0
- package/docs/07-reference/04-stream-helpers/01-ai-stream.mdx +89 -0
- package/docs/07-reference/04-stream-helpers/02-streaming-text-response.mdx +79 -0
- package/docs/07-reference/04-stream-helpers/05-stream-to-response.mdx +108 -0
- package/docs/07-reference/04-stream-helpers/07-openai-stream.mdx +77 -0
- package/docs/07-reference/04-stream-helpers/08-anthropic-stream.mdx +79 -0
- package/docs/07-reference/04-stream-helpers/09-aws-bedrock-stream.mdx +91 -0
- package/docs/07-reference/04-stream-helpers/10-aws-bedrock-anthropic-stream.mdx +96 -0
- package/docs/07-reference/04-stream-helpers/10-aws-bedrock-messages-stream.mdx +96 -0
- package/docs/07-reference/04-stream-helpers/11-aws-bedrock-cohere-stream.mdx +93 -0
- package/docs/07-reference/04-stream-helpers/12-aws-bedrock-llama-2-stream.mdx +93 -0
- package/docs/07-reference/04-stream-helpers/13-cohere-stream.mdx +78 -0
- package/docs/07-reference/04-stream-helpers/14-google-generative-ai-stream.mdx +85 -0
- package/docs/07-reference/04-stream-helpers/15-hugging-face-stream.mdx +84 -0
- package/docs/07-reference/04-stream-helpers/16-langchain-adapter.mdx +98 -0
- package/docs/07-reference/04-stream-helpers/16-llamaindex-adapter.mdx +70 -0
- package/docs/07-reference/04-stream-helpers/17-mistral-stream.mdx +81 -0
- package/docs/07-reference/04-stream-helpers/18-replicate-stream.mdx +83 -0
- package/docs/07-reference/04-stream-helpers/19-inkeep-stream.mdx +80 -0
- package/docs/07-reference/04-stream-helpers/index.mdx +103 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-api-call-error.mdx +30 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-download-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-empty-response-body-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-argument-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-data-content-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-data-content.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-message-role-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-prompt-error.mdx +47 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-response-data-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-tool-approval-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-invalid-tool-input-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-json-parse-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-load-api-key-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-load-setting-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-message-conversion-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-content-generated-error.mdx +24 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-image-generated-error.mdx +36 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-object-generated-error.mdx +43 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-speech-generated-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-such-model-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-such-provider-error.mdx +28 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-such-tool-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-no-transcript-generated-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-retry-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-too-many-embedding-values-for-call-error.mdx +27 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-tool-call-not-found-for-approval-error.mdx +26 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-tool-call-repair-error.mdx +28 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-type-validation-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/ai-unsupported-functionality-error.mdx +25 -0
- package/docs/07-reference/05-ai-sdk-errors/index.mdx +38 -0
- package/docs/07-reference/index.mdx +34 -0
- package/docs/08-migration-guides/00-versioning.mdx +46 -0
- package/docs/08-migration-guides/24-migration-guide-6-0.mdx +823 -0
- package/docs/08-migration-guides/25-migration-guide-5-0-data.mdx +882 -0
- package/docs/08-migration-guides/26-migration-guide-5-0.mdx +3427 -0
- package/docs/08-migration-guides/27-migration-guide-4-2.mdx +99 -0
- package/docs/08-migration-guides/28-migration-guide-4-1.mdx +14 -0
- package/docs/08-migration-guides/29-migration-guide-4-0.mdx +1157 -0
- package/docs/08-migration-guides/36-migration-guide-3-4.mdx +14 -0
- package/docs/08-migration-guides/37-migration-guide-3-3.mdx +64 -0
- package/docs/08-migration-guides/38-migration-guide-3-2.mdx +46 -0
- package/docs/08-migration-guides/39-migration-guide-3-1.mdx +168 -0
- package/docs/08-migration-guides/index.mdx +22 -0
- package/docs/09-troubleshooting/01-azure-stream-slow.mdx +33 -0
- package/docs/09-troubleshooting/02-client-side-function-calls-not-invoked.mdx +22 -0
- package/docs/09-troubleshooting/03-server-actions-in-client-components.mdx +40 -0
- package/docs/09-troubleshooting/04-strange-stream-output.mdx +36 -0
- package/docs/09-troubleshooting/05-streamable-ui-errors.mdx +16 -0
- package/docs/09-troubleshooting/05-tool-invocation-missing-result.mdx +106 -0
- package/docs/09-troubleshooting/06-streaming-not-working-when-deployed.mdx +31 -0
- package/docs/09-troubleshooting/06-streaming-not-working-when-proxied.mdx +31 -0
- package/docs/09-troubleshooting/06-timeout-on-vercel.mdx +60 -0
- package/docs/09-troubleshooting/07-unclosed-streams.mdx +34 -0
- package/docs/09-troubleshooting/08-use-chat-failed-to-parse-stream.mdx +26 -0
- package/docs/09-troubleshooting/09-client-stream-error.mdx +25 -0
- package/docs/09-troubleshooting/10-use-chat-tools-no-response.mdx +32 -0
- package/docs/09-troubleshooting/11-use-chat-custom-request-options.mdx +149 -0
- package/docs/09-troubleshooting/12-typescript-performance-zod.mdx +46 -0
- package/docs/09-troubleshooting/12-use-chat-an-error-occurred.mdx +59 -0
- package/docs/09-troubleshooting/13-repeated-assistant-messages.mdx +73 -0
- package/docs/09-troubleshooting/14-stream-abort-handling.mdx +73 -0
- package/docs/09-troubleshooting/14-tool-calling-with-structured-outputs.mdx +48 -0
- package/docs/09-troubleshooting/15-abort-breaks-resumable-streams.mdx +55 -0
- package/docs/09-troubleshooting/15-stream-text-not-working.mdx +33 -0
- package/docs/09-troubleshooting/16-streaming-status-delay.mdx +63 -0
- package/docs/09-troubleshooting/17-use-chat-stale-body-data.mdx +141 -0
- package/docs/09-troubleshooting/18-ontoolcall-type-narrowing.mdx +66 -0
- package/docs/09-troubleshooting/19-unsupported-model-version.mdx +50 -0
- package/docs/09-troubleshooting/20-no-object-generated-content-filter.mdx +72 -0
- package/docs/09-troubleshooting/30-model-is-not-assignable-to-type.mdx +21 -0
- package/docs/09-troubleshooting/40-typescript-cannot-find-namespace-jsx.mdx +24 -0
- package/docs/09-troubleshooting/50-react-maximum-update-depth-exceeded.mdx +39 -0
- package/docs/09-troubleshooting/60-jest-cannot-find-module-ai-rsc.mdx +22 -0
- package/docs/09-troubleshooting/index.mdx +11 -0
- package/package.json +7 -3
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Streaming React Components
|
|
3
|
+
description: Overview of streaming RSCs
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
import { UIPreviewCard, Card } from '@/components/home/card';
|
|
7
|
+
import { EventPlanning } from '@/components/home/event-planning';
|
|
8
|
+
import { Searching } from '@/components/home/searching';
|
|
9
|
+
import { Weather } from '@/components/home/weather';
|
|
10
|
+
|
|
11
|
+
# Streaming React Components
|
|
12
|
+
|
|
13
|
+
<Note type="warning">
|
|
14
|
+
AI SDK RSC is currently experimental. We recommend using [AI SDK
|
|
15
|
+
UI](/docs/ai-sdk-ui/overview) for production. For guidance on migrating from
|
|
16
|
+
RSC to UI, see our [migration guide](/docs/ai-sdk-rsc/migrating-to-ui).
|
|
17
|
+
</Note>
|
|
18
|
+
|
|
19
|
+
The RSC API allows you to stream React components from the server to the client with the [`streamUI`](/docs/reference/ai-sdk-rsc/stream-ui) function. This is useful when you want to go beyond raw text and stream components to the client in real-time.
|
|
20
|
+
|
|
21
|
+
Similar to [ AI SDK Core ](/docs/ai-sdk-core/overview) APIs (like [ `streamText` ](/docs/reference/ai-sdk-core/stream-text) and [ `streamObject` ](/docs/reference/ai-sdk-core/stream-object)), `streamUI` provides a single function to call a model and allow it to respond with React Server Components.
|
|
22
|
+
It supports the same model interfaces as AI SDK Core APIs.
|
|
23
|
+
|
|
24
|
+
### Concepts
|
|
25
|
+
|
|
26
|
+
To give the model the ability to respond to a user's prompt with a React component, you can leverage [tools](/docs/ai-sdk-core/tools-and-tool-calling).
|
|
27
|
+
|
|
28
|
+
<Note>
|
|
29
|
+
Remember, tools are like programs you can give to the model, and the model can
|
|
30
|
+
decide when and whether to use them based on the context of the conversation.
|
|
31
|
+
</Note>
|
|
32
|
+
|
|
33
|
+
With the `streamUI` function, **you provide tools that return React components**. With the ability to stream components, the model is akin to a dynamic router that is able to understand the user's intention and display relevant UI.
|
|
34
|
+
|
|
35
|
+
At a high level, the `streamUI` works like other AI SDK Core functions: you can provide the model with a prompt or some conversation history and, optionally, some tools. If the model decides, based on the context of the conversation, to call a tool, it will generate a tool call. The `streamUI` function will then run the respective tool, returning a React component. If the model doesn't have a relevant tool to use, it will return a text generation, which will be passed to the `text` function, for you to handle (render and return as a React component).
|
|
36
|
+
|
|
37
|
+
<Note>Remember, the `streamUI` function must return a React component. </Note>
|
|
38
|
+
|
|
39
|
+
```tsx
|
|
40
|
+
const result = await streamUI({
|
|
41
|
+
model: openai('gpt-4o'),
|
|
42
|
+
prompt: 'Get the weather for San Francisco',
|
|
43
|
+
text: ({ content }) => <div>{content}</div>,
|
|
44
|
+
tools: {},
|
|
45
|
+
});
|
|
46
|
+
```
|
|
47
|
+
|
|
48
|
+
This example calls the `streamUI` function using OpenAI's `gpt-4o` model, passes a prompt, specifies how the model's plain text response (`content`) should be rendered, and then provides an empty object for tools. Even though this example does not define any tools, it will stream the model's response as a `div` rather than plain text.
|
|
49
|
+
|
|
50
|
+
### Adding A Tool
|
|
51
|
+
|
|
52
|
+
Using tools with `streamUI` is similar to how you use tools with `generateText` and `streamText`.
|
|
53
|
+
A tool is an object that has:
|
|
54
|
+
|
|
55
|
+
- `description`: a string telling the model what the tool does and when to use it
|
|
56
|
+
- `inputSchema`: a Zod schema describing what the tool needs in order to run
|
|
57
|
+
- `generate`: an asynchronous function that will be run if the model calls the tool. This must return a React component
|
|
58
|
+
|
|
59
|
+
Let's expand the previous example to add a tool.
|
|
60
|
+
|
|
61
|
+
```tsx highlight="6-14"
|
|
62
|
+
const result = await streamUI({
|
|
63
|
+
model: openai('gpt-4o'),
|
|
64
|
+
prompt: 'Get the weather for San Francisco',
|
|
65
|
+
text: ({ content }) => <div>{content}</div>,
|
|
66
|
+
tools: {
|
|
67
|
+
getWeather: {
|
|
68
|
+
description: 'Get the weather for a location',
|
|
69
|
+
inputSchema: z.object({ location: z.string() }),
|
|
70
|
+
generate: async function* ({ location }) {
|
|
71
|
+
yield <LoadingComponent />;
|
|
72
|
+
const weather = await getWeather(location);
|
|
73
|
+
return <WeatherComponent weather={weather} location={location} />;
|
|
74
|
+
},
|
|
75
|
+
},
|
|
76
|
+
},
|
|
77
|
+
});
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
This tool would be run if the user asks for the weather for their location. If the user hasn't specified a location, the model will ask for it before calling the tool. When the model calls the tool, the generate function will initially return a loading component. This component will show until the awaited call to `getWeather` is resolved, at which point, the model will stream the `<WeatherComponent />` to the user.
|
|
81
|
+
|
|
82
|
+
<Note>
|
|
83
|
+
Note: This example uses a [ generator function
|
|
84
|
+
](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function*)
|
|
85
|
+
(`function*`), which allows you to pause its execution and return a value,
|
|
86
|
+
then resume from where it left off on the next call. This is useful for
|
|
87
|
+
handling data streams, as you can fetch and return data from an asynchronous
|
|
88
|
+
source like an API, then resume the function to fetch the next chunk when
|
|
89
|
+
needed. By yielding values one at a time, generator functions enable efficient
|
|
90
|
+
processing of streaming data without blocking the main thread.
|
|
91
|
+
</Note>
|
|
92
|
+
|
|
93
|
+
## Using `streamUI` with Next.js
|
|
94
|
+
|
|
95
|
+
Let's see how you can use the example above in a Next.js application.
|
|
96
|
+
|
|
97
|
+
To use `streamUI` in a Next.js application, you will need two things:
|
|
98
|
+
|
|
99
|
+
1. A Server Action (where you will call `streamUI`)
|
|
100
|
+
2. A page to call the Server Action and render the resulting components
|
|
101
|
+
|
|
102
|
+
### Step 1: Create a Server Action
|
|
103
|
+
|
|
104
|
+
<Note>
|
|
105
|
+
Server Actions are server-side functions that you can call directly from the
|
|
106
|
+
frontend. For more info, see [the
|
|
107
|
+
documentation](https://nextjs.org/docs/app/building-your-application/data-fetching/server-actions-and-mutations#with-client-components).
|
|
108
|
+
</Note>
|
|
109
|
+
|
|
110
|
+
Create a Server Action at `app/actions.tsx` and add the following code:
|
|
111
|
+
|
|
112
|
+
```tsx filename="app/actions.tsx"
|
|
113
|
+
'use server';
|
|
114
|
+
|
|
115
|
+
import { streamUI } from '@ai-sdk/rsc';
|
|
116
|
+
import { openai } from '@ai-sdk/openai';
|
|
117
|
+
import { z } from 'zod';
|
|
118
|
+
|
|
119
|
+
const LoadingComponent = () => (
|
|
120
|
+
<div className="animate-pulse p-4">getting weather...</div>
|
|
121
|
+
);
|
|
122
|
+
|
|
123
|
+
const getWeather = async (location: string) => {
|
|
124
|
+
await new Promise(resolve => setTimeout(resolve, 2000));
|
|
125
|
+
return '82°F️ ☀️';
|
|
126
|
+
};
|
|
127
|
+
|
|
128
|
+
interface WeatherProps {
|
|
129
|
+
location: string;
|
|
130
|
+
weather: string;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
const WeatherComponent = (props: WeatherProps) => (
|
|
134
|
+
<div className="border border-neutral-200 p-4 rounded-lg max-w-fit">
|
|
135
|
+
The weather in {props.location} is {props.weather}
|
|
136
|
+
</div>
|
|
137
|
+
);
|
|
138
|
+
|
|
139
|
+
export async function streamComponent() {
|
|
140
|
+
const result = await streamUI({
|
|
141
|
+
model: openai('gpt-4o'),
|
|
142
|
+
prompt: 'Get the weather for San Francisco',
|
|
143
|
+
text: ({ content }) => <div>{content}</div>,
|
|
144
|
+
tools: {
|
|
145
|
+
getWeather: {
|
|
146
|
+
description: 'Get the weather for a location',
|
|
147
|
+
inputSchema: z.object({
|
|
148
|
+
location: z.string(),
|
|
149
|
+
}),
|
|
150
|
+
generate: async function* ({ location }) {
|
|
151
|
+
yield <LoadingComponent />;
|
|
152
|
+
const weather = await getWeather(location);
|
|
153
|
+
return <WeatherComponent weather={weather} location={location} />;
|
|
154
|
+
},
|
|
155
|
+
},
|
|
156
|
+
},
|
|
157
|
+
});
|
|
158
|
+
|
|
159
|
+
return result.value;
|
|
160
|
+
}
|
|
161
|
+
```
|
|
162
|
+
|
|
163
|
+
The `getWeather` tool should look familiar as it is identical to the example in the previous section. In order for this tool to work:
|
|
164
|
+
|
|
165
|
+
1. First define a `LoadingComponent`, which renders a pulsing `div` that will show some loading text.
|
|
166
|
+
2. Next, define a `getWeather` function that will wait for 2 seconds (to simulate fetching the weather externally) before returning the "weather" for a `location`. Note: you could run any asynchronous TypeScript code here.
|
|
167
|
+
3. Finally, define a `WeatherComponent` which takes in `location` and `weather` as props, which are then rendered within a `div`.
|
|
168
|
+
|
|
169
|
+
Your Server Action is an asynchronous function called `streamComponent` that takes no inputs, and returns a `ReactNode`. Within the action, you call the `streamUI` function, specifying the model (`gpt-4o`), the prompt, the component that should be rendered if the model chooses to return text, and finally, your `getWeather` tool. Last but not least, you return the resulting component generated by the model with `result.value`.
|
|
170
|
+
|
|
171
|
+
To call this Server Action and display the resulting React Component, you will need a page.
|
|
172
|
+
|
|
173
|
+
### Step 2: Create a Page
|
|
174
|
+
|
|
175
|
+
Create or update your root page (`app/page.tsx`) with the following code:
|
|
176
|
+
|
|
177
|
+
```tsx filename="app/page.tsx"
|
|
178
|
+
'use client';
|
|
179
|
+
|
|
180
|
+
import { useState } from 'react';
|
|
181
|
+
import { Button } from '@/components/ui/button';
|
|
182
|
+
import { streamComponent } from './actions';
|
|
183
|
+
|
|
184
|
+
export default function Page() {
|
|
185
|
+
const [component, setComponent] = useState<React.ReactNode>();
|
|
186
|
+
|
|
187
|
+
return (
|
|
188
|
+
<div>
|
|
189
|
+
<form
|
|
190
|
+
onSubmit={async e => {
|
|
191
|
+
e.preventDefault();
|
|
192
|
+
setComponent(await streamComponent());
|
|
193
|
+
}}
|
|
194
|
+
>
|
|
195
|
+
<Button>Stream Component</Button>
|
|
196
|
+
</form>
|
|
197
|
+
<div>{component}</div>
|
|
198
|
+
</div>
|
|
199
|
+
);
|
|
200
|
+
}
|
|
201
|
+
```
|
|
202
|
+
|
|
203
|
+
This page is first marked as a client component with the `"use client";` directive, since it uses hooks and interactivity. On the page, you render a form. When that form is submitted, you call the `streamComponent` action created in the previous step (just like any other function). The `streamComponent` action returns a `ReactNode` that you can then render on the page using React state (`setComponent`).
|
|
204
|
+
|
|
205
|
+
## Going beyond a single prompt
|
|
206
|
+
|
|
207
|
+
You can now allow the model to respond to your prompt with a React component. However, this example is limited to a static prompt that is set within your Server Action. You could make this example interactive by turning it into a chatbot.
|
|
208
|
+
|
|
209
|
+
Learn how to stream React components with the Next.js App Router using `streamUI` with this [example](/examples/next-app/interface/route-components).
|
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Managing Generative UI State
|
|
3
|
+
description: Overview of the AI and UI states
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Managing Generative UI State
|
|
7
|
+
|
|
8
|
+
<Note type="warning">
|
|
9
|
+
AI SDK RSC is currently experimental. We recommend using [AI SDK
|
|
10
|
+
UI](/docs/ai-sdk-ui/overview) for production. For guidance on migrating from
|
|
11
|
+
RSC to UI, see our [migration guide](/docs/ai-sdk-rsc/migrating-to-ui).
|
|
12
|
+
</Note>
|
|
13
|
+
|
|
14
|
+
State is an essential part of any application. State is particularly important in AI applications as it is passed to large language models (LLMs) on each request to ensure they have the necessary context to produce a great generation. Traditional chatbots are text-based and have a structure that mirrors that of any chat application.
|
|
15
|
+
|
|
16
|
+
For example, in a chatbot, state is an array of `messages` where each `message` has:
|
|
17
|
+
|
|
18
|
+
- `id`: a unique identifier
|
|
19
|
+
- `role`: who sent the message (user/assistant/system/tool)
|
|
20
|
+
- `content`: the content of the message
|
|
21
|
+
|
|
22
|
+
This state can be rendered in the UI and sent to the model without any modifications.
|
|
23
|
+
|
|
24
|
+
With Generative UI, the model can now return a React component, rather than a plain text message. The client can render that component without issue, but that state can't be sent back to the model because React components aren't serialisable. So, what can you do?
|
|
25
|
+
|
|
26
|
+
**The solution is to split the state in two, where one (AI State) becomes a proxy for the other (UI State)**.
|
|
27
|
+
|
|
28
|
+
One way to understand this concept is through a Lego analogy. Imagine a 10,000 piece Lego model that, once built, cannot be easily transported because it is fragile. By taking the model apart, it can be easily transported, and then rebuilt following the steps outlined in the instructions pamphlet. In this way, the instructions pamphlet is a proxy to the physical structure. Similarly, AI State provides a serialisable (JSON) representation of your UI that can be passed back and forth to the model.
|
|
29
|
+
|
|
30
|
+
## What is AI and UI State?
|
|
31
|
+
|
|
32
|
+
The RSC API simplifies how you manage AI State and UI State, providing a robust way to keep them in sync between your database, server and client.
|
|
33
|
+
|
|
34
|
+
### AI State
|
|
35
|
+
|
|
36
|
+
AI State refers to the state of your application in a serialisable format that will be used on the server and can be shared with the language model.
|
|
37
|
+
|
|
38
|
+
For a chat app, the AI State is the conversation history (messages) between the user and the assistant. Components generated by the model would be represented in a JSON format as a tool alongside any necessary props. AI State can also be used to store other values and meta information such as `createdAt` for each message and `chatId` for each conversation. The LLM reads this history so it can generate the next message. This state serves as the source of truth for the current application state.
|
|
39
|
+
|
|
40
|
+
<Note>
|
|
41
|
+
**Note**: AI state can be accessed/modified from both the server and the
|
|
42
|
+
client.
|
|
43
|
+
</Note>
|
|
44
|
+
|
|
45
|
+
### UI State
|
|
46
|
+
|
|
47
|
+
UI State refers to the state of your application that is rendered on the client. It is a fully client-side state (similar to `useState`) that can store anything from JavaScript values to React elements. UI state is a list of actual UI elements that are rendered on the client.
|
|
48
|
+
|
|
49
|
+
<Note>**Note**: UI State can only be accessed client-side.</Note>
|
|
50
|
+
|
|
51
|
+
## Using AI / UI State
|
|
52
|
+
|
|
53
|
+
### Creating the AI Context
|
|
54
|
+
|
|
55
|
+
AI SDK RSC simplifies managing AI and UI state across your application by providing several hooks. These hooks are powered by [ React context ](https://react.dev/reference/react/hooks#context-hooks) under the hood.
|
|
56
|
+
|
|
57
|
+
Notably, this means you do not have to pass the message history to the server explicitly for each request. You can also access and update your application state in any child component of the context provider. As you begin building [multistep generative interfaces](/docs/ai-sdk-rsc/multistep-interfaces), this will be particularly helpful.
|
|
58
|
+
|
|
59
|
+
To use `@ai-sdk/rsc` to manage AI and UI State in your application, you can create a React context using [`createAI`](/docs/reference/ai-sdk-rsc/create-ai):
|
|
60
|
+
|
|
61
|
+
```tsx filename='app/actions.tsx'
|
|
62
|
+
// Define the AI state and UI state types
|
|
63
|
+
export type ServerMessage = {
|
|
64
|
+
role: 'user' | 'assistant';
|
|
65
|
+
content: string;
|
|
66
|
+
};
|
|
67
|
+
|
|
68
|
+
export type ClientMessage = {
|
|
69
|
+
id: string;
|
|
70
|
+
role: 'user' | 'assistant';
|
|
71
|
+
display: ReactNode;
|
|
72
|
+
};
|
|
73
|
+
|
|
74
|
+
export const sendMessage = async (input: string): Promise<ClientMessage> => {
|
|
75
|
+
"use server"
|
|
76
|
+
...
|
|
77
|
+
}
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
```tsx filename='app/ai.ts'
|
|
81
|
+
import { createAI } from '@ai-sdk/rsc';
|
|
82
|
+
import { ClientMessage, ServerMessage, sendMessage } from './actions';
|
|
83
|
+
|
|
84
|
+
export type AIState = ServerMessage[];
|
|
85
|
+
export type UIState = ClientMessage[];
|
|
86
|
+
|
|
87
|
+
// Create the AI provider with the initial states and allowed actions
|
|
88
|
+
export const AI = createAI<AIState, UIState>({
|
|
89
|
+
initialAIState: [],
|
|
90
|
+
initialUIState: [],
|
|
91
|
+
actions: {
|
|
92
|
+
sendMessage,
|
|
93
|
+
},
|
|
94
|
+
});
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
<Note>You must pass Server Actions to the `actions` object.</Note>
|
|
98
|
+
|
|
99
|
+
In this example, you define types for AI State and UI State, respectively.
|
|
100
|
+
|
|
101
|
+
Next, wrap your application with your newly created context. With that, you can get and set AI and UI State across your entire application.
|
|
102
|
+
|
|
103
|
+
```tsx filename='app/layout.tsx'
|
|
104
|
+
import { type ReactNode } from 'react';
|
|
105
|
+
import { AI } from './ai';
|
|
106
|
+
|
|
107
|
+
export default function RootLayout({
|
|
108
|
+
children,
|
|
109
|
+
}: Readonly<{ children: ReactNode }>) {
|
|
110
|
+
return (
|
|
111
|
+
<AI>
|
|
112
|
+
<html lang="en">
|
|
113
|
+
<body>{children}</body>
|
|
114
|
+
</html>
|
|
115
|
+
</AI>
|
|
116
|
+
);
|
|
117
|
+
}
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
## Reading UI State in Client
|
|
121
|
+
|
|
122
|
+
The UI state can be accessed in Client Components using the [`useUIState`](/docs/reference/ai-sdk-rsc/use-ui-state) hook provided by the RSC API. The hook returns the current UI state and a function to update the UI state like React's `useState`.
|
|
123
|
+
|
|
124
|
+
```tsx filename='app/page.tsx'
|
|
125
|
+
'use client';
|
|
126
|
+
|
|
127
|
+
import { useUIState } from '@ai-sdk/rsc';
|
|
128
|
+
|
|
129
|
+
export default function Page() {
|
|
130
|
+
const [messages, setMessages] = useUIState();
|
|
131
|
+
|
|
132
|
+
return (
|
|
133
|
+
<ul>
|
|
134
|
+
{messages.map(message => (
|
|
135
|
+
<li key={message.id}>{message.display}</li>
|
|
136
|
+
))}
|
|
137
|
+
</ul>
|
|
138
|
+
);
|
|
139
|
+
}
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
## Reading AI State in Client
|
|
143
|
+
|
|
144
|
+
The AI state can be accessed in Client Components using the [`useAIState`](/docs/reference/ai-sdk-rsc/use-ai-state) hook provided by the RSC API. The hook returns the current AI state.
|
|
145
|
+
|
|
146
|
+
```tsx filename='app/page.tsx'
|
|
147
|
+
'use client';
|
|
148
|
+
|
|
149
|
+
import { useAIState } from '@ai-sdk/rsc';
|
|
150
|
+
|
|
151
|
+
export default function Page() {
|
|
152
|
+
const [messages, setMessages] = useAIState();
|
|
153
|
+
|
|
154
|
+
return (
|
|
155
|
+
<ul>
|
|
156
|
+
{messages.map(message => (
|
|
157
|
+
<li key={message.id}>{message.content}</li>
|
|
158
|
+
))}
|
|
159
|
+
</ul>
|
|
160
|
+
);
|
|
161
|
+
}
|
|
162
|
+
```
|
|
163
|
+
|
|
164
|
+
## Reading AI State on Server
|
|
165
|
+
|
|
166
|
+
The AI State can be accessed within any Server Action provided to the `createAI` context using the [`getAIState`](/docs/reference/ai-sdk-rsc/get-ai-state) function. It returns the current AI state as a read-only value:
|
|
167
|
+
|
|
168
|
+
```tsx filename='app/actions.ts'
|
|
169
|
+
import { getAIState } from '@ai-sdk/rsc';
|
|
170
|
+
|
|
171
|
+
export async function sendMessage(message: string) {
|
|
172
|
+
'use server';
|
|
173
|
+
|
|
174
|
+
const history = getAIState();
|
|
175
|
+
|
|
176
|
+
const response = await generateText({
|
|
177
|
+
model: __MODEL__,
|
|
178
|
+
messages: [...history, { role: 'user', content: message }],
|
|
179
|
+
});
|
|
180
|
+
|
|
181
|
+
return response;
|
|
182
|
+
}
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
<Note>
|
|
186
|
+
Remember, you can only access state within actions that have been passed to
|
|
187
|
+
the `createAI` context within the `actions` key.
|
|
188
|
+
</Note>
|
|
189
|
+
|
|
190
|
+
## Updating AI State on Server
|
|
191
|
+
|
|
192
|
+
The AI State can also be updated from within your Server Action with the [`getMutableAIState`](/docs/reference/ai-sdk-rsc/get-mutable-ai-state) function. This function is similar to `getAIState`, but it returns the state with methods to read and update it:
|
|
193
|
+
|
|
194
|
+
```tsx filename='app/actions.ts'
|
|
195
|
+
import { getMutableAIState } from '@ai-sdk/rsc';
|
|
196
|
+
|
|
197
|
+
export async function sendMessage(message: string) {
|
|
198
|
+
'use server';
|
|
199
|
+
|
|
200
|
+
const history = getMutableAIState();
|
|
201
|
+
|
|
202
|
+
// Update the AI state with the new user message.
|
|
203
|
+
history.update([...history.get(), { role: 'user', content: message }]);
|
|
204
|
+
|
|
205
|
+
const response = await generateText({
|
|
206
|
+
model: __MODEL__,
|
|
207
|
+
messages: history.get(),
|
|
208
|
+
});
|
|
209
|
+
|
|
210
|
+
// Update the AI state again with the response from the model.
|
|
211
|
+
history.done([...history.get(), { role: 'assistant', content: response }]);
|
|
212
|
+
|
|
213
|
+
return response;
|
|
214
|
+
}
|
|
215
|
+
```
|
|
216
|
+
|
|
217
|
+
<Note>
|
|
218
|
+
It is important to update the AI State with new responses using `.update()`
|
|
219
|
+
and `.done()` to keep the conversation history in sync.
|
|
220
|
+
</Note>
|
|
221
|
+
|
|
222
|
+
## Calling Server Actions from the Client
|
|
223
|
+
|
|
224
|
+
To call the `sendMessage` action from the client, you can use the [`useActions`](/docs/reference/ai-sdk-rsc/use-actions) hook. The hook returns all the available Actions that were provided to `createAI`:
|
|
225
|
+
|
|
226
|
+
```tsx filename='app/page.tsx'
|
|
227
|
+
'use client';
|
|
228
|
+
|
|
229
|
+
import { useActions, useUIState } from '@ai-sdk/rsc';
|
|
230
|
+
import { AI } from './ai';
|
|
231
|
+
|
|
232
|
+
export default function Page() {
|
|
233
|
+
const { sendMessage } = useActions<typeof AI>();
|
|
234
|
+
const [messages, setMessages] = useUIState();
|
|
235
|
+
|
|
236
|
+
const handleSubmit = async event => {
|
|
237
|
+
event.preventDefault();
|
|
238
|
+
|
|
239
|
+
setMessages([
|
|
240
|
+
...messages,
|
|
241
|
+
{ id: Date.now(), role: 'user', display: event.target.message.value },
|
|
242
|
+
]);
|
|
243
|
+
|
|
244
|
+
const response = await sendMessage(event.target.message.value);
|
|
245
|
+
|
|
246
|
+
setMessages([
|
|
247
|
+
...messages,
|
|
248
|
+
{ id: Date.now(), role: 'assistant', display: response },
|
|
249
|
+
]);
|
|
250
|
+
};
|
|
251
|
+
|
|
252
|
+
return (
|
|
253
|
+
<>
|
|
254
|
+
<ul>
|
|
255
|
+
{messages.map(message => (
|
|
256
|
+
<li key={message.id}>{message.display}</li>
|
|
257
|
+
))}
|
|
258
|
+
</ul>
|
|
259
|
+
<form onSubmit={handleSubmit}>
|
|
260
|
+
<input type="text" name="message" />
|
|
261
|
+
<button type="submit">Send</button>
|
|
262
|
+
</form>
|
|
263
|
+
</>
|
|
264
|
+
);
|
|
265
|
+
}
|
|
266
|
+
```
|
|
267
|
+
|
|
268
|
+
When the user submits a message, the `sendMessage` action is called with the message content. The response from the action is then added to the UI state, updating the displayed messages.
|
|
269
|
+
|
|
270
|
+
<Note>
|
|
271
|
+
Important! Don't forget to update the UI State after you call your Server
|
|
272
|
+
Action otherwise the streamed component will not show in the UI.
|
|
273
|
+
</Note>
|
|
274
|
+
|
|
275
|
+
To learn more, check out this [example](/examples/next-app/state-management/ai-ui-states) on managing AI and UI state using `@ai-sdk/rsc`.
|
|
276
|
+
|
|
277
|
+
---
|
|
278
|
+
|
|
279
|
+
Next, you will learn how you can save and restore state with `@ai-sdk/rsc`.
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Saving and Restoring States
|
|
3
|
+
description: Saving and restoring AI and UI states with onGetUIState and onSetAIState
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Saving and Restoring States
|
|
7
|
+
|
|
8
|
+
<Note type="warning">
|
|
9
|
+
AI SDK RSC is currently experimental. We recommend using [AI SDK
|
|
10
|
+
UI](/docs/ai-sdk-ui/overview) for production. For guidance on migrating from
|
|
11
|
+
RSC to UI, see our [migration guide](/docs/ai-sdk-rsc/migrating-to-ui).
|
|
12
|
+
</Note>
|
|
13
|
+
|
|
14
|
+
AI SDK RSC provides convenient methods for saving and restoring AI and UI state. This is useful for saving the state of your application after every model generation, and restoring it when the user revisits the generations.
|
|
15
|
+
|
|
16
|
+
## AI State
|
|
17
|
+
|
|
18
|
+
### Saving AI state
|
|
19
|
+
|
|
20
|
+
The AI state can be saved using the [`onSetAIState`](/docs/reference/ai-sdk-rsc/create-ai#on-set-ai-state) callback, which gets called whenever the AI state is updated. In the following example, you save the chat history to a database whenever the generation is marked as done.
|
|
21
|
+
|
|
22
|
+
```tsx filename='app/ai.ts'
|
|
23
|
+
export const AI = createAI<ServerMessage[], ClientMessage[]>({
|
|
24
|
+
actions: {
|
|
25
|
+
continueConversation,
|
|
26
|
+
},
|
|
27
|
+
onSetAIState: async ({ state, done }) => {
|
|
28
|
+
'use server';
|
|
29
|
+
|
|
30
|
+
if (done) {
|
|
31
|
+
saveChatToDB(state);
|
|
32
|
+
}
|
|
33
|
+
},
|
|
34
|
+
});
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
### Restoring AI state
|
|
38
|
+
|
|
39
|
+
The AI state can be restored using the [`initialAIState`](/docs/reference/ai-sdk-rsc/create-ai#initial-ai-state) prop passed to the context provider created by the [`createAI`](/docs/reference/ai-sdk-rsc/create-ai) function. In the following example, you restore the chat history from a database when the component is mounted.
|
|
40
|
+
|
|
41
|
+
```tsx filename='app/layout.tsx'
|
|
42
|
+
import { ReactNode } from 'react';
|
|
43
|
+
import { AI } from './ai';
|
|
44
|
+
|
|
45
|
+
export default async function RootLayout({
|
|
46
|
+
children,
|
|
47
|
+
}: Readonly<{ children: ReactNode }>) {
|
|
48
|
+
const chat = await loadChatFromDB();
|
|
49
|
+
|
|
50
|
+
return (
|
|
51
|
+
<html lang="en">
|
|
52
|
+
<body>
|
|
53
|
+
<AI initialAIState={chat}>{children}</AI>
|
|
54
|
+
</body>
|
|
55
|
+
</html>
|
|
56
|
+
);
|
|
57
|
+
}
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## UI State
|
|
61
|
+
|
|
62
|
+
### Saving UI state
|
|
63
|
+
|
|
64
|
+
The UI state cannot be saved directly, since the contents aren't yet serializable. Instead, you can use the AI state as proxy to store details about the UI state and use it to restore the UI state when needed.
|
|
65
|
+
|
|
66
|
+
### Restoring UI state
|
|
67
|
+
|
|
68
|
+
The UI state can be restored using the AI state as a proxy. In the following example, you restore the chat history from the AI state when the component is mounted. You use the [`onGetUIState`](/docs/reference/ai-sdk-rsc/create-ai#on-get-ui-state) callback to listen for SSR events and restore the UI state.
|
|
69
|
+
|
|
70
|
+
```tsx filename='app/ai.ts'
|
|
71
|
+
export const AI = createAI<ServerMessage[], ClientMessage[]>({
|
|
72
|
+
actions: {
|
|
73
|
+
continueConversation,
|
|
74
|
+
},
|
|
75
|
+
onGetUIState: async () => {
|
|
76
|
+
'use server';
|
|
77
|
+
|
|
78
|
+
const historyFromDB: ServerMessage[] = await loadChatFromDB();
|
|
79
|
+
const historyFromApp: ServerMessage[] = getAIState();
|
|
80
|
+
|
|
81
|
+
// If the history from the database is different from the
|
|
82
|
+
// history in the app, they're not in sync so return the UIState
|
|
83
|
+
// based on the history from the database
|
|
84
|
+
|
|
85
|
+
if (historyFromDB.length !== historyFromApp.length) {
|
|
86
|
+
return historyFromDB.map(({ role, content }) => ({
|
|
87
|
+
id: generateId(),
|
|
88
|
+
role,
|
|
89
|
+
display:
|
|
90
|
+
role === 'function' ? (
|
|
91
|
+
<Component {...JSON.parse(content)} />
|
|
92
|
+
) : (
|
|
93
|
+
content
|
|
94
|
+
),
|
|
95
|
+
}));
|
|
96
|
+
}
|
|
97
|
+
},
|
|
98
|
+
});
|
|
99
|
+
```
|
|
100
|
+
|
|
101
|
+
To learn more, check out this [example](/examples/next-app/state-management/save-and-restore-states) that persists and restores states in your Next.js application.
|
|
102
|
+
|
|
103
|
+
---
|
|
104
|
+
|
|
105
|
+
Next, you will learn how you can use `@ai-sdk/rsc` functions like `useActions` and `useUIState` to create interactive, multistep interfaces.
|