@mastra/mcp-docs-server 1.0.0-beta.4 → 1.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.docs/organized/changelogs/%40internal%2Fstorage-test-utils.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fagent-builder.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fai-sdk.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fastra.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fchroma.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fclickhouse.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fclient-js.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fcloudflare-d1.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fcloudflare.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fcore.md +326 -126
- package/.docs/organized/changelogs/%40mastra%2Fcouchbase.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fdeployer-cloud.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fdeployer-cloudflare.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fdeployer-netlify.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fdeployer-vercel.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fdeployer.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fdynamodb.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fevals.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Flance.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Flibsql.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Floggers.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fmcp-docs-server.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fmcp-registry-registry.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fmcp.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fmemory.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fmongodb.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fmssql.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fopensearch.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fpg.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fpinecone.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fplayground-ui.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fqdrant.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Frag.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Freact.md +80 -1
- package/.docs/organized/changelogs/%40mastra%2Fs3vectors.md +9 -0
- package/.docs/organized/changelogs/%40mastra%2Fschema-compat.md +36 -0
- package/.docs/organized/changelogs/%40mastra%2Fserver.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fturbopuffer.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fupstash.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvectorize.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-azure.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-cloudflare.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-deepgram.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-elevenlabs.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-gladia.md +92 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-google-gemini-live.md +67 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-google.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-murf.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-openai-realtime.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-openai.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-playai.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-sarvam.md +201 -1
- package/.docs/organized/changelogs/%40mastra%2Fvoice-speechify.md +201 -1
- package/.docs/organized/changelogs/create-mastra.md +201 -1
- package/.docs/organized/changelogs/mastra.md +201 -1
- package/.docs/organized/code-examples/memory-with-processors.md +1 -1
- package/.docs/organized/code-examples/quick-start.md +1 -1
- package/.docs/raw/agents/adding-voice.mdx +7 -10
- package/.docs/raw/agents/guardrails.mdx +19 -20
- package/.docs/raw/agents/human-in-the-loop-with-tools.mdx +6 -5
- package/.docs/raw/agents/networks.mdx +1 -2
- package/.docs/raw/agents/overview.mdx +5 -5
- package/.docs/raw/agents/using-tools.mdx +4 -5
- package/.docs/raw/course/01-first-agent/05-running-playground.md +5 -5
- package/.docs/raw/course/01-first-agent/09-testing-your-agent.md +3 -3
- package/.docs/raw/course/01-first-agent/13-testing-your-tool.md +3 -3
- package/.docs/raw/course/01-first-agent/17-testing-memory.md +2 -2
- package/.docs/raw/course/04-workflows/07-using-playground.md +1 -1
- package/.docs/raw/deployment/building-mastra.mdx +1 -1
- package/.docs/raw/deployment/cloud-providers/amazon-ec2.mdx +1 -1
- package/.docs/raw/deployment/cloud-providers/aws-lambda.mdx +1 -1
- package/.docs/raw/deployment/cloud-providers/azure-app-services.mdx +1 -1
- package/.docs/raw/deployment/cloud-providers/digital-ocean.mdx +1 -1
- package/.docs/raw/deployment/cloud-providers/index.mdx +1 -1
- package/.docs/raw/deployment/mastra-cloud/observability.mdx +19 -17
- package/.docs/raw/deployment/mastra-cloud/setting-up.mdx +1 -1
- package/.docs/raw/deployment/overview.mdx +2 -2
- package/.docs/raw/deployment/web-framework.mdx +5 -5
- package/.docs/raw/evals/custom-scorers.mdx +3 -5
- package/.docs/raw/evals/overview.mdx +2 -3
- package/.docs/raw/getting-started/project-structure.mdx +1 -1
- package/.docs/raw/getting-started/start.mdx +72 -0
- package/.docs/raw/getting-started/studio.mdx +1 -1
- package/.docs/raw/{frameworks/agentic-uis/ai-sdk.mdx → guides/build-your-ui/ai-sdk-ui.mdx} +105 -11
- package/.docs/raw/{frameworks/web-frameworks → guides/getting-started}/astro.mdx +23 -25
- package/.docs/raw/{frameworks/servers → guides/getting-started}/express.mdx +3 -4
- package/.docs/raw/guides/{guide → getting-started}/manual-install.mdx +1 -1
- package/.docs/raw/guides/{quickstarts/nextjs.mdx → getting-started/next-js.mdx} +11 -11
- package/.docs/raw/guides/{quickstarts/standalone-server.mdx → getting-started/quickstart.mdx} +7 -7
- package/.docs/raw/{frameworks/web-frameworks → guides/getting-started}/sveltekit.mdx +23 -25
- package/.docs/raw/{frameworks/web-frameworks → guides/getting-started}/vite-react.mdx +7 -7
- package/.docs/raw/guides/guide/ai-recruiter.mdx +2 -3
- package/.docs/raw/guides/guide/chef-michel.mdx +2 -3
- package/.docs/raw/guides/guide/notes-mcp-server.mdx +2 -2
- package/.docs/raw/guides/guide/research-assistant.mdx +7 -8
- package/.docs/raw/guides/guide/stock-agent.mdx +4 -6
- package/.docs/raw/guides/guide/web-search.mdx +12 -10
- package/.docs/raw/guides/migrations/agentnetwork.mdx +4 -4
- package/.docs/raw/guides/migrations/ai-sdk-v4-to-v5.mdx +1 -1
- package/.docs/raw/guides/migrations/upgrade-to-v1/agent.mdx +29 -0
- package/.docs/raw/guides/migrations/upgrade-to-v1/tools.mdx +5 -0
- package/.docs/raw/guides/migrations/upgrade-to-v1/workflows.mdx +22 -0
- package/.docs/raw/guides/migrations/vnext-to-standard-apis.mdx +2 -2
- package/.docs/raw/index.mdx +2 -2
- package/.docs/raw/mcp/overview.mdx +3 -5
- package/.docs/raw/memory/memory-processors.mdx +1 -2
- package/.docs/raw/memory/semantic-recall.mdx +7 -7
- package/.docs/raw/memory/storage/memory-with-libsql.mdx +2 -4
- package/.docs/raw/memory/storage/memory-with-mongodb.mdx +2 -4
- package/.docs/raw/memory/storage/memory-with-pg.mdx +2 -4
- package/.docs/raw/memory/storage/memory-with-upstash.mdx +2 -4
- package/.docs/raw/memory/threads-and-resources.mdx +3 -3
- package/.docs/raw/memory/working-memory.mdx +4 -5
- package/.docs/raw/{logging.mdx → observability/logging.mdx} +1 -1
- package/.docs/raw/observability/overview.mdx +2 -2
- package/.docs/raw/observability/tracing/exporters/otel.mdx +21 -2
- package/.docs/raw/observability/tracing/exporters/posthog.mdx +107 -0
- package/.docs/raw/observability/tracing/overview.mdx +3 -2
- package/.docs/raw/rag/chunking-and-embedding.mdx +16 -17
- package/.docs/raw/rag/overview.mdx +3 -2
- package/.docs/raw/rag/retrieval.mdx +20 -32
- package/.docs/raw/reference/agents/agent.mdx +7 -10
- package/.docs/raw/reference/agents/generateLegacy.mdx +2 -2
- package/.docs/raw/reference/agents/getLLM.mdx +1 -1
- package/.docs/raw/reference/agents/network.mdx +2 -3
- package/.docs/raw/reference/cli/mastra.mdx +2 -1
- package/.docs/raw/reference/client-js/agents.mdx +3 -3
- package/.docs/raw/reference/core/getLogger.mdx +1 -1
- package/.docs/raw/reference/core/listLogs.mdx +1 -1
- package/.docs/raw/reference/core/listLogsByRunId.mdx +1 -1
- package/.docs/raw/reference/core/mastra-model-gateway.mdx +5 -19
- package/.docs/raw/reference/core/setLogger.mdx +1 -1
- package/.docs/raw/reference/core/setTelemetry.mdx +1 -1
- package/.docs/raw/reference/evals/answer-relevancy.mdx +28 -98
- package/.docs/raw/reference/evals/answer-similarity.mdx +12 -258
- package/.docs/raw/reference/evals/bias.mdx +29 -87
- package/.docs/raw/reference/evals/completeness.mdx +31 -90
- package/.docs/raw/reference/evals/content-similarity.mdx +28 -88
- package/.docs/raw/reference/evals/context-precision.mdx +28 -130
- package/.docs/raw/reference/evals/context-relevance.mdx +11 -11
- package/.docs/raw/reference/evals/faithfulness.mdx +28 -101
- package/.docs/raw/reference/evals/hallucination.mdx +28 -103
- package/.docs/raw/reference/evals/keyword-coverage.mdx +28 -107
- package/.docs/raw/reference/evals/noise-sensitivity.mdx +11 -11
- package/.docs/raw/reference/evals/prompt-alignment.mdx +15 -15
- package/.docs/raw/reference/evals/textual-difference.mdx +27 -100
- package/.docs/raw/reference/evals/tone-consistency.mdx +25 -98
- package/.docs/raw/reference/evals/tool-call-accuracy.mdx +7 -7
- package/.docs/raw/reference/evals/toxicity.mdx +29 -92
- package/.docs/raw/reference/memory/memory-class.mdx +5 -7
- package/.docs/raw/reference/observability/tracing/exporters/posthog.mdx +132 -0
- package/.docs/raw/reference/processors/batch-parts-processor.mdx +1 -1
- package/.docs/raw/reference/processors/language-detector.mdx +1 -1
- package/.docs/raw/reference/processors/moderation-processor.mdx +2 -2
- package/.docs/raw/reference/processors/pii-detector.mdx +2 -2
- package/.docs/raw/reference/processors/prompt-injection-detector.mdx +1 -1
- package/.docs/raw/reference/processors/system-prompt-scrubber.mdx +2 -3
- package/.docs/raw/reference/processors/token-limiter-processor.mdx +2 -2
- package/.docs/raw/reference/processors/unicode-normalizer.mdx +1 -1
- package/.docs/raw/reference/rag/embeddings.mdx +5 -5
- package/.docs/raw/reference/rag/rerank.mdx +1 -2
- package/.docs/raw/reference/rag/rerankWithScorer.mdx +0 -1
- package/.docs/raw/reference/streaming/agents/stream.mdx +8 -1
- package/.docs/raw/reference/templates/overview.mdx +1 -4
- package/.docs/raw/reference/tools/client.mdx +1 -2
- package/.docs/raw/reference/tools/create-tool.mdx +132 -0
- package/.docs/raw/reference/tools/graph-rag-tool.mdx +5 -5
- package/.docs/raw/reference/tools/mcp-client.mdx +2 -4
- package/.docs/raw/reference/tools/mcp-server.mdx +1 -2
- package/.docs/raw/reference/tools/vector-query-tool.mdx +14 -15
- package/.docs/raw/reference/vectors/chroma.mdx +81 -1
- package/.docs/raw/reference/vectors/couchbase.mdx +24 -17
- package/.docs/raw/reference/vectors/lance.mdx +38 -22
- package/.docs/raw/reference/vectors/libsql.mdx +35 -2
- package/.docs/raw/reference/vectors/mongodb.mdx +35 -2
- package/.docs/raw/reference/vectors/opensearch.mdx +37 -16
- package/.docs/raw/reference/vectors/pg.mdx +43 -36
- package/.docs/raw/reference/vectors/pinecone.mdx +48 -1
- package/.docs/raw/reference/vectors/qdrant.mdx +36 -1
- package/.docs/raw/reference/vectors/turbopuffer.mdx +74 -0
- package/.docs/raw/reference/voice/openai-realtime.mdx +2 -2
- package/.docs/raw/reference/voice/voice.addInstructions.mdx +2 -3
- package/.docs/raw/reference/voice/voice.addTools.mdx +1 -1
- package/.docs/raw/reference/voice/voice.answer.mdx +1 -1
- package/.docs/raw/reference/voice/voice.close.mdx +1 -1
- package/.docs/raw/reference/voice/voice.connect.mdx +1 -1
- package/.docs/raw/reference/voice/voice.off.mdx +1 -1
- package/.docs/raw/reference/voice/voice.on.mdx +1 -1
- package/.docs/raw/reference/voice/voice.send.mdx +1 -1
- package/.docs/raw/reference/voice/voice.updateConfig.mdx +1 -1
- package/.docs/raw/server-db/mastra-client.mdx +1 -2
- package/.docs/raw/streaming/overview.mdx +20 -9
- package/.docs/raw/streaming/tool-streaming.mdx +47 -4
- package/.docs/raw/tools-mcp/advanced-usage.mdx +1 -2
- package/.docs/raw/tools-mcp/mcp-overview.mdx +3 -5
- package/.docs/raw/voice/overview.mdx +21 -41
- package/.docs/raw/voice/speech-to-speech.mdx +4 -4
- package/.docs/raw/voice/speech-to-text.mdx +1 -2
- package/.docs/raw/voice/text-to-speech.mdx +1 -2
- package/.docs/raw/workflows/control-flow.mdx +180 -0
- package/CHANGELOG.md +10 -0
- package/dist/{chunk-5NJC7NRO.js → chunk-4CM2BQNP.js} +24 -4
- package/dist/prepare-docs/package-changes.d.ts.map +1 -1
- package/dist/prepare-docs/prepare.js +1 -1
- package/dist/stdio.js +1 -1
- package/package.json +7 -7
- package/.docs/raw/frameworks/agentic-uis/cedar-os.mdx +0 -102
- package/.docs/raw/frameworks/agentic-uis/openrouter.mdx +0 -179
- package/.docs/raw/frameworks/web-frameworks/next-js.mdx +0 -379
- package/.docs/raw/getting-started/quickstart.mdx +0 -27
- package/.docs/raw/getting-started/templates.mdx +0 -73
- /package/.docs/raw/{frameworks/agentic-uis → guides/build-your-ui}/assistant-ui.mdx +0 -0
- /package/.docs/raw/{frameworks/agentic-uis → guides/build-your-ui}/copilotkit.mdx +0 -0
@@ -22,7 +22,7 @@ const speaker = new Speaker({
 // Initialize a real-time voice provider
 const voice = new OpenAIRealtimeVoice({
   realtimeConfig: {
-    model: "gpt-
+    model: "gpt-5.1-realtime",
     apiKey: process.env.OPENAI_API_KEY,
     options: {
       sessionConfig: {

@@ -15,7 +15,7 @@ import { OpenAIRealtimeVoice } from "@mastra/voice-openai-realtime";
 // Initialize a real-time voice provider
 const voice = new OpenAIRealtimeVoice({
   realtimeConfig: {
-    model: "gpt-
+    model: "gpt-5.1-realtime",
     apiKey: process.env.OPENAI_API_KEY,
   },
   speaker: "alloy",
@@ -240,7 +240,6 @@ const handleClientTool = async () => {
 This is a standard Mastra [agent](../agents/overview#setting-up-agents) configured to return hex color codes, intended to work with the browser-based client tool defined above.

 ```typescript title="src/mastra/agents/color-agent" showLineNumbers copy
-import { openai } from "@ai-sdk/openai";
 import { Agent } from "@mastra/core/agent";

 export const colorAgent = new Agent({

@@ -249,7 +248,7 @@ export const colorAgent = new Agent({
   instructions: `You are a helpful CSS assistant.
   You can change the background color of web pages.
   Respond with a hex reference for the color requested by the user`,
-  model: openai
+  model: "openai/gpt-5.1",
 });
 ```
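The two hunks above show the recurring change in this release: the `@ai-sdk/openai` import is removed and `model` becomes a provider/model routing string. As a rough sketch only (the `id` and `name` values below are assumptions, not taken from the diff), an agent written against the new form might look like:

```typescript
import { Agent } from "@mastra/core/agent";

// No provider import needed: the model is addressed by a "provider/model" string.
export const colorAgent = new Agent({
  id: "color-agent", // assumed id, not shown in the diff
  name: "Color Agent", // assumed name, not shown in the diff
  instructions: "Respond with a hex reference for the color requested by the user.",
  model: "openai/gpt-5.1",
});
```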
@@ -60,19 +60,30 @@ An agent stream provides access to various response properties:

 AI SDK v5 uses `LanguageModelV2` for the model providers. If you are getting an error that you are using an AI SDK v4 model you will need to upgrade your model package to the next major version.

-For integration with AI SDK v5, use `
+For integration with AI SDK v5, use the `toAISdkV5Stream()` utility from `@mastra/ai-sdk` to convert Mastra streams to AI SDK-compatible format:
+
+```typescript {2,9-12} showLineNumbers copy
+import { toAISdkV5Stream } from "@mastra/ai-sdk";

-```typescript {5} showLineNumbers copy
 const testAgent = mastra.getAgent("testAgent");

-const stream = await testAgent.stream(
-
-
-);
+const stream = await testAgent.stream([
+  { role: "user", content: "Help me organize my day" },
+]);

-
-
-
+// Convert to AI SDK v5 compatible stream
+const aiSDKStream = toAISdkV5Stream(stream, { from: "agent" });
+
+// Use with AI SDK v5 methods
+```
+
+For converting messages to AI SDK v5 format, use the `toAISdkV5Messages()` utility from `@mastra/ai-sdk/ui`:
+
+```typescript {1,4} showLineNumbers copy
+import { toAISdkV5Messages } from "@mastra/ai-sdk/ui";
+
+const messages = [{ role: "user", content: "Hello" }];
+const aiSDKMessages = toAISdkV5Messages(messages);
 ```

 ### Using `Agent.network()`
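The two utilities shown in this hunk can be combined in one place; the sketch below is illustrative only and assumes a local `mastra` instance (at an assumed `../mastra` path) that registers a `testAgent`:

```typescript
import { toAISdkV5Stream } from "@mastra/ai-sdk";
import { toAISdkV5Messages } from "@mastra/ai-sdk/ui";
import { mastra } from "../mastra"; // assumed path to your Mastra instance

const testAgent = mastra.getAgent("testAgent");

// Stream an agent response and convert it for AI SDK v5 consumers.
const stream = await testAgent.stream([
  { role: "user", content: "Summarize my unread messages" },
]);
const aiSdkStream = toAISdkV5Stream(stream, { from: "agent" });

// Convert Mastra-format messages into AI SDK v5 message objects for a UI.
const uiMessages = toAISdkV5Messages([{ role: "user", content: "Hello" }]);
```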
@@ -18,8 +18,7 @@ By combining writable tool streams with agent streaming, you gain fine grained c

 Agent streaming can be combined with tool calls, allowing tool outputs to be written directly into the agent’s streaming response. This makes it possible to surface tool activity as part of the overall interaction.

-```typescript {
-import { openai } from "@ai-sdk/openai";
+```typescript {3,9} showLineNumbers copy
 import { Agent } from "@mastra/core/agent";

 import { testTool } from "../tools/test-tool";

@@ -28,7 +27,7 @@ export const testAgent = new Agent({
   id: "test-agent",
   name: "Test Agent",
   instructions: "You are a weather agent.",
-  model: openai
+  model: "openai/gpt-5.1",
   tools: { testTool },
 });
 ```
@@ -117,9 +116,53 @@ for await (const chunk of stream) {
 }
 ```

+## Tool Lifecycle Hooks
+
+Tools support lifecycle hooks that allow you to monitor different stages of tool execution during streaming. These hooks are particularly useful for logging or analytics.
+
+### Example: Using onInputAvailable and onOutput
+
+```typescript showLineNumbers copy
+import { createTool } from "@mastra/core/tools";
+import { z } from "zod";
+
+export const weatherTool = createTool({
+  id: "weather-tool",
+  description: "Get weather information",
+  inputSchema: z.object({
+    city: z.string(),
+  }),
+  outputSchema: z.object({
+    temperature: z.number(),
+    conditions: z.string(),
+  }),
+  // Called when the complete input is available
+  onInputAvailable: ({ input, toolCallId }) => {
+    console.log(`Weather requested for: ${input.city}`);
+  },
+  execute: async (input) => {
+    const weather = await fetchWeather(input.city);
+    return weather;
+  },
+  // Called after successful execution
+  onOutput: ({ output, toolName }) => {
+    console.log(`${toolName} result: ${output.temperature}°F, ${output.conditions}`);
+  },
+});
+```
+
+### Available Hooks
+
+- **onInputStart**: Called when tool call input streaming begins
+- **onInputDelta**: Called for each chunk of input as it streams in
+- **onInputAvailable**: Called when complete input is parsed and validated
+- **onOutput**: Called after the tool successfully executes with the output
+
+For detailed documentation on all lifecycle hooks, see the [createTool() reference](/reference/v1/tools/create-tool#tool-lifecycle-hooks).
+
 ## Tool using an agent

-Pipe an agent
+Pipe an agent's `textStream` to the tool's `writer`. This streams partial output, and Mastra automatically aggregates the agent's usage into the tool run.

 ```typescript showLineNumbers copy
 import { createTool } from "@mastra/core/tools";
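The hunk above adds lifecycle-hook documentation; for the two hooks it lists without examples (`onInputStart`, `onInputDelta`), here is a rough sketch. Their callback payloads are not shown in this diff, so the no-argument handlers and the tool itself are assumptions, not Mastra-documented code:

```typescript
import { createTool } from "@mastra/core/tools";
import { z } from "zod";

// Hypothetical tool illustrating the four lifecycle hooks listed above.
export const echoTool = createTool({
  id: "echo-tool",
  description: "Echo the provided text",
  inputSchema: z.object({ text: z.string() }),
  outputSchema: z.object({ echoed: z.string() }),
  // Input streaming has begun for this tool call.
  onInputStart: () => console.log("echo-tool: input streaming started"),
  // A chunk of input arrived (payload shape not shown in this diff).
  onInputDelta: () => console.log("echo-tool: received an input delta"),
  // The complete, validated input is available.
  onInputAvailable: ({ input, toolCallId }) => {
    console.log(`echo-tool (${toolCallId}): input "${input.text}"`);
  },
  execute: async (input) => ({ echoed: input.text }),
  // The tool finished successfully.
  onOutput: ({ output, toolName }) => {
    console.log(`${toolName} echoed: ${output.echoed}`);
  },
});
```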
@@ -110,7 +110,6 @@ You can then add this tool to your Mastra agent just like any other tool:

 ```typescript title="src/mastra/agents/mixedToolsAgent.ts"
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { vercelWeatherTool } from "../tools/vercelWeatherTool"; // Vercel AI SDK tool
 import { mastraTool } from "../tools/mastraTool"; // Mastra createTool tool

@@ -118,7 +117,7 @@ export const mixedToolsAgent = new Agent({
   id: "mixed-tools-agent",
   name: "Mixed Tools Agent",
   instructions: "You can use tools defined in different formats.",
-  model: openai
+  model: "openai/gpt-5.1",
   tools: {
     weatherVercel: vercelWeatherTool,
     someMastraTool: mastraTool,
@@ -54,8 +54,7 @@ export const testMcpClient = new MCPClient({

 To use tools from an MCP server in an agent, import your `MCPClient` and call `.listTools()` in the `tools` parameter. This loads from the defined MCP servers, making them available to the agent.

-```typescript {
-import { openai } from "@ai-sdk/openai";
+```typescript {3,15} title="src/mastra/agents/test-agent.ts" showLineNumbers copy
 import { Agent } from "@mastra/core/agent";

 import { testMcpClient } from "../mcp/test-mcp-client";

@@ -70,7 +69,7 @@ export const testAgent = new Agent({
   - US National Weather Service

   Answer questions using the information you find using the MCP Servers.`,
-  model: openai
+  model: "openai/gpt-5.1",
   tools: await testMcpClient.listTools(),
 });
 ```

@@ -144,8 +143,7 @@ Use the `.listTools()` method to fetch tools from all configured MCP servers. Th

 > See [listTools()](/reference/v1/tools/mcp-client#listtools) for more information.

-```typescript {
-import { openai } from "@ai-sdk/openai";
+```typescript {7} title="src/mastra/agents/test-agent.ts" showLineNumbers copy
 import { Agent } from "@mastra/core/agent";

 import { testMcpClient } from "../mcp/test-mcp-client";
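For context on the `.listTools()` usage above, a minimal end-to-end sketch follows; the `MCPClient` server configuration (server name, command, and package) is a placeholder assumption and is not part of this diff:

```typescript
import { MCPClient } from "@mastra/mcp";
import { Agent } from "@mastra/core/agent";

// Placeholder server configuration; adjust to your own MCP servers.
export const testMcpClient = new MCPClient({
  servers: {
    weather: {
      command: "npx",
      args: ["-y", "@example/weather-mcp-server"], // hypothetical package
    },
  },
});

export const testAgent = new Agent({
  id: "test-agent",
  name: "Test Agent",
  instructions: "Answer questions using the information you find using the MCP Servers.",
  model: "openai/gpt-5.1",
  // Load tools from every configured MCP server, as shown in the diff.
  tools: await testMcpClient.listTools(),
});
```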
@@ -17,7 +17,6 @@ To learn how to integrate voice capabilities into your agents, check out the [Ad

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { OpenAIVoice } from "@mastra/voice-openai";

 // Initialize OpenAI voice for TTS

@@ -27,7 +26,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new OpenAIVoice(),
 });
 ```

@@ -46,7 +45,6 @@ For detailed configuration options and advanced features, check out our [Text-to

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { OpenAIVoice } from "@mastra/voice-openai";
 import { playAudio } from "@mastra/node-audio";

@@ -55,7 +53,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new OpenAIVoice(),
 });

@@ -77,7 +75,6 @@ Visit the [OpenAI Voice Reference](/reference/v1/voice/openai) for more informat

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { AzureVoice } from "@mastra/voice-azure";
 import { playAudio } from "@mastra/node-audio";

@@ -86,7 +83,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new AzureVoice(),
 });
@@ -107,7 +104,6 @@ Visit the [Azure Voice Reference](/reference/v1/voice/azure) for more informatio

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { ElevenLabsVoice } from "@mastra/voice-elevenlabs";
 import { playAudio } from "@mastra/node-audio";

@@ -116,7 +112,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new ElevenLabsVoice(),
 });

@@ -137,7 +133,6 @@ Visit the [ElevenLabs Voice Reference](/reference/v1/voice/elevenlabs) for more

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { PlayAIVoice } from "@mastra/voice-playai";
 import { playAudio } from "@mastra/node-audio";

@@ -146,7 +141,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new PlayAIVoice(),
 });

@@ -167,7 +162,6 @@ Visit the [PlayAI Voice Reference](/reference/v1/voice/playai) for more informat

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { GoogleVoice } from "@mastra/voice-google";
 import { playAudio } from "@mastra/node-audio";

@@ -176,7 +170,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new GoogleVoice(),
 });
@@ -197,7 +191,6 @@ Visit the [Google Voice Reference](/reference/v1/voice/google) for more informat

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { CloudflareVoice } from "@mastra/voice-cloudflare";
 import { playAudio } from "@mastra/node-audio";

@@ -206,7 +199,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new CloudflareVoice(),
 });

@@ -227,7 +220,6 @@ Visit the [Cloudflare Voice Reference](/reference/v1/voice/cloudflare) for more

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { DeepgramVoice } from "@mastra/voice-deepgram";
 import { playAudio } from "@mastra/node-audio";

@@ -236,7 +228,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new DeepgramVoice(),
 });

@@ -257,7 +249,6 @@ Visit the [Deepgram Voice Reference](/reference/v1/voice/deepgram) for more info

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { SpeechifyVoice } from "@mastra/voice-speechify";
 import { playAudio } from "@mastra/node-audio";

@@ -266,7 +257,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new SpeechifyVoice(),
 });
@@ -287,7 +278,6 @@ Visit the [Speechify Voice Reference](/reference/v1/voice/speechify) for more in

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { SarvamVoice } from "@mastra/voice-sarvam";
 import { playAudio } from "@mastra/node-audio";

@@ -296,7 +286,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new SarvamVoice(),
 });

@@ -317,7 +307,6 @@ Visit the [Sarvam Voice Reference](/reference/v1/voice/sarvam) for more informat

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { MurfVoice } from "@mastra/voice-murf";
 import { playAudio } from "@mastra/node-audio";

@@ -326,7 +315,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new MurfVoice(),
 });

@@ -359,7 +348,6 @@ You can download a sample audio file from [here](https://github.com/mastra-ai/re

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { OpenAIVoice } from "@mastra/voice-openai";
 import { createReadStream } from "fs";

@@ -368,7 +356,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new OpenAIVoice(),
 });
@@ -391,7 +379,6 @@ Visit the [OpenAI Voice Reference](/reference/v1/voice/openai) for more informat
 ```typescript
 import { createReadStream } from "fs";
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { AzureVoice } from "@mastra/voice-azure";
 import { createReadStream } from "fs";

@@ -400,7 +387,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new AzureVoice(),
 });

@@ -422,7 +409,6 @@ Visit the [Azure Voice Reference](/reference/v1/voice/azure) for more informatio

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { ElevenLabsVoice } from "@mastra/voice-elevenlabs";
 import { createReadStream } from "fs";

@@ -431,7 +417,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new ElevenLabsVoice(),
 });

@@ -453,7 +439,6 @@ Visit the [ElevenLabs Voice Reference](/reference/v1/voice/elevenlabs) for more

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { GoogleVoice } from "@mastra/voice-google";
 import { createReadStream } from "fs";

@@ -462,7 +447,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new GoogleVoice(),
 });
@@ -484,7 +469,6 @@ Visit the [Google Voice Reference](/reference/v1/voice/google) for more informat

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { CloudflareVoice } from "@mastra/voice-cloudflare";
 import { createReadStream } from "fs";

@@ -493,7 +477,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new CloudflareVoice(),
 });

@@ -515,7 +499,6 @@ Visit the [Cloudflare Voice Reference](/reference/v1/voice/cloudflare) for more

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { DeepgramVoice } from "@mastra/voice-deepgram";
 import { createReadStream } from "fs";

@@ -524,7 +507,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new DeepgramVoice(),
 });

@@ -546,7 +529,6 @@ Visit the [Deepgram Voice Reference](/reference/v1/voice/deepgram) for more info

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { SarvamVoice } from "@mastra/voice-sarvam";
 import { createReadStream } from "fs";

@@ -555,7 +537,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new SarvamVoice(),
 });
@@ -585,7 +567,6 @@ For detailed configuration options and advanced features, check out [Speech to S

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { playAudio, getMicrophoneStream } from "@mastra/node-audio";
 import { OpenAIRealtimeVoice } from "@mastra/voice-openai-realtime";

@@ -594,7 +575,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new OpenAIRealtimeVoice(),
 });

@@ -618,7 +599,6 @@ Visit the [OpenAI Voice Reference](/reference/v1/voice/openai-realtime) for more

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { playAudio, getMicrophoneStream } from "@mastra/node-audio";
 import { GeminiLiveVoice } from "@mastra/voice-google-gemini-live";

@@ -627,7 +607,7 @@ const voiceAgent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new GeminiLiveVoice({
     // Live API mode
     apiKey: process.env.GOOGLE_API_KEY,

@@ -937,7 +917,7 @@ const voiceAgent = new Agent({
   id: "aisdk-voice-agent",
   name: "AI SDK Voice Agent",
   instructions: "You are a helpful assistant with voice capabilities.",
-  model: openai("gpt-
+  model: openai("gpt-5.1"),
   voice,
 });
 ```
@@ -13,13 +13,13 @@ STS enables continuous bidirectional audio communication through listening to ev
 ## Configuration

 - **`apiKey`**: Your OpenAI API key. Falls back to the `OPENAI_API_KEY` environment variable.
-- **`model`**: The model ID to use for real-time voice interactions (e.g., `gpt-
+- **`model`**: The model ID to use for real-time voice interactions (e.g., `gpt-5.1-realtime`).
 - **`speaker`**: The default voice ID for speech synthesis. This allows you to specify which voice to use for the speech output.

 ```typescript
 const voice = new OpenAIRealtimeVoice({
   apiKey: "your-openai-api-key",
-  model: "gpt-
+  model: "gpt-5.1-realtime",
   speaker: "alloy", // Default voice
 });
@@ -38,7 +38,7 @@ const agent = new Agent({
   id: "agent",
   name: "OpenAI Realtime Agent",
   instructions: `You are a helpful assistant with real-time voice capabilities.`,
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new OpenAIRealtimeVoice(),
 });

@@ -73,7 +73,7 @@ const agent = new Agent({
   instructions:
     "You are a helpful assistant with real-time voice capabilities.",
   // Model used for text generation; voice provider handles realtime audio
-  model: openai
+  model: "openai/gpt-5.1",
   voice: new GeminiLiveVoice({
     apiKey: process.env.GOOGLE_API_KEY,
     model: "gemini-2.0-flash-exp",

@@ -54,7 +54,6 @@ The primary method for STT is the `listen()` method, which converts spoken audio

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { OpenAIVoice } from "@mastra/voice-openai";
 import { getMicrophoneStream } from "@mastra/node-audio";

@@ -65,7 +64,7 @@ const agent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that provides recommendations based on user input.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice,
 });

@@ -62,7 +62,6 @@ The primary method for TTS is the `speak()` method, which converts text to speec

 ```typescript
 import { Agent } from "@mastra/core/agent";
-import { openai } from "@ai-sdk/openai";
 import { OpenAIVoice } from "@mastra/voice-openai";

 const voice = new OpenAIVoice();

@@ -72,7 +71,7 @@ const agent = new Agent({
   name: "Voice Agent",
   instructions:
     "You are a voice assistant that can help users with their tasks.",
-  model: openai
+  model: "openai/gpt-5.1",
   voice,
 });