@mastra/mcp-docs-server 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. package/.docs/organized/changelogs/%40mastra%2Fastra.md +25 -25
  2. package/.docs/organized/changelogs/%40mastra%2Fchroma.md +25 -25
  3. package/.docs/organized/changelogs/%40mastra%2Fclient-js.md +27 -27
  4. package/.docs/organized/changelogs/%40mastra%2Fcomposio.md +25 -25
  5. package/.docs/organized/changelogs/%40mastra%2Fcore.md +24 -24
  6. package/.docs/organized/changelogs/%40mastra%2Fdeployer-cloudflare.md +37 -37
  7. package/.docs/organized/changelogs/%40mastra%2Fdeployer-netlify.md +37 -37
  8. package/.docs/organized/changelogs/%40mastra%2Fdeployer-vercel.md +37 -37
  9. package/.docs/organized/changelogs/%40mastra%2Fdeployer.md +36 -36
  10. package/.docs/organized/changelogs/%40mastra%2Fevals.md +25 -25
  11. package/.docs/organized/changelogs/%40mastra%2Ffirecrawl.md +29 -29
  12. package/.docs/organized/changelogs/%40mastra%2Fgithub.md +25 -25
  13. package/.docs/organized/changelogs/%40mastra%2Floggers.md +25 -25
  14. package/.docs/organized/changelogs/%40mastra%2Fmcp-docs-server.md +26 -0
  15. package/.docs/organized/changelogs/%40mastra%2Fmcp.md +25 -25
  16. package/.docs/organized/changelogs/%40mastra%2Fmemory.md +25 -25
  17. package/.docs/organized/changelogs/%40mastra%2Fpg.md +25 -25
  18. package/.docs/organized/changelogs/%40mastra%2Fpinecone.md +29 -29
  19. package/.docs/organized/changelogs/%40mastra%2Fplayground-ui.md +34 -34
  20. package/.docs/organized/changelogs/%40mastra%2Fqdrant.md +25 -25
  21. package/.docs/organized/changelogs/%40mastra%2Frag.md +27 -27
  22. package/.docs/organized/changelogs/%40mastra%2Fragie.md +25 -25
  23. package/.docs/organized/changelogs/%40mastra%2Fspeech-azure.md +25 -25
  24. package/.docs/organized/changelogs/%40mastra%2Fspeech-deepgram.md +25 -25
  25. package/.docs/organized/changelogs/%40mastra%2Fspeech-elevenlabs.md +25 -25
  26. package/.docs/organized/changelogs/%40mastra%2Fspeech-google.md +25 -25
  27. package/.docs/organized/changelogs/%40mastra%2Fspeech-ibm.md +25 -25
  28. package/.docs/organized/changelogs/%40mastra%2Fspeech-murf.md +25 -25
  29. package/.docs/organized/changelogs/%40mastra%2Fspeech-openai.md +25 -25
  30. package/.docs/organized/changelogs/%40mastra%2Fspeech-playai.md +25 -25
  31. package/.docs/organized/changelogs/%40mastra%2Fspeech-replicate.md +25 -25
  32. package/.docs/organized/changelogs/%40mastra%2Fspeech-speechify.md +25 -25
  33. package/.docs/organized/changelogs/%40mastra%2Fstabilityai.md +25 -25
  34. package/.docs/organized/changelogs/%40mastra%2Fturbopuffer.md +24 -0
  35. package/.docs/organized/changelogs/%40mastra%2Fupstash.md +25 -25
  36. package/.docs/organized/changelogs/%40mastra%2Fvectorize.md +25 -25
  37. package/.docs/organized/changelogs/%40mastra%2Fvoice-deepgram.md +25 -25
  38. package/.docs/organized/changelogs/%40mastra%2Fvoice-elevenlabs.md +25 -25
  39. package/.docs/organized/changelogs/%40mastra%2Fvoice-google.md +25 -25
  40. package/.docs/organized/changelogs/%40mastra%2Fvoice-murf.md +25 -25
  41. package/.docs/organized/changelogs/%40mastra%2Fvoice-openai-realtime.md +26 -0
  42. package/.docs/organized/changelogs/%40mastra%2Fvoice-openai.md +25 -25
  43. package/.docs/organized/changelogs/%40mastra%2Fvoice-playai.md +25 -25
  44. package/.docs/organized/changelogs/%40mastra%2Fvoice-sarvam.md +25 -0
  45. package/.docs/organized/changelogs/%40mastra%2Fvoice-speechify.md +25 -25
  46. package/.docs/organized/changelogs/create-mastra.md +16 -16
  47. package/.docs/organized/changelogs/mastra.md +58 -58
  48. package/.docs/organized/code-examples/agent.md +8 -3
  49. package/.docs/organized/code-examples/ai-sdk-useChat.md +1 -0
  50. package/.docs/organized/code-examples/weather-agent.md +1 -0
  51. package/.docs/raw/deployment/client.mdx +120 -0
  52. package/.docs/raw/deployment/server.mdx +1 -1
  53. package/.docs/raw/evals/00-overview.mdx +58 -75
  54. package/.docs/raw/evals/01-textual-evals.mdx +53 -0
  55. package/.docs/raw/evals/02-custom-eval.mdx +6 -170
  56. package/.docs/raw/evals/03-running-in-ci.mdx +78 -0
  57. package/.docs/raw/getting-started/installation.mdx +24 -13
  58. package/.docs/raw/getting-started/mcp-docs-server.mdx +138 -0
  59. package/.docs/raw/index.mdx +2 -2
  60. package/.docs/raw/local-dev/add-to-existing-project.mdx +48 -0
  61. package/.docs/raw/local-dev/creating-a-new-project.mdx +54 -0
  62. package/.docs/raw/local-dev/mastra-dev.mdx +78 -35
  63. package/.docs/raw/reference/agents/createTool.mdx +128 -89
  64. package/.docs/raw/reference/agents/stream.mdx +19 -18
  65. package/.docs/raw/reference/cli/dev.mdx +58 -21
  66. package/.docs/raw/reference/voice/openai-realtime.mdx +1 -1
  67. package/.docs/raw/storage/overview.mdx +331 -0
  68. package/package.json +2 -2
  69. package/.docs/raw/evals/01-supported-evals.mdx +0 -31
  70. package/.docs/raw/local-dev/creating-projects.mdx +0 -74
  71. package/.docs/raw/reference/client-js/index.mdx +0 -127
  72. /package/.docs/raw/{local-dev/integrations.mdx → integrations/index.mdx} +0 -0
@@ -10,178 +10,14 @@ Creating your own eval is as easy as creating a new function. You simply create
10
10
 
11
11
  ## Basic example
12
12
 
13
- Here is a very basic example of a custom eval that checks if the output contains a certain keyword. This is a simplified version of our own [keyword coverage eval](/docs/reference/evals/keyword-coverage).
14
-
15
-
16
- ```typescript copy showLineNumbers filename="src/mastra/evals/keyword-coverage.ts"
17
- import { Metric, type MetricResult } from '@mastra/core/eval';
18
-
19
- interface KeywordCoverageResult extends MetricResult {
20
- info: {
21
- totalKeywords: number;
22
- matchedKeywords: number;
23
- };
24
- }
25
-
26
- export class KeywordCoverageMetric extends Metric {
27
- private referenceKeywords: Set<string>;
28
-
29
- constructor(keywords: string[]) {
30
- super();
31
- this.referenceKeywords = new Set(keywords);
32
- }
33
-
34
- async measure(input: string, output: string): Promise<KeywordCoverageResult> {
35
- // Handle empty strings case
36
- if (!input && !output) {
37
- return {
38
- score: 1,
39
- info: {
40
- totalKeywords: 0,
41
- matchedKeywords: 0,
42
- },
43
- };
44
- }
45
-
46
- const matchedKeywords = [...this.referenceKeywords].filter(k => output.includes(k));
47
- const totalKeywords = this.referenceKeywords.size;
48
- const coverage = totalKeywords > 0 ? matchedKeywords.length / totalKeywords : 0;
49
-
50
- return {
51
- score: coverage,
52
- info: {
53
- totalKeywords: this.referenceKeywords.size,
54
- matchedKeywords: matchedKeywords.length,
55
- },
56
- };
57
- }
58
- }
59
- ```
13
+ For a simple example of creating a custom metric that checks if the output contains certain words, see our [Word Inclusion example](/examples/evals/word-inclusion).
60
14
 
61
15
  ## Creating a custom LLM-Judge
62
16
 
63
- A custom LLM judge can provide more targeted and meaningful evaluations for your use case. For example, if you're building a medical Q&A system, you might want to evaluate not just answer relevancy but also medical accuracy and safety considerations.
64
-
65
- Let's create an example to make sure our [Chef Michel](/docs/guides/01-chef-michel) is giving complete recipe information to the user.
66
-
67
- We'll start with creating the judge agent. You can put it all in one file but we prefer splitting it into a separate file to keep things readable.
68
-
69
- ```typescript copy showLineNumbers filename="src/mastra/evals/recipe-completeness/metricJudge.ts"
70
- import { type LanguageModel } from '@mastra/core/llm';
71
- import { MastraAgentJudge } from '@mastra/evals/judge';
72
- import { z } from 'zod';
73
-
74
- import { RECIPE_COMPLETENESS_INSTRUCTIONS, generateCompletenessPrompt, generateReasonPrompt } from './prompts';
75
-
76
- export class RecipeCompletenessJudge extends MastraAgentJudge {
77
- constructor(model: LanguageModel) {
78
- super('Recipe Completeness', RECIPE_COMPLETENESS_INSTRUCTIONS, model);
79
- }
80
-
81
- async evaluate(
82
- input: string,
83
- output: string,
84
- ): Promise<{
85
- missing: string[];
86
- verdict: string;
87
- }> {
88
- const completenessPrompt = generateCompletenessPrompt({ input, output });
89
- const result = await this.agent.generate(completenessPrompt, {
90
- output: z.object({
91
- missing: z.array(z.string()),
92
- verdict: z.string(),
93
- }),
94
- });
95
-
96
- return result.object;
97
- }
98
-
99
- async getReason(args: {
100
- input: string;
101
- output: string;
102
- missing: string[];
103
- verdict: string;
104
- }): Promise<string> {
105
- const prompt = generateReasonPrompt(args);
106
- const result = await this.agent.generate(prompt, {
107
- output: z.object({
108
- reason: z.string(),
109
- }),
110
- });
111
-
112
- return result.object.reason;
113
- }
114
- }
115
- ```
116
- ```typescript copy showLineNumbers filename="src/mastra/evals/recipe-completeness/index.ts"
117
- import { Metric, type MetricResult } from '@mastra/core/eval';
118
- import { type LanguageModel } from '@mastra/core/llm';
119
-
120
- import { RecipeCompletenessJudge } from './metricJudge';
121
-
122
- export interface RecipeCompletenessMetricOptions {
123
- scale?: number;
124
- }
125
-
126
- export interface MetricResultWithInfo extends MetricResult {
127
- info: {
128
- reason: string;
129
- missing: string[];
130
- };
131
- }
132
-
133
- export class RecipeCompletenessMetric extends Metric {
134
- private judge: RecipeCompletenessJudge;
135
- private scale: number;
136
- constructor(model: LanguageModel, { scale = 1 }: RecipeCompletenessMetricOptions = {}) {
137
- super();
138
-
139
- this.judge = new RecipeCompletenessJudge(model);
140
- this.scale = scale;
141
- }
142
-
143
- async measure(input: string, output: string): Promise<MetricResultWithInfo> {
144
- const { verdict, missing } = await this.judge.evaluate(input, output);
145
- const score = this.calculateScore({ verdict });
146
- const reason = await this.judge.getReason({
147
- input,
148
- output,
149
- verdict,
150
- missing,
151
- });
152
-
153
- return {
154
- score,
155
- info: {
156
- missing,
157
- reason,
158
- },
159
- };
160
- }
161
-
162
- private calculateScore(verdict: { verdict: string }): number {
163
- return verdict.verdict.toLowerCase() === 'incomplete' ? 0 : 1;
164
- }
165
- }
166
- ```
167
-
168
- ```typescript copy showLineNumbers filename="src/mastra/agents/chefAgent.ts"
169
- import { openai } from '@ai-sdk/openai';
170
- import { Agent } from '@mastra/core/agent';
171
-
172
- import { RecipeCompletenessMetric } from '../evals';
173
-
174
- export const chefAgent = new Agent({
175
- name: 'chef-agent',
176
- instructions:
177
- 'You are Michel, a practical and experienced home chef' +
178
- 'You help people cook with whatever ingredients they have available.',
179
- model: openai('gpt-4o-mini'),
180
- evals: {
181
- recipeCompleteness: new RecipeCompletenessMetric(openai('gpt-4o-mini')),
182
- },
183
- });
184
- ```
17
+ A custom LLM judge helps evaluate specific aspects of your AI's responses. Think of it like having an expert reviewer for your particular use case:
185
18
 
186
- You can now use the `RecipeCompletenessMetric` in your project. [See the full example here](/examples/evals/custom-eval).
19
+ - Medical Q&A → Judge checks for medical accuracy and safety
20
+ - Customer Service → Judge evaluates tone and helpfulness
21
+ - Code Generation → Judge verifies code correctness and style
187
22
 
23
+ For a practical example, see how we evaluate [Chef Michel's](/docs/guides/01-chef-michel) recipes for gluten content in our [Gluten Checker example](/examples/evals/custom-eval).
@@ -0,0 +1,78 @@
1
+ ---
2
+ title: "Running in CI"
3
+ description: "Learn how to run Mastra evals in your CI/CD pipeline to monitor agent quality over time."
4
+ ---
5
+
6
+ # Running Evals in CI
7
+
8
+ Running evals in your CI pipeline helps bridge the gap between local development and production behavior by providing quantifiable metrics for measuring agent quality over time.
9
+
10
+ ## Setting Up CI Integration
11
+
12
+ We support any testing framework that supports ESM modules. For example, you can use [Vitest](https://vitest.dev/), [Jest](https://jestjs.io/) or [Mocha](https://mochajs.org/) to run evals in your CI/CD pipeline.
13
+
14
+ ```typescript copy showLineNumbers filename="src/mastra/agents/index.test.ts"
15
+ import { describe, it, expect } from 'vitest';
16
+ import { evaluate } from '@mastra/core/eval';
+ import { ToneConsistencyMetric } from '@mastra/evals/nlp';
17
+ import { myAgent } from './index';
18
+
19
+ describe('My Agent', () => {
20
+ it('should validate tone consistency', async () => {
21
+ const metric = new ToneConsistencyMetric();
22
+ const result = await evaluate(myAgent, 'Hello, world!', metric);
23
+
24
+ expect(result.score).toBe(1);
25
+ });
26
+ });
27
+ ```
28
+
29
+ You will need to configure testSetup and globalSetup scripts for your testing framework to capture the eval results. This allows Mastra to show these results in your dashboard.
30
+
31
+ ## Framework Configuration
32
+
33
+ ### Vitest Setup
34
+
35
+ Add these files to your project to run evals in your CI/CD pipeline and capture results in the Mastra dashboard:
36
+
37
+ ```typescript copy showLineNumbers filename="globalSetup.ts"
38
+ import { globalSetup } from '@mastra/evals';
39
+
40
+ export default function setup() {
41
+ globalSetup()
42
+ }
43
+ ```
44
+
45
+ ```typescript copy showLineNumbers filename="testSetup.ts"
46
+ import { beforeAll } from 'vitest';
47
+ import { attachListeners } from '@mastra/evals';
48
+
49
+ beforeAll(async () => {
50
+ await attachListeners();
51
+ });
52
+ ```
53
+
54
+ ```typescript copy showLineNumbers filename="vitest.config.ts"
55
+ import { defineConfig } from 'vitest/config'
56
+
57
+ export default defineConfig({
58
+ test: {
59
+ globalSetup: './globalSetup.ts',
60
+ setupFiles: ['./testSetup.ts'],
61
+ },
62
+ })
63
+ ```
64
+
65
+ ## Storage Configuration
66
+
67
+ To store eval results in Mastra Storage:
68
+
69
+ ```typescript
70
+ import { mastra } from './your-mastra-setup';
71
+
72
+ beforeAll(async () => {
73
+ // Store evals in Mastra Storage (requires storage to be enabled)
74
+ await attachListeners(mastra);
75
+ });
76
+ ```
77
+
78
+ With file storage, evals persist and can be queried later. With memory storage, evals are isolated to the test process.
@@ -36,17 +36,17 @@ npx create-mastra@latest
36
36
  </Tabs.Tab>
37
37
  <Tabs.Tab>
38
38
  ```bash copy
39
- npm create mastra
39
+ npm create mastra@latest
40
40
  ```
41
41
  </Tabs.Tab>
42
42
  <Tabs.Tab>
43
43
  ```bash copy
44
- yarn create mastra
44
+ yarn create mastra@latest
45
45
  ```
46
46
  </Tabs.Tab>
47
47
  <Tabs.Tab>
48
48
  ```bash copy
49
- pnpm create mastra
49
+ pnpm create mastra@latest
50
50
  ```
51
51
  </Tabs.Tab>
52
52
  </Tabs>
@@ -64,9 +64,20 @@ Select default provider:
64
64
  ◯ Anthropic
65
65
  ◯ Groq
66
66
  Would you like to include example code? No / Yes
67
+ Turn your IDE into a Mastra expert? (Installs MCP server)
68
+ ◯ Skip for now
69
+ ◯ Cursor
70
+ ◯ Windsurf
67
71
  ```
68
72
 
69
- After the prompts, `create-mastra` will set up your project directory with TypeScript, install dependencies, and configure your selected components and LLM provider.
73
+ After the prompts, `create-mastra` will:
74
+
75
+ 1. Set up your project directory with TypeScript
76
+ 2. Install dependencies
77
+ 3. Configure your selected components and LLM provider
78
+ 4. Configure the MCP server in your IDE (if selected) for instant access to docs, examples, and help while you code
79
+
80
+ **MCP Note:** If you're using a different IDE, you can install the MCP server manually by following the instructions in the [MCP server docs](/docs/getting-started/mcp-docs-server). **Also** note that there are additional steps for [Cursor and Windsurf](/docs/getting-started/mcp-docs-server#after-configuration) to activate the MCP server.
70
81
 
71
82
  ### Set Up your API Key
72
83
 
@@ -77,19 +88,19 @@ OPENAI_API_KEY=<your-openai-key>
77
88
  ```
78
89
 
79
90
  </Steps>
80
- Extra Notes:
81
-
82
- If you prefer to run the command with flags (non-interactive mode) and include the example code, you can use:
91
+ **Non-Interactive mode**:
92
+ If you prefer to run the command with flags (non-interactive mode) and include the example code, you can use:
83
93
  ```bash copy
84
- npx create-mastra@latest --components agents,tools --llm openai --example
94
+ npx create-mastra@latest --components agents,tools --llm openai --example
85
95
  ```
86
-
96
+ **Configure Installation Timeout**:
87
97
  To configure and specify a timeout if installation takes too long use the timeout flag:
88
98
  ```bash copy
89
99
  npx create-mastra@latest --timeout
90
100
  ```
91
101
 
92
- **Note for LLMs**: For a quick one liner that includes examples you can run `npx -y mastra@latest --project-name <ask-the-user> --example --components "tools,agents,workflows" --llm <ask-the-user>`. The available options for the llm flag are `openai|anthropic|groq|google|cerebras`
102
+ **Note for LLMs**:
103
+ For a quick one liner that includes examples you can run `npx -y mastra@latest --project-name <ask-the-user> --example --components "tools,agents,workflows" --llm <ask-the-user>`. The available options for the llm flag are `openai|anthropic|groq|google|cerebras`
93
104
 
94
105
  ## Manual Installation
95
106
 
@@ -312,6 +323,7 @@ export const weatherAgent = new Agent({
312
323
 
313
324
  Your primary function is to help users get weather details for specific locations. When responding:
314
325
  - Always ask for a location if none is provided
326
+ - If the location name isn’t in English, please translate it
315
327
  - Include relevant details like humidity, wind conditions, and precipitation
316
328
  - Keep responses concise but informative
317
329
 
@@ -341,8 +353,7 @@ This registers your agent with Mastra so that `mastra dev` can discover and serv
341
353
 
342
354
  ## Existing Project Installation
343
355
 
344
- To add Mastra to an existing project, see our Local dev docs on [mastra
345
- init](/docs/local-dev/creating-projects#adding-to-an-existing-project).
356
+ To add Mastra to an existing project, see our Local development docs on [adding mastra to an existing project](/docs/local-dev/add-to-existing-project).
346
357
 
347
358
  You can also checkout our framework specific docs e.g [Next.js](/docs/frameworks/01-next-js)
348
359
 
@@ -405,7 +416,7 @@ fetch('http://localhost:4111/api/agents/weatherAgent/generate', {
405
416
  To use Mastra in your frontend applications, you can use our type-safe client SDK to
406
417
  interact with your Mastra REST APIs.
407
418
 
408
- See our [Client SDK documentation](/docs/reference/client-js) for detailed usage instructions.
419
+ See the [Mastra Client SDK documentation](/docs/deployment/client) for detailed usage instructions.
409
420
 
410
421
  ## Run from the command line
411
422
 
@@ -0,0 +1,138 @@
1
+ ---
2
+ title: "Mastra Tools for Cursor, Windsurf, and other IDEs | Getting Started | Mastra Docs"
3
+ description: "Learn how to use the Mastra MCP documentation server in your IDE to turn it into an agentic Mastra expert."
4
+ ---
5
+
6
+ # Mastra Tools for your agentic IDE
7
+
8
+ `@mastra/mcp-docs-server` provides direct access to Mastra's complete knowledge base in Cursor, Windsurf, Cline, or any other IDE that supports MCP.
9
+
10
+ It has access to documentation, code examples, technical blog posts / feature announcements, and package changelogs which your IDE can read to help you build with Mastra.
11
+
12
+ The MCP server tools have been designed to allow an agent to query the specific information it needs to complete a Mastra related task - for example: adding a Mastra feature to an agent, scaffolding a new project, or helping you understand how something works.
13
+
14
+ ## How it works
15
+
16
+ Once it's installed in your IDE, you can write prompts and assume the agent will understand everything about Mastra.
17
+
18
+ ### Add features
19
+
20
+ - "Add evals to my agent and write tests"
21
+ - "Write me a workflow that does the following `[task]`"
22
+ - "Make a new tool that allows my agent to access `[3rd party API]`"
23
+
24
+ ### Ask about integrations
25
+
26
+ - "Does Mastra work with the AI SDK?
27
+ How can I use it in my `[React/Svelte/etc]` project?"
28
+ - "What's the latest Mastra news around MCP?"
29
+ - "Does Mastra support `[provider]` speech and voice APIs? Show me an example in my code of how I can use it."
30
+
31
+ ### Debug or update existing code
32
+
33
+ - "I'm running into a bug with agent memory, have there been any related changes or bug fixes recently?"
34
+ - "How does working memory behave in Mastra and how can I use it to do `[task]`? It doesn't seem to work the way I expect."
35
+ - "I saw there are new workflow features, explain them to me and then update `[workflow]` to use them."
36
+
37
+ **And more** - if you have a question, try asking your IDE and let it look it up for you.
38
+
39
+ ## Automatic Installation
40
+
41
+ Run `pnpm create mastra@latest` and select Cursor or Windsurf when prompted to install the MCP server. For other IDEs, or if you already have a Mastra project, install the MCP server by following the instructions below.
42
+
43
+ ## Manual Installation
44
+
45
+ - **Cursor**: Edit `.cursor/mcp.json` in your project root, or `~/.cursor/mcp.json` for global configuration
46
+ - **Windsurf**: Edit `~/.codeium/windsurf/mcp_config.json` (only supports global configuration)
47
+
48
+ Add the following configuration:
49
+
50
+ ### MacOS/Linux
51
+
52
+ ```json
53
+ {
54
+ "mcpServers": {
55
+ "mastra": {
56
+ "command": "npx",
57
+ "args": ["-y", "@mastra/mcp-docs-server@latest"]
58
+ }
59
+ }
60
+ }
61
+ ```
62
+
63
+ ### Windows
64
+
65
+ ```json
66
+ {
67
+ "mcpServers": {
68
+ "mastra": {
69
+ "command": "cmd",
70
+ "args": ["/c", "npx", "-y", "@mastra/mcp-docs-server@latest"]
71
+ }
72
+ }
73
+ }
74
+ ```
75
+
76
+ ## After Configuration
77
+
78
+ ### Cursor
79
+
80
+ 1. Open Cursor settings
81
+ 2. Navigate to MCP settings
82
+ 3. Click "enable" on the Mastra MCP server
83
+ 4. If you have an agent chat open, you'll need to re-open it or start a new chat to use the MCP server
84
+
85
+ ### Windsurf
86
+
87
+ 1. Fully quit and re-open Windsurf
88
+ 2. If tool calls start failing, go to Windsurf's MCP settings and re-start the MCP server. This is a common Windsurf MCP issue and isn't related to Mastra. Right now Cursor's MCP implementation is more stable than Windsurf's.
89
+
90
+ In both IDEs it may take a minute for the MCP server to start the first time as it needs to download the package from npm.
91
+
92
+ ## Available Agent Tools
93
+
94
+ ### Documentation
95
+
96
+ Access Mastra's complete documentation:
97
+
98
+ - Getting started / installation
99
+ - Guides and tutorials
100
+ - API references
101
+
102
+ ### Examples
103
+
104
+ Browse code examples:
105
+
106
+ - Complete project structures
107
+ - Implementation patterns
108
+ - Best practices
109
+
110
+ ### Blog Posts
111
+
112
+ Search the blog for:
113
+
114
+ - Technical posts
115
+ - Changelog and feature announcements
116
+ - AI news and updates
117
+
118
+ ### Package Changes
119
+
120
+ Track updates for Mastra and `@mastra/*` packages:
121
+
122
+ - Bug fixes
123
+ - New features
124
+ - Breaking changes
125
+
126
+ ## Common Issues
127
+
128
+ 1. **Server Not Starting**
129
+
130
+ - Ensure npx is installed and working
131
+ - Check for conflicting MCP servers
132
+ - Verify your configuration file syntax
133
+ - On Windows, make sure to use the Windows-specific configuration
134
+
135
+ 2. **Tool Calls Failing**
136
+ - Restart the MCP server and/or your IDE
137
+ - Update to the latest version of your IDE
138
+
@@ -1,11 +1,11 @@
1
1
  ---
2
2
  title: "Introduction | Mastra Docs"
3
- description: "Mastra is a Typescript agent framework. It helps you build AI applications and features quickly. It gives you the set of primitives you need: workflows, agents, RAG, integrations, syncs and evals."
3
+ description: "Mastra is a TypeScript agent framework. It helps you build AI applications and features quickly. It gives you the set of primitives you need: workflows, agents, RAG, integrations, syncs and evals."
4
4
  ---
5
5
 
6
6
  # About Mastra
7
7
 
8
- Mastra is an open-source Typescript agent framework.
8
+ Mastra is an open-source TypeScript agent framework.
9
9
 
10
10
  It's designed to give you the primitives you need to build AI applications and features.
11
11
 
@@ -0,0 +1,48 @@
1
+ ---
2
+ title: "Adding to an Existing Project | Mastra Local Development Docs"
3
+ description: "Add Mastra to your existing Node.js applications"
4
+ ---
5
+
6
+ # Adding to an Existing Project
7
+
8
+ You can add Mastra to an existing project using the CLI:
9
+
10
+ ```bash npm2yarn copy
11
+ npm install -g mastra@latest
12
+ mastra init
13
+ ```
14
+
15
+ Changes made to project:
16
+ 1. Creates `src/mastra` directory with entry point
17
+ 2. Adds required dependencies
18
+ 3. Configures TypeScript compiler options
19
+
20
+
21
+ ## Interactive Setup
22
+
23
+ Running commands without arguments starts a CLI prompt for:
24
+
25
+ 1. Component selection
26
+ 2. LLM provider configuration
27
+ 3. API key setup
28
+ 4. Example code inclusion
29
+
30
+ ## Non-Interactive Setup
31
+
32
+ To initialize Mastra in non-interactive mode, use the following command arguments:
33
+
34
+ ```bash
35
+ Arguments:
36
+ --components Specify components: agents, tools, workflows
37
+ --llm-provider LLM provider: openai, anthropic, or groq
38
+ --add-example Include example implementation
39
+ --llm-api-key Provider API key
40
+ --dir Directory for Mastra files (defaults to src/)
41
+ ```
42
+ For more details, refer to the [mastra init CLI documentation](/docs/reference/cli/init).
43
+
44
+
45
+
46
+
47
+
48
+
@@ -0,0 +1,54 @@
1
+ ---
2
+ title: "Creating a new Project | Mastra Local Development Docs"
3
+ description: "Create new Mastra projects or add Mastra to existing Node.js applications using the CLI"
4
+ ---
5
+
6
+ # Creating a new project
7
+
8
+ You can create a new project using the `create-mastra` package:
9
+
10
+ ```bash npm2yarn copy
11
+ npm create mastra@latest
12
+ ```
13
+
14
+ You can also create a new project by using the `mastra` CLI directly:
15
+
16
+ ```bash npm2yarn copy
17
+ npm install -g mastra@latest
18
+ mastra create
19
+ ```
20
+
21
+ ## Interactive Setup
22
+
23
+ Running commands without arguments starts a CLI prompt for:
24
+
25
+ 1. Project name
26
+ 2. Component selection
27
+ 3. LLM provider configuration
28
+ 4. API key setup
29
+ 5. Example code inclusion
30
+
31
+ ## Non-Interactive Setup
32
+
33
+ To initialize Mastra in non-interactive mode, use the following command arguments:
34
+
35
+ ```bash
36
+ Arguments:
37
+ --components Specify components: agents, tools, workflows
38
+ --llm-provider LLM provider: openai, anthropic, groq, google, or cerebras
39
+ --add-example Include example implementation
40
+ --llm-api-key Provider API key
41
+ --project-name Project name that will be used in package.json and as the project directory name
42
+ ```
43
+
44
+
45
+
46
+ Generated project structure:
47
+ ```
48
+ my-project/
49
+ ├── src/
50
+ │ └── mastra/
51
+ │ └── index.ts # Mastra entry point
52
+ ├── package.json
53
+ └── tsconfig.json
54
+ ```