@mastra/mcp-docs-server 0.0.7 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/.docs/organized/changelogs/%40mastra%2Fastra.md +16 -16
  2. package/.docs/organized/changelogs/%40mastra%2Fchroma.md +16 -16
  3. package/.docs/organized/changelogs/%40mastra%2Fclickhouse.md +15 -0
  4. package/.docs/organized/changelogs/%40mastra%2Fclient-js.md +16 -16
  5. package/.docs/organized/changelogs/%40mastra%2Fcloudflare.md +21 -0
  6. package/.docs/organized/changelogs/%40mastra%2Fcore.md +14 -14
  7. package/.docs/organized/changelogs/%40mastra%2Fdeployer-cloudflare.md +26 -26
  8. package/.docs/organized/changelogs/%40mastra%2Fdeployer-netlify.md +26 -26
  9. package/.docs/organized/changelogs/%40mastra%2Fdeployer-vercel.md +26 -26
  10. package/.docs/organized/changelogs/%40mastra%2Fdeployer.md +26 -26
  11. package/.docs/organized/changelogs/%40mastra%2Fevals.md +16 -16
  12. package/.docs/organized/changelogs/%40mastra%2Ffirecrawl.md +18 -18
  13. package/.docs/organized/changelogs/%40mastra%2Fgithub.md +16 -16
  14. package/.docs/organized/changelogs/%40mastra%2Floggers.md +16 -16
  15. package/.docs/organized/changelogs/%40mastra%2Fmcp-docs-server.md +17 -2
  16. package/.docs/organized/changelogs/%40mastra%2Fmcp.md +16 -16
  17. package/.docs/organized/changelogs/%40mastra%2Fmem0.md +15 -0
  18. package/.docs/organized/changelogs/%40mastra%2Fmemory.md +16 -16
  19. package/.docs/organized/changelogs/%40mastra%2Fpg.md +16 -16
  20. package/.docs/organized/changelogs/%40mastra%2Fpinecone.md +16 -16
  21. package/.docs/organized/changelogs/%40mastra%2Fplayground-ui.md +21 -21
  22. package/.docs/organized/changelogs/%40mastra%2Fqdrant.md +16 -16
  23. package/.docs/organized/changelogs/%40mastra%2Frag.md +16 -16
  24. package/.docs/organized/changelogs/%40mastra%2Fragie.md +16 -16
  25. package/.docs/organized/changelogs/%40mastra%2Fserver.md +16 -16
  26. package/.docs/organized/changelogs/%40mastra%2Fspeech-azure.md +16 -16
  27. package/.docs/organized/changelogs/%40mastra%2Fspeech-deepgram.md +16 -16
  28. package/.docs/organized/changelogs/%40mastra%2Fspeech-elevenlabs.md +16 -16
  29. package/.docs/organized/changelogs/%40mastra%2Fspeech-google.md +16 -16
  30. package/.docs/organized/changelogs/%40mastra%2Fspeech-ibm.md +16 -16
  31. package/.docs/organized/changelogs/%40mastra%2Fspeech-murf.md +16 -16
  32. package/.docs/organized/changelogs/%40mastra%2Fspeech-openai.md +16 -16
  33. package/.docs/organized/changelogs/%40mastra%2Fspeech-playai.md +16 -16
  34. package/.docs/organized/changelogs/%40mastra%2Fspeech-replicate.md +16 -16
  35. package/.docs/organized/changelogs/%40mastra%2Fspeech-speechify.md +16 -16
  36. package/.docs/organized/changelogs/%40mastra%2Fturbopuffer.md +16 -16
  37. package/.docs/organized/changelogs/%40mastra%2Fupstash.md +16 -16
  38. package/.docs/organized/changelogs/%40mastra%2Fvectorize.md +16 -16
  39. package/.docs/organized/changelogs/%40mastra%2Fvoice-azure.md +15 -0
  40. package/.docs/organized/changelogs/%40mastra%2Fvoice-cloudflare.md +15 -0
  41. package/.docs/organized/changelogs/%40mastra%2Fvoice-deepgram.md +16 -16
  42. package/.docs/organized/changelogs/%40mastra%2Fvoice-elevenlabs.md +16 -16
  43. package/.docs/organized/changelogs/%40mastra%2Fvoice-google.md +16 -16
  44. package/.docs/organized/changelogs/%40mastra%2Fvoice-murf.md +16 -16
  45. package/.docs/organized/changelogs/%40mastra%2Fvoice-openai-realtime.md +16 -10
  46. package/.docs/organized/changelogs/%40mastra%2Fvoice-openai.md +16 -16
  47. package/.docs/organized/changelogs/%40mastra%2Fvoice-playai.md +16 -16
  48. package/.docs/organized/changelogs/%40mastra%2Fvoice-sarvam.md +15 -0
  49. package/.docs/organized/changelogs/%40mastra%2Fvoice-speechify.md +16 -16
  50. package/.docs/organized/changelogs/create-mastra.md +21 -21
  51. package/.docs/organized/changelogs/mastra.md +31 -31
  52. package/.docs/organized/code-examples/ai-sdk-useChat.md +1 -2
  53. package/.docs/organized/code-examples/memory-with-mem0.md +0 -1
  54. package/.docs/raw/agents/agent-memory.mdx +1 -5
  55. package/.docs/raw/getting-started/installation.mdx +40 -51
  56. package/.docs/raw/memory/overview.mdx +1 -2
  57. package/.docs/raw/memory/semantic-recall.mdx +3 -4
  58. package/.docs/raw/memory/working-memory.mdx +1 -2
  59. package/dist/_tsup-dts-rollup.d.ts +84 -18
  60. package/dist/{chunk-J7WZZETH.js → chunk-QWYMT5LP.js} +3 -0
  61. package/dist/prepare-docs/prepare.js +1 -1
  62. package/dist/stdio.js +252 -90
  63. package/package.json +5 -6
  64. package/.docs/organized/code-examples/mcp-configuration.md +0 -341
  65. package/.docs/raw/guides/ai-recruiter.mdx +0 -187
  66. package/.docs/raw/guides/chef-michel.mdx +0 -242
  67. package/.docs/raw/guides/research-assistant.mdx +0 -297
  68. package/.docs/raw/guides/stock-agent.mdx +0 -182
@@ -1,341 +0,0 @@
- ### package.json
- ```json
- {
-   "name": "examples-mcp-configuration",
-   "type": "module",
-   "private": true,
-   "main": "index.js",
-   "scripts": {
-     "start": "npx bun src/index.ts",
-     "dev": "mastra dev",
-     "test": "cross-env NODE_OPTIONS='--experimental-vm-modules --max-old-space-size=8192' jest"
-   },
-   "keywords": [],
-   "author": "",
-   "license": "ISC",
-   "description": "",
-   "dependencies": {
-     "@ai-sdk/openai": "latest",
-     "@mastra/core": "workspace:*",
-     "@mastra/mcp": "workspace:*",
-     "@mastra/memory": "workspace:*",
-     "chalk": "^5.4.1",
-     "fastmcp": "^1.20.4",
-     "zod": "^3.24.2"
-   },
-   "devDependencies": {
-     "@jest/globals": "^29.7.0",
-     "jest": "^29.7.0",
-     "mastra": "workspace:*",
-     "ts-jest": "^29.2.6",
-     "tsx": "^4.19.3"
-   },
-   "version": "0.0.1"
- }
-
- ```
-
- ### index.ts
- ```typescript
- import { openai } from '@ai-sdk/openai';
- import { Agent } from '@mastra/core/agent';
- import { MCPConfiguration } from '@mastra/mcp';
- import chalk from 'chalk';
-
- // start sse server - in real life this would already be running but want to show using sse and stdio in this example
- import './mastra/tools/sse';
-
- console.log(chalk.blue(`Creating agent`));
- export const stockWeatherAgent = new Agent({
-   name: 'Stock + Weather Agent',
-   instructions:
-     'You are a helpful assistant that provides current stock prices. When asked about a stock, use the stock price tool to fetch the stock price. You also love to check the weather when your stock market buddies ask you what the weather is.',
-   model: openai('gpt-4o'),
- });
-
- console.log(chalk.blue(`Creating MCPConfiguration`));
- const mcp = new MCPConfiguration({
-   servers: {
-     stockPrice: {
-       command: 'npx',
-       args: ['-y', 'tsx', './src/mastra/tools/stock-price.ts'],
-       env: {
-         FAKE_CREDS: 'let me in!',
-       },
-     },
-     weather: {
-       url: new URL('http://localhost:8080/sse'),
-     },
-   },
- });
-
- const toolsets = await mcp.getToolsets();
-
- console.log({ toolsets });
-
- const prompt = `Whats the weather in Seattle and what is the current stock price of Apple (AAPL)?`;
- console.log(chalk.yellow(`Sending prompt:\n"${prompt}"\n\n`));
- const response = await stockWeatherAgent.stream(prompt, {
-   toolsets,
- });
-
- for await (const part of response.fullStream) {
-   switch (part.type) {
-     case 'error':
-       console.error(part.error);
-       break;
-     case 'text-delta':
-       process.stdout.write(chalk.green(part.textDelta));
-       break;
-     case 'tool-call':
-       console.log(`calling tool ${part.toolName} with args ${chalk.red(JSON.stringify(part.args, null, 2))}`);
-       break;
-     case 'tool-result':
-       console.log(`tool result ${chalk.cyan(JSON.stringify(part.result, null, 2))}`);
-       break;
-   }
- }
-
- ```
-
- ### mastra/agents/index.ts
- ```typescript
- import { openai } from '@ai-sdk/openai';
- import { Agent } from '@mastra/core/agent';
- import { MCPConfiguration } from '@mastra/mcp';
-
- // start sse server - in real life this would already be running but want to show using sse and stdio in this example
- import '../tools/sse';
-
- const mcp = new MCPConfiguration({
-   servers: {
-     stockPrice: {
-       command: 'npx',
-       args: ['-y', 'tsx', '../../src/mastra/tools/stock-price.ts'],
-       env: {
-         FAKE_CREDS: 'let me in!',
-       },
-     },
-     weather: {
-       url: new URL('http://localhost:8080/sse'),
-     },
-   },
- });
-
- export const stockWeatherAgent = new Agent({
-   name: 'Stock + Weather Agent',
-   instructions:
-     'You are a helpful assistant that provides current stock prices. When asked about a stock, use the stock price tool to fetch the stock price. You also love to check the weather when your stock market buddies ask you what the weather is.',
-   model: openai('gpt-4o'),
-   tools: await mcp.getTools(),
- });
-
- ```
-
- ### mastra/index.ts
- ```typescript
- import { Mastra } from '@mastra/core';
-
- import { stockWeatherAgent } from './agents';
-
- export const mastra = new Mastra({
-   agents: { stockWeatherAgent },
- });
-
- ```
-
- ### mastra/tools/sse.ts
- ```typescript
- import chalk from 'chalk';
- import { spawn } from 'child_process';
- import path from 'path';
-
- function relativeFromRoot(pathString: string) {
-   if (import.meta.url.includes(`.mastra`)) {
-     return path.join(`../../`, pathString);
-   }
-
-   return path.join(`./`, pathString);
- }
-
- const sseFile = relativeFromRoot(`./src/mastra/tools/weather.ts`);
-
- console.log(chalk.blue(`Starting mock sse server`));
- // simulating an sse server that's already running
- const sseProcess = spawn(`npx`, [`-y`, `tsx`, sseFile]);
-
- sseProcess.stderr.on(`data`, chunk => {
-   const message = chunk.toString();
-   console.error(message);
- });
- await new Promise(res => {
-   sseProcess.stdout.on(`data`, chunk => {
-     const message = chunk.toString();
-     if (message.includes('server is running on SSE')) {
-       res(null);
-     } else {
-       console.log(message);
-     }
-   });
- });
-
- process.on(`exit`, () => {
-   sseProcess.kill(`SIGINT`);
- });
-
- ```
-
- ### mastra/tools/stock-price.ts
- ```typescript
- import { FastMCP } from 'fastmcp';
- import { z } from 'zod';
-
- const getStockPrice = async (symbol: string) => {
-   const data = await fetch(`https://mastra-stock-data.vercel.app/api/stock-data?symbol=${symbol}`).then(r => r.json());
-   return data.prices['4. close'];
- };
-
- const server = new FastMCP({
-   name: 'Stock Price Server',
-   version: '1.0.0',
- });
-
- server.addTool({
-   name: 'getStockPrice',
-   description: "Fetches the last day's closing stock price for a given symbol",
-   parameters: z.object({
-     symbol: z.string(),
-   }),
-   execute: async args => {
-     console.log('Using tool to fetch stock price for', args.symbol);
-     const price = await getStockPrice(args.symbol);
-     return JSON.stringify({
-       symbol: args.symbol,
-       currentPrice: price,
-     });
-   },
- });
-
- // Start the server with stdio transport
- server.start({
-   transportType: 'stdio',
- });
-
- export { server };
-
- ```
-
- ### mastra/tools/weather.ts
- ```typescript
- import { FastMCP } from 'fastmcp';
- import { z } from 'zod';
-
- interface WeatherResponse {
-   current: {
-     time: string;
-     temperature_2m: number;
-     apparent_temperature: number;
-     relative_humidity_2m: number;
-     wind_speed_10m: number;
-     wind_gusts_10m: number;
-     weather_code: number;
-   };
- }
-
- const getWeather = async (location: string) => {
-   const geocodingUrl = `https://geocoding-api.open-meteo.com/v1/search?name=${encodeURIComponent(location)}&count=1`;
-   const geocodingResponse = await fetch(geocodingUrl);
-   const geocodingData = await geocodingResponse.json();
-
-   if (!geocodingData.results?.[0]) {
-     throw new Error(`Location '${location}' not found`);
-   }
-
-   const { latitude, longitude, name } = geocodingData.results[0];
-
-   const weatherUrl = `https://api.open-meteo.com/v1/forecast?latitude=${latitude}&longitude=${longitude}&current=temperature_2m,apparent_temperature,relative_humidity_2m,wind_speed_10m,wind_gusts_10m,weather_code`;
-
-   const response = await fetch(weatherUrl);
-   const data: WeatherResponse = await response.json();
-
-   return {
-     temperature: data.current.temperature_2m,
-     feelsLike: data.current.apparent_temperature,
-     humidity: data.current.relative_humidity_2m,
-     windSpeed: data.current.wind_speed_10m,
-     windGust: data.current.wind_gusts_10m,
-     conditions: getWeatherCondition(data.current.weather_code),
-     location: name,
-   };
- };
-
- function getWeatherCondition(code: number): string {
-   const conditions: Record<number, string> = {
-     0: 'Clear sky',
-     1: 'Mainly clear',
-     2: 'Partly cloudy',
-     3: 'Overcast',
-     45: 'Foggy',
-     48: 'Depositing rime fog',
-     51: 'Light drizzle',
-     53: 'Moderate drizzle',
-     55: 'Dense drizzle',
-     56: 'Light freezing drizzle',
-     57: 'Dense freezing drizzle',
-     61: 'Slight rain',
-     63: 'Moderate rain',
-     65: 'Heavy rain',
-     66: 'Light freezing rain',
-     67: 'Heavy freezing rain',
-     71: 'Slight snow fall',
-     73: 'Moderate snow fall',
-     75: 'Heavy snow fall',
-     77: 'Snow grains',
-     80: 'Slight rain showers',
-     81: 'Moderate rain showers',
-     82: 'Violent rain showers',
-     85: 'Slight snow showers',
-     86: 'Heavy snow showers',
-     95: 'Thunderstorm',
-     96: 'Thunderstorm with slight hail',
-     99: 'Thunderstorm with heavy hail',
-   };
-   return conditions[code] || 'Unknown';
- }
-
- const server = new FastMCP({
-   name: 'Weather Server',
-   version: '1.0.0',
- });
-
- server.addTool({
-   name: 'getWeather',
-   description: 'Get current weather for a location',
-   parameters: z.object({
-     location: z.string().describe('City name'),
-   }),
-   execute: async args => {
-     try {
-       const weatherData = await getWeather(args.location);
-       return JSON.stringify(weatherData);
-     } catch (error) {
-       if (error instanceof Error) {
-         throw new Error(`Weather fetch failed: ${error.message}`);
-       }
-       throw error;
-     }
-   },
- });
-
- // Start the server with SSE support
- server.start({
-   transportType: 'sse',
-   sse: {
-     endpoint: '/sse',
-     port: 8080,
-   },
- });
-
- export { server };
-
- ```
@@ -1,187 +0,0 @@
- ---
- title: "Building an AI Recruiter | Mastra Workflows | Guides"
- description: Guide on building a recruiter workflow in Mastra to gather and process candidate information using LLMs.
- ---
-
- # Introduction
-
- In this guide, you'll learn how Mastra helps you build workflows with LLMs.
-
- We'll walk through creating a workflow that gathers information from a candidate's resume, then branches to either a technical or behavioral question based on the candidate's profile. Along the way, you'll see how to structure workflow steps, handle branching, and integrate LLM calls.
-
- Below is a concise version of the workflow. It starts by importing the necessary modules, sets up Mastra, defines steps to extract and classify candidate data, and then asks suitable follow-up questions. Each code block is followed by a short explanation of what it does and why it's useful.
-
- ## 1. Imports and Setup
-
- You need to import Mastra tools and Zod to handle workflow definitions and data validation.
-
- ```typescript filename="src/mastra/index.ts" copy
-
- import { Mastra } from "@mastra/core";
- import { Step, Workflow } from "@mastra/core/workflows";
- import { z } from "zod";
- ```
-
- Add your `OPENAI_API_KEY` to the `.env` file.
-
- ```bash filename=".env" copy
- OPENAI_API_KEY=<your-openai-key>
- ```
-
- ## 2. Step One: Gather Candidate Info
-
- You want to extract candidate details from the resume text and classify them as technical or non-technical. This step calls an LLM to parse the resume and return structured JSON, including the name, technical status, specialty, and the original resume text. The code reads resumeText from trigger data, prompts the LLM, and returns organized fields for use in subsequent steps.
-
- ```typescript filename="src/mastra/index.ts" copy
- import { Agent } from '@mastra/core/agent';
- import { openai } from "@ai-sdk/openai";
-
- const recruiter = new Agent({
-   name: "Recruiter Agent",
-   instructions: `You are a recruiter.`,
-   model: openai("gpt-4o-mini"),
- })
-
- const gatherCandidateInfo = new Step({
-   id: "gatherCandidateInfo",
-   inputSchema: z.object({
-     resumeText: z.string(),
-   }),
-   outputSchema: z.object({
-     candidateName: z.string(),
-     isTechnical: z.boolean(),
-     specialty: z.string(),
-     resumeText: z.string(),
-   }),
-   execute: async ({ context }) => {
-     const resumeText = context?.getStepResult<{
-       resumeText: string;
-     }>("trigger")?.resumeText;
-
-     const prompt = `
-       Extract details from the resume text:
-       "${resumeText}"
-     `;
-
-     const res = await recruiter.generate(prompt, {
-       output: z.object({
-         candidateName: z.string(),
-         isTechnical: z.boolean(),
-         specialty: z.string(),
-         resumeText: z.string(),
-       }),
-     });
-
-     return res.object;
-   },
- });
- ```
-
- ## 3. Technical Question Step
-
- This step prompts a candidate who is identified as technical for more information about how they got into their specialty. It uses the entire resume text so the LLM can craft a relevant follow-up question. The code generates a question about the candidate's specialty.
-
- ```typescript filename="src/mastra/index.ts" copy
- interface CandidateInfo {
-   candidateName: string;
-   isTechnical: boolean;
-   specialty: string;
-   resumeText: string;
- }
-
- const askAboutSpecialty = new Step({
-   id: "askAboutSpecialty",
-   outputSchema: z.object({
-     question: z.string(),
-   }),
-   execute: async ({ context }) => {
-     const candidateInfo = context?.getStepResult<CandidateInfo>(
-       "gatherCandidateInfo",
-     );
-
-     const prompt = `
-       You are a recruiter. Given the resume below, craft a short question
-       for ${candidateInfo?.candidateName} about how they got into "${candidateInfo?.specialty}".
-       Resume: ${candidateInfo?.resumeText}
-     `;
-     const res = await recruiter.generate(prompt);
-
-     return { question: res?.text?.trim() || "" };
-   },
- });
- ```
-
- ## 4. Behavioral Question Step
-
- If the candidate is non-technical, you want a different follow-up question. This step asks what interests them most about the role, again referencing their complete resume text. The code solicits a role-focused query from the LLM.
-
- ```typescript filename="src/mastra/index.ts" copy
- const askAboutRole = new Step({
-   id: "askAboutRole",
-   outputSchema: z.object({
-     question: z.string(),
-   }),
-   execute: async ({ context }) => {
-     const candidateInfo = context?.getStepResult<CandidateInfo>(
-       "gatherCandidateInfo",
-     );
-
-     const prompt = `
-       You are a recruiter. Given the resume below, craft a short question
-       for ${candidateInfo?.candidateName} asking what interests them most about this role.
-       Resume: ${candidateInfo?.resumeText}
-     `;
-     const res = await recruiter.generate(prompt);
-     return { question: res?.text?.trim() || "" };
-   },
- });
- ```
-
- ## 5. Define the Workflow
-
- You now combine the steps to implement branching logic based on the candidate's technical status. The workflow first gathers candidate data, then either asks about their specialty or about their role, depending on isTechnical. The code chains gatherCandidateInfo with askAboutSpecialty and askAboutRole, and commits the workflow.
-
- ```typescript filename="src/mastra/index.ts" copy
- const candidateWorkflow = new Workflow({
-   name: "candidate-workflow",
-   triggerSchema: z.object({
-     resumeText: z.string(),
-   }),
- });
-
- candidateWorkflow
-   .step(gatherCandidateInfo)
-   .then(askAboutSpecialty, {
-     when: { "gatherCandidateInfo.isTechnical": true },
-   })
-   .after(gatherCandidateInfo)
-   .step(askAboutRole, {
-     when: { "gatherCandidateInfo.isTechnical": false },
-   });
-
- candidateWorkflow.commit();
- ```
-
- ## 6. Execute the Workflow
-
- ```typescript filename="src/mastra/index.ts" copy
- const mastra = new Mastra({
-   workflows: {
-     candidateWorkflow,
-   },
- });
-
- (async () => {
-   const { runId, start } = mastra.getWorkflow("candidateWorkflow").createRun();
-
-   console.log("Run", runId);
-
-   const runResult = await start({
-     triggerData: { resumeText: "Simulated resume content..." },
-   });
-
-   console.log("Final output:", runResult.results);
- })();
- ```
-
- You've just built a workflow to parse a resume and decide which question to ask based on the candidate's technical abilities. Congrats and happy hacking!