@mastra/mcp-docs-server 0.13.7-alpha.5 → 0.13.8-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. package/.docs/organized/changelogs/%40internal%2Fstorage-test-utils.md +30 -30
  2. package/.docs/organized/changelogs/%40mastra%2Fastra.md +31 -31
  3. package/.docs/organized/changelogs/%40mastra%2Fchroma.md +31 -31
  4. package/.docs/organized/changelogs/%40mastra%2Fclickhouse.md +31 -31
  5. package/.docs/organized/changelogs/%40mastra%2Fclient-js.md +60 -60
  6. package/.docs/organized/changelogs/%40mastra%2Fcloud.md +44 -44
  7. package/.docs/organized/changelogs/%40mastra%2Fcloudflare-d1.md +34 -34
  8. package/.docs/organized/changelogs/%40mastra%2Fcloudflare.md +33 -33
  9. package/.docs/organized/changelogs/%40mastra%2Fcore.md +72 -72
  10. package/.docs/organized/changelogs/%40mastra%2Fcouchbase.md +31 -31
  11. package/.docs/organized/changelogs/%40mastra%2Fdeployer-cloudflare.md +56 -56
  12. package/.docs/organized/changelogs/%40mastra%2Fdeployer-netlify.md +52 -52
  13. package/.docs/organized/changelogs/%40mastra%2Fdeployer-vercel.md +52 -52
  14. package/.docs/organized/changelogs/%40mastra%2Fdeployer.md +73 -73
  15. package/.docs/organized/changelogs/%40mastra%2Fdynamodb.md +33 -33
  16. package/.docs/organized/changelogs/%40mastra%2Fevals.md +36 -36
  17. package/.docs/organized/changelogs/%40mastra%2Ffirecrawl.md +47 -47
  18. package/.docs/organized/changelogs/%40mastra%2Fgithub.md +31 -31
  19. package/.docs/organized/changelogs/%40mastra%2Flance.md +30 -0
  20. package/.docs/organized/changelogs/%40mastra%2Flibsql.md +50 -50
  21. package/.docs/organized/changelogs/%40mastra%2Floggers.md +31 -31
  22. package/.docs/organized/changelogs/%40mastra%2Fmcp-docs-server.md +44 -44
  23. package/.docs/organized/changelogs/%40mastra%2Fmcp-registry-registry.md +31 -31
  24. package/.docs/organized/changelogs/%40mastra%2Fmcp.md +43 -43
  25. package/.docs/organized/changelogs/%40mastra%2Fmem0.md +33 -33
  26. package/.docs/organized/changelogs/%40mastra%2Fmemory.md +51 -51
  27. package/.docs/organized/changelogs/%40mastra%2Fmongodb.md +48 -48
  28. package/.docs/organized/changelogs/%40mastra%2Fmssql.md +32 -0
  29. package/.docs/organized/changelogs/%40mastra%2Fopensearch.md +31 -31
  30. package/.docs/organized/changelogs/%40mastra%2Fpg.md +58 -58
  31. package/.docs/organized/changelogs/%40mastra%2Fpinecone.md +31 -31
  32. package/.docs/organized/changelogs/%40mastra%2Fplayground-ui.md +58 -58
  33. package/.docs/organized/changelogs/%40mastra%2Fqdrant.md +31 -31
  34. package/.docs/organized/changelogs/%40mastra%2Frag.md +46 -46
  35. package/.docs/organized/changelogs/%40mastra%2Fragie.md +31 -31
  36. package/.docs/organized/changelogs/%40mastra%2Fserver.md +58 -58
  37. package/.docs/organized/changelogs/%40mastra%2Fturbopuffer.md +31 -31
  38. package/.docs/organized/changelogs/%40mastra%2Fupstash.md +49 -49
  39. package/.docs/organized/changelogs/%40mastra%2Fvectorize.md +33 -33
  40. package/.docs/organized/changelogs/%40mastra%2Fvoice-azure.md +33 -33
  41. package/.docs/organized/changelogs/%40mastra%2Fvoice-cloudflare.md +33 -33
  42. package/.docs/organized/changelogs/%40mastra%2Fvoice-deepgram.md +31 -31
  43. package/.docs/organized/changelogs/%40mastra%2Fvoice-elevenlabs.md +31 -31
  44. package/.docs/organized/changelogs/%40mastra%2Fvoice-google.md +31 -31
  45. package/.docs/organized/changelogs/%40mastra%2Fvoice-murf.md +43 -43
  46. package/.docs/organized/changelogs/%40mastra%2Fvoice-openai-realtime.md +35 -35
  47. package/.docs/organized/changelogs/%40mastra%2Fvoice-openai.md +45 -45
  48. package/.docs/organized/changelogs/%40mastra%2Fvoice-playai.md +31 -31
  49. package/.docs/organized/changelogs/%40mastra%2Fvoice-sarvam.md +31 -31
  50. package/.docs/organized/changelogs/%40mastra%2Fvoice-speechify.md +31 -31
  51. package/.docs/organized/changelogs/create-mastra.md +10 -10
  52. package/.docs/organized/changelogs/mastra.md +70 -70
  53. package/.docs/raw/agents/streaming.mdx +118 -0
  54. package/.docs/raw/community/contributing-templates.mdx +1 -0
  55. package/.docs/raw/memory/overview.mdx +3 -1
  56. package/.docs/raw/memory/working-memory.mdx +67 -0
  57. package/.docs/raw/observability/logging.mdx +94 -19
  58. package/.docs/raw/reference/agents/streamVNext.mdx +598 -0
  59. package/.docs/raw/reference/observability/logger.mdx +69 -50
  60. package/.docs/raw/reference/observability/providers/langfuse.mdx +47 -14
  61. package/.docs/raw/reference/workflows/dountil.mdx +1 -2
  62. package/.docs/raw/reference/workflows/dowhile.mdx +1 -2
  63. package/.docs/raw/reference/workflows/resume.mdx +13 -3
  64. package/.docs/raw/reference/workflows/step.mdx +0 -1
  65. package/.docs/raw/reference/workflows/streamVNext.mdx +215 -0
  66. package/.docs/raw/workflows/streaming.mdx +115 -0
  67. package/.docs/raw/workflows/suspend-and-resume.mdx +13 -2
  68. package/dist/index.d.ts +5 -0
  69. package/dist/index.d.ts.map +1 -0
  70. package/dist/logger.d.ts +11 -0
  71. package/dist/logger.d.ts.map +1 -0
  72. package/dist/prepare-docs/code-examples.d.ts +5 -0
  73. package/dist/prepare-docs/code-examples.d.ts.map +1 -0
  74. package/dist/prepare-docs/copy-raw.d.ts +2 -0
  75. package/dist/prepare-docs/copy-raw.d.ts.map +1 -0
  76. package/dist/prepare-docs/package-changes.d.ts +5 -0
  77. package/dist/prepare-docs/package-changes.d.ts.map +1 -0
  78. package/dist/prepare-docs/prepare.d.ts +2 -1
  79. package/dist/prepare-docs/prepare.d.ts.map +1 -0
  80. package/dist/stdio.d.ts +2 -0
  81. package/dist/stdio.d.ts.map +1 -0
  82. package/dist/tools/__tests__/test-setup.d.ts +5 -0
  83. package/dist/tools/__tests__/test-setup.d.ts.map +1 -0
  84. package/dist/tools/blog.d.ts +22 -0
  85. package/dist/tools/blog.d.ts.map +1 -0
  86. package/dist/tools/changes.d.ts +22 -0
  87. package/dist/tools/changes.d.ts.map +1 -0
  88. package/dist/tools/course.d.ts +85 -0
  89. package/dist/tools/course.d.ts.map +1 -0
  90. package/dist/tools/docs.d.ts +28 -0
  91. package/dist/tools/docs.d.ts.map +1 -0
  92. package/dist/tools/examples.d.ts +28 -0
  93. package/dist/tools/examples.d.ts.map +1 -0
  94. package/dist/utils.d.ts +62 -0
  95. package/dist/utils.d.ts.map +1 -0
  96. package/package.json +6 -6
  97. package/dist/_tsup-dts-rollup.d.ts +0 -304
package/.docs/raw/reference/observability/logger.mdx

@@ -1,86 +1,98 @@
  ---
- title: "Reference: Logger Instance | Mastra Observability Docs"
- description: Documentation for Logger instances, which provide methods to record events at various severity levels.
+ title: "Reference: PinoLogger | Mastra Observability Docs"
+ description: Documentation for PinoLogger, which provides methods to record events at various severity levels.
  ---

- # Logger Instance
+ # PinoLogger

- A Logger instance is created by `new PinoLogger()` and provides methods to record events at various severity levels. Depending on the logger type, messages may be written to the console, file, or an external service.
+ A Logger instance is created using `new PinoLogger()` and provides methods to record events at various severity levels.

- ## Example
+ When deploying to Mastra Cloud, logs are displayed on the [Logs](../../docs/mastra-cloud/dashboard.mdx#logs) page. In self-hosted or custom environments, logs can be directed to files or external services depending on the configured transports.

- ```typescript showLineNumbers copy
- // Using a console logger
- const logger = new PinoLogger({ name: "Mastra", level: "info" });
+ ## Usage example

- logger.debug("Debug message"); // Won't be logged because level is INFO
- logger.info({
-   message: "User action occurred",
-   destinationPath: "user-actions",
-   type: "AGENT",
- }); // Logged
- logger.error("An error occurred"); // Logged as ERROR
+ ```typescript filename="src/mastra/index.ts" showLineNumbers copy
+ import { Mastra } from '@mastra/core/mastra';
+ import { PinoLogger } from '@mastra/loggers';
+
+ export const mastra = new Mastra({
+   // ...
+   logger: new PinoLogger({
+     name: 'Mastra',
+     level: 'info',
+   }),
+ });
  ```

- ## Methods
+ ## Parameters

  <PropertiesTable
    content={[
      {
-       name: "debug",
-       type: "(message: BaseLogMessage | string, ...args: any[]) => void | Promise<void>",
-       description: "Write a DEBUG-level log. Only recorded if level DEBUG.",
+       name: "name",
+       type: "string",
+       description: "A label used to group and identify logs from this logger.",
      },
-     {
-       name: "info",
-       type: "(message: BaseLogMessage | string, ...args: any[]) => void | Promise<void>",
-       description: "Write an INFO-level log. Only recorded if level INFO.",
+     {
+       name: "level",
+       type: `"debug" | "info" | "warn" | "error"`,
+       description: "Sets the minimum log level. Messages below this level are ignored.",
      },
      {
-       name: "warn",
-       type: "(message: BaseLogMessage | string, ...args: any[]) => void | Promise<void>",
-       description: "Write a WARN-level log. Only recorded if level ≤ WARN.",
+       name: "transports",
+       type: "Record<string, LoggerTransport>",
+       description: "A map of transport instances used to persist logs.",
      },
      {
-       name: "error",
-       type: "(message: BaseLogMessage | string, ...args: any[]) => void | Promise<void>",
-       description: "Write an ERROR-level log. Only recorded if level ≤ ERROR.",
+       name: "overrideDefaultTransports",
+       type: "boolean",
+       isOptional: true,
+       description: "If true, disables the default console transport.",
      },
      {
-       name: "cleanup",
-       type: "() => Promise<void>",
+       name: "formatters",
+       type: "pino.LoggerOptions['formatters']",
        isOptional: true,
-       description:
-         "Cleanup resources held by the logger (e.g., network connections for Upstash). Not all loggers implement this.",
+       description: "Custom Pino formatters for log serialization.",
      },
    ]}
  />

- **Note:** Some loggers require a `BaseLogMessage` object (with `message`, `destinationPath`, `type` fields). For instance, the `File` and `Upstash` loggers need structured messages.

- ## File Transport (Structured Logs)
+ ## File transport (structured logs)

- ```typescript showLineNumbers copy
+ Writes structured logs to a file using the `FileTransport`. The logger accepts a plain message as the first argument and structured metadata as the second argument. These are internally converted to a `BaseLogMessage` and persisted to the configured file path.
+
+ ```typescript filename="src/mastra/loggers/file-transport.ts" showLineNumbers copy
  import { FileTransport } from "@mastra/loggers/file";
+ import { PinoLogger } from "@mastra/loggers/pino";

- const fileLogger = new PinoLogger({
+ export const fileLogger = new PinoLogger({
    name: "Mastra",
    transports: { file: new FileTransport({ path: "test-dir/test.log" }) },
    level: "warn",
  });
+ ```
+
+ ### File transport usage

+ ```typescript showLineNumbers copy
  fileLogger.warn("Low disk space", {
    destinationPath: "system",
    type: "WORKFLOW",
  });
  ```

- ## Upstash Logger (Remote Log Drain)
+ ## Upstash transport (remote log drain)

- ```typescript showLineNumbers copy
+ Streams structured logs to a remote Redis list using the `UpstashTransport`. The logger accepts a string message and a structured metadata object. This enables centralized logging for distributed environments, supporting filtering by `destinationPath`, `type`, and `runId`.
+
+ ```typescript filename="src/mastra/loggers/upstash-transport.ts" showLineNumbers copy
  import { UpstashTransport } from "@mastra/loggers/upstash";
+ import { PinoLogger } from "@mastra/loggers/pino";

- const logger = new PinoLogger({
+ export const upstashLogger = new PinoLogger({
    name: "Mastra",
    transports: {
      upstash: new UpstashTransport({
@@ -91,28 +103,35 @@ const logger = new PinoLogger({
    },
    level: "info",
  });
+ ```

- logger.info({
-   message: "User signed in",
+
+ ### Upstash transport usage
+
+ ```typescript showLineNumbers copy
+ upstashLogger.info("User signed in", {
    destinationPath: "auth",
    type: "AGENT",
    runId: "run_123",
  });
  ```

- ## Custom Transport
+ ## Custom transport

  You can create custom transports using the `createCustomTransport` utility to integrate with any logging service or stream.

- ### Example: Sentry Integration
+ ### Sentry transport example

- ```typescript showLineNumbers copy
+ Creates a custom transport using `createCustomTransport` and integrates it with a third-party logging stream such as `pino-sentry-transport`. This allows forwarding logs to an external system like Sentry for advanced monitoring and observability.
+
+ ```typescript filename="src/mastra/loggers/sentry-transport.ts" showLineNumbers copy
  import { createCustomTransport } from "@mastra/core/loggers";
- import pinoSentry from 'pino-sentry-transport';
+ import { PinoLogger } from "@mastra/loggers/pino";
+ import pinoSentry from "pino-sentry-transport";

  const sentryStream = await pinoSentry({
    sentry: {
-     dsn: 'YOUR_SENTRY_DSN',
+     dsn: "YOUR_SENTRY_DSN",
      _experiments: {
        enableLogs: true,
      },
@@ -121,9 +140,9 @@ const sentryStream = await pinoSentry({

  const customTransport = createCustomTransport(sentryStream);

- const logger = new PinoLogger({
+ export const sentryLogger = new PinoLogger({
    name: "Mastra",
-   transports: { sentry: customTransport },
    level: "info",
+   transports: { sentry: customTransport },
  });
- ```
+ ```
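
The updated logger reference above documents `name`, `level`, `transports`, and `formatters` as the main `PinoLogger` options. Below is a minimal sketch of how those options might be combined, reusing the `FileTransport` shown in the diff; the `appLogger` name, log path, and the `level` formatter shape (standard pino conventions) are illustrative assumptions, not part of the package.

```typescript
import { PinoLogger } from "@mastra/loggers/pino";
import { FileTransport } from "@mastra/loggers/file";

// Hypothetical combination of the documented options: a file transport plus
// custom pino formatters. Path and names are placeholders.
export const appLogger = new PinoLogger({
  name: "Mastra",
  level: "info",
  transports: {
    file: new FileTransport({ path: "logs/mastra.log" }),
  },
  formatters: {
    // Emit the level label ("info") instead of pino's numeric level.
    level: (label) => ({ level: label }),
  },
});

// Structured metadata goes in the second argument, as in the
// file-transport example above.
appLogger.info("Workflow finished", {
  destinationPath: "workflows",
  type: "WORKFLOW",
});
```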
package/.docs/raw/reference/observability/providers/langfuse.mdx

@@ -11,36 +11,69 @@ Langfuse is an open-source observability platform designed specifically for LLM

  ## Configuration

- To use Langfuse with Mastra, you'll need to configure the following environment variables:
+ To use Langfuse with Mastra, you can configure it using either environment variables or directly in your Mastra configuration.
+
+ ### Using Environment Variables
+
+ Set the following environment variables:

  ```env
- LANGFUSE_PUBLIC_KEY=your_public_key
- LANGFUSE_SECRET_KEY=your_secret_key
- LANGFUSE_BASEURL=https://cloud.langfuse.com # Optional - defaults to cloud.langfuse.com
+ OTEL_EXPORTER_OTLP_ENDPOINT="https://cloud.langfuse.com/api/public/otel/v1/traces" # 🇪🇺 EU data region
+ # OTEL_EXPORTER_OTLP_ENDPOINT="https://us.cloud.langfuse.com/api/public/otel/v1/traces" # 🇺🇸 US data region
+
+ OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic ${AUTH_STRING}"
+ ```
+
+ Where `AUTH_STRING` is the base64-encoded combination of your public and secret keys (see below).
+
+ ### Generating AUTH_STRING
+
+ The authorization uses basic auth with your Langfuse API keys. You can generate the base64-encoded auth string using:
+
+ ```bash
+ echo -n "pk-lf-1234567890:sk-lf-1234567890" | base64
  ```

- **Important**: When configuring the telemetry export settings, the `traceName` parameter must be set to `"ai"` for the Langfuse integration to work properly.
+ For long API keys on GNU systems, you may need to add `-w 0` to prevent auto-wrapping:
+
+ ```bash
+ echo -n "pk-lf-1234567890:sk-lf-1234567890" | base64 -w 0
+ ```

  ## Implementation

- Here's how to configure Mastra to use Langfuse:
+ Here's how to configure Mastra to use Langfuse with OpenTelemetry:
+
+ ```typescript
+ import { Mastra } from "@mastra/core";
+
+ export const mastra = new Mastra({
+   // ... other config
+   telemetry: {
+     enabled: true,
+     export: {
+       type: 'otlp',
+       endpoint: 'https://cloud.langfuse.com/api/public/otel/v1/traces', // or your preferred endpoint
+       headers: {
+         Authorization: `Basic ${AUTH_STRING}`, // Your base64-encoded auth string
+       },
+     },
+   },
+ });
+ ```
+
+ Alternatively, if you're using environment variables, you can simplify the configuration:

  ```typescript
  import { Mastra } from "@mastra/core";
- import { LangfuseExporter } from "langfuse-vercel";

  export const mastra = new Mastra({
    // ... other config
    telemetry: {
-     serviceName: "ai", // this must be set to "ai" so that the LangfuseExporter thinks it's an AI SDK trace
      enabled: true,
      export: {
-       type: "custom",
-       exporter: new LangfuseExporter({
-         publicKey: process.env.LANGFUSE_PUBLIC_KEY,
-         secretKey: process.env.LANGFUSE_SECRET_KEY,
-         baseUrl: process.env.LANGFUSE_BASEURL,
-       }),
+       type: 'otlp',
+       // endpoint and headers will be read from OTEL_EXPORTER_OTLP_* env vars
      },
    },
  });
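
The `AUTH_STRING` in the new Langfuse instructions is simply the base64 encoding of `publicKey:secretKey`. A small sketch of computing it in Node.js instead of the shell, with placeholder keys:

```typescript
// Equivalent of the `echo -n ... | base64` command above.
// The key values are placeholders, not real credentials.
const publicKey = "pk-lf-1234567890";
const secretKey = "sk-lf-1234567890";

const authString = Buffer.from(`${publicKey}:${secretKey}`).toString("base64");

// Matches the header format expected by the OTLP exporter:
// OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic <AUTH_STRING>"
console.log(`Authorization=Basic ${authString}`);
```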
package/.docs/raw/reference/workflows/dountil.mdx

@@ -47,5 +47,4 @@ workflow.dountil(stepOne, async ({ inputData }) => true);

  ## Related

- - [Loops](../../docs/workflows/control-flow.mdx#loops)
- - [Loops example](../../examples/workflows/control-flow.mdx)
+ - [Control flow](../../docs/workflows/control-flow.mdx)
package/.docs/raw/reference/workflows/dowhile.mdx

@@ -47,5 +47,4 @@ workflow.dowhile(stepOne, async ({ inputData }) => true);

  ## Related

- - [Loops](../../docs/workflows/flow-control.mdx#loops)
- - [Loops example](../../examples/workflows/control-flow.mdx)
+ - [Control flow](../../docs/workflows/control-flow.mdx)
package/.docs/raw/reference/workflows/resume.mdx

@@ -15,12 +15,22 @@ const result = await run.start({ inputData: { startValue: 0 } });

  if (result.status === "suspended") {
    const resumedResults = await run.resume({
-     step: result.suspended[0],
      resumeData: { newValue: 0 },
    });
  }
  ```

+
+ For more advanced scenarios where you need to specify the exact step to resume:
+
+ ```typescript
+ await run.resume({
+   step: result.suspended[0], // Explicitly choose which step to resume
+   resumeData: { newValue: 0 },
+ });
+ ```
+ > **Note**: When exactly one step is suspended, you can omit the `step` parameter and the workflow will automatically resume that step. For workflows with multiple suspended steps, you must explicitly specify which step to resume.
+
  ## Parameters

  <PropertiesTable
@@ -40,8 +50,8 @@ if (result.status === "suspended") {
    {
      name: "step",
      type: "Step | Step[] | string | string[]",
-     description: "The step(s) to resume execution from",
-     isOptional: false,
+     description: "The step(s) to resume execution from. When omitted, the workflow will automatically resume the suspended step if exactly one step is suspended. Throws an error if multiple steps are suspended and no step is specified.",
+     isOptional: true,
    },
    {
      name: "runtimeContext",
package/.docs/raw/reference/workflows/step.mdx

@@ -156,5 +156,4 @@ const processOrder = createStep({

  - [Control flow](../../docs/workflows/control-flow.mdx)
  - [Using agents and tools](../../docs/workflows/using-with-agents-and-tools.mdx)
- - [Tool and agent as step example](../../examples/workflows/agent-and-tool-interop.mdx)
  - [Input data mapping](../../docs/workflows/input-data-mapping.mdx)
package/.docs/raw/reference/workflows/streamVNext.mdx

@@ -0,0 +1,215 @@
+ ---
+ title: "Reference: Workflow.streamVNext() | Streaming | Workflows | Mastra Docs"
+ description: Documentation for the `.streamVNext()` method in Mastra workflows, which enables real-time streaming of responses.
+ ---
+
+ # `streamVNext()`
+
+ The `streamVNext()` method enables real-time streaming of responses from a workflow.
+
+ ## Usage
+
+ ```typescript
+ const run = await myWorkflow.createRunAsync();
+
+ // Add a stream to monitor execution
+ const stream = run.streamVNext({ inputData: {...} });
+
+ for await (const chunk of stream) {
+   // do something with the chunk
+ }
+ ```
+
+ ## Protocol
+
+ <PropertiesTable
+   content={[
+     {
+       name: "start",
+       type: "object",
+       description: "The workflow starts",
+       isOptional: false,
+       properties: [
+         {
+           type: "object",
+           parameters: [
+             {
+               name: "example",
+               type: "{ type: 'start', runId: '1', from: 'WORKFLOW', payload: { runId: '1' } }",
+               description: "Example message structure",
+               isOptional: false,
+             },
+           ],
+         },
+       ],
+     },
+     {
+       name: "step-start",
+       type: "object",
+       description: "The start of a step",
+       isOptional: false,
+       properties: [
+         {
+           type: "object",
+           parameters: [
+             {
+               name: "example",
+               type: "{ type: 'step-start', runId: '1', from: 'WORKFLOW', payload: { id: 'fetch-weather' } }",
+               description: "Example message structure",
+               isOptional: false,
+             },
+           ],
+         },
+       ],
+     },
+     {
+       name: "step-output",
+       type: "object",
+       description: "Custom output from a step",
+       isOptional: false,
+       properties: [
+         {
+           type: "object",
+           parameters: [
+             {
+               name: "example",
+               type: "{ type: 'step-output', runId: '1', from: 'WORKFLOW', payload: { stepName: 'my step', args: { ... }, stepCallId: 'uuid', startedAt: 1717000000000, status: 'running' } }",
+               description: "Example message structure",
+               isOptional: false,
+             },
+           ],
+         },
+       ],
+     },
+     {
+       name: "step-result",
+       type: "object",
+       description: "The result of a step",
+       isOptional: false,
+       properties: [
+         {
+           type: "object",
+           parameters: [
+             {
+               name: "example",
+               type: "{ type: 'step-result', runId: '1', from: 'WORKFLOW', payload: { stepName: 'my step', result: { ... }, stepCallId: 'uuid', endedAt: 1717000000000, status: 'success', output: [Object] } }",
+               description: "Example message structure",
+               isOptional: false,
+             },
+           ],
+         },
+       ],
+     },
+     {
+       name: "finish",
+       type: "object",
+       description: "The end of the workflow",
+       isOptional: false,
+       properties: [
+         {
+           type: "object",
+           parameters: [
+             {
+               name: "example",
+               type: "{ type: 'finish', runId: '1', from: 'WORKFLOW', payload: { totalUsage: { promptTokens: 100, completionTokens: 100, totalTokens: 200 } } }",
+               description: "Example message structure",
+               isOptional: false,
+             },
+           ],
+         },
+       ],
+     },
+   ]}
+ />
+
+ ## Returns
+
+ <PropertiesTable
+   content={[
+     {
+       name: "usage",
+       type: "Promise<object>",
+       isOptional: true,
+       description: "Total usage of the workflow, including sub agents/workflows as a step.",
+       properties: [
+         {
+           type: "number",
+           parameters: [
+             {
+               name: "promptTokens",
+               type: "number",
+               isOptional: true,
+               description: "The number of prompt tokens used by the agent.",
+             },
+           ],
+         },
+         {
+           type: "number",
+           parameters: [
+             {
+               name: "completionTokens",
+               type: "number",
+               isOptional: true,
+               description: "The number of completion tokens used by the agent.",
+             },
+           ],
+         },
+         {
+           type: "number",
+           parameters: [
+             {
+               name: "totalTokens",
+               type: "number",
+               isOptional: true,
+               description: "The total number of tokens used by the agent.",
+             },
+           ],
+         },
+       ],
+     },
+     {
+       name: "status",
+       type: "Promise<string>",
+       isOptional: true,
+       description: "The status of the workflow run.",
+     },
+     {
+       name: "result",
+       type: "Promise<object>",
+       isOptional: true,
+       description: "The result of the workflow run.",
+     },
+   ]}
+ />
+
+ ## Examples
+
+ ### Basic Streaming
+
+ ```typescript
+ const run = await myWorkflow.createRunAsync();
+ const stream = run.streamVNext({ inputData: {...} });
+
+ for await (const chunk of stream) {
+   console.log(chunk);
+ }
+ ```
+
+ ### Structured Output Streaming
+
+ ```typescript
+ const run = await myWorkflow.createRunAsync();
+ const stream = run.streamVNext({ inputData: {...} });
+
+ const result = await stream.result;
+ console.log("Final structured result:", result);
+ ```
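
Putting the documented return values together, here is a sketch that consumes the chunks and then awaits the promise-valued `status`, `result`, and `usage` properties; the `inputData` shape and the workflow name are illustrative placeholders.

```typescript
// Sketch based on the Protocol and Returns tables above.
const run = await myWorkflow.createRunAsync();
const stream = run.streamVNext({ inputData: { city: "New York" } }); // placeholder input

for await (const chunk of stream) {
  if (chunk.type === "step-result") {
    // Each step's result arrives as a step-result chunk.
    console.log("step finished:", chunk.payload);
  }
}

console.log("status:", await stream.status); // e.g. "success"
console.log("result:", await stream.result); // final workflow output
console.log("usage:", await stream.usage);   // { promptTokens, completionTokens, totalTokens }
```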
package/.docs/raw/workflows/streaming.mdx

@@ -0,0 +1,115 @@
+ ---
+ title: "Using Workflow Streaming | Workflows | Mastra Docs"
+ description: Documentation on how to stream workflows
+ ---
+
+ # Workflow Streaming
+
+ Workflows in Mastra have access to a powerful streaming protocol! With seamless integration into tools or agents as a step, you can stream responses directly back to your clients, creating a more interactive and engaging experience.
+
+ ## Usage
+
+ To use the new protocol, call the `streamVNext` method on a workflow run. This method returns a custom `MastraWorkflowStream`. This stream extends a `ReadableStream`, so all basic stream methods are available.
+
+ ```typescript
+ const run = await myWorkflow.createRunAsync();
+ const stream = await run.streamVNext({ inputData: { city: 'New York' } });
+
+ for await (const chunk of stream) {
+   console.log(chunk);
+ }
+ ```
+
+ Each chunk is a JSON object with the following properties:
+
+ ```json
+ {
+   type: string;
+   runId: string;
+   from: string;
+   payload: Record<string, any>;
+ }
+ ```
+
+ The stream also exposes a few utility properties to help you with the streaming process:
+
+ - `stream.status` - The status of the workflow run.
+ - `stream.result` - The result of the workflow run.
+ - `stream.usage` - The total token usage of the workflow run.
+
+ ### How to use the stream in a tool
+
+ Each tool gets a `writer` argument, which is a writable stream with a custom write function. This write function is used to write the tool's response to the stream.
+
+ ```typescript filename="src/mastra/workflows/weather.ts" showLineNumbers copy
+ import { createStep } from "@mastra/core/workflows";
+ import { z } from "zod";
+
+ export const weatherInfo = createStep({
+   id: "weather-info",
+   inputSchema: z.object({
+     city: z.string(),
+   }),
+   outputSchema: z.object({
+     conditions: z.string(),
+     temperature: z.number(),
+   }),
+   description: `Fetches the current weather information for a given city`,
+   execute: async ({ inputData: { city }, writer }) => {
+     writer.write({
+       type: "weather-data",
+       args: {
+         city
+       },
+       status: "pending"
+     })
+     // Tool logic here (e.g., API call)
+     console.log("Using tool to fetch weather information for", city);
+
+     writer.write({
+       type: "weather-data",
+       args: {
+         city
+       },
+       status: "success",
+       result: {
+         temperature: 20,
+         conditions: "Sunny"
+       }
+     })
+
+     return { temperature: 20, conditions: "Sunny" }; // Example return
+   },
+ });
+ ```
+
+ If you want to use an agent inside a step, you can call the `streamVNext` method on the agent and pipe the resulting stream into the step's `writer`.
+
+ ```typescript filename="src/mastra/workflows/weather.ts" showLineNumbers copy
+ import { createStep } from "@mastra/core/workflows";
+ import { z } from "zod";
+
+ export const weatherInfo = createStep({
+   id: "weather-info",
+   inputSchema: z.object({
+     city: z.string(),
+   }),
+   outputSchema: z.object({
+     text: z.string(),
+   }),
+   description: `Fetches the current weather information for a given city`,
+   execute: async ({ inputData: { city }, writer, mastra }) => {
+     const agent = mastra.getAgent('weatherAgent')
+     const stream = await agent.streamVNext(`What is the weather in ${city}?`);
+
+     await stream.pipeTo(writer);
+
+     return {
+       text: await stream.text,
+     }
+   },
+ });
+ ```
+
+ Piping the agent's stream into the step's `writer` allows the agent's usage to be summed automatically, so the workflow's total usage count can be calculated.
+
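
Building on the weather step above, here is a sketch of a consumer that filters chunks by `type`, assuming custom `writer.write()` payloads surface as `step-output` chunks as described in the `streamVNext` reference; the workflow name and input are placeholders.

```typescript
// Sketch: react only to custom step output and the finish chunk.
const run = await myWorkflow.createRunAsync();
const stream = await run.streamVNext({ inputData: { city: "New York" } });

for await (const chunk of stream) {
  if (chunk.type === "step-output") {
    // Custom writer.write() payloads from steps/tools arrive here,
    // e.g. the "weather-data" updates written above.
    console.log("custom step output:", chunk.payload);
  } else if (chunk.type === "finish") {
    console.log("workflow finished, usage:", chunk.payload.totalUsage);
  }
}

console.log("final result:", await stream.result);
```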