@mastra/client-js 0.0.0-generate-message-id-20250512171942 → 0.0.0-inject-middleware-20250528222017

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,14 @@
+import type { RuntimeContext } from '@mastra/core/runtime-context';
 import type {
-  GetWorkflowResponse,
   ClientOptions,
-  WorkflowRunResult,
+  GetWorkflowResponse,
   GetWorkflowRunsResponse,
   GetWorkflowRunsParams,
+  WorkflowRunResult,
+  WorkflowWatchResult,
 } from '../types';
 
+import { parseClientRuntimeContext } from '../utils';
 import { BaseResource } from './base';
 
 const RECORD_SEPARATOR = '\x1E';
@@ -18,6 +21,77 @@ export class Workflow extends BaseResource {
     super(options);
   }
 
+  /**
+   * Creates an async generator that processes a readable stream and yields workflow records
+   * separated by the Record Separator character (\x1E)
+   *
+   * @param stream - The readable stream to process
+   * @returns An async generator that yields parsed records
+   */
+  private async *streamProcessor(stream: ReadableStream): AsyncGenerator<WorkflowWatchResult, void, unknown> {
+    const reader = stream.getReader();
+
+    // Track if we've finished reading from the stream
+    let doneReading = false;
+    // Buffer to accumulate partial chunks
+    let buffer = '';
+
+    try {
+      while (!doneReading) {
+        // Read the next chunk from the stream
+        const { done, value } = await reader.read();
+        doneReading = done;
+
+        // Skip processing if we're done and there's no value
+        if (done && !value) continue;
+
+        try {
+          // Decode binary data to text
+          const decoded = value ? new TextDecoder().decode(value) : '';
+
+          // Split the combined buffer and new data by record separator
+          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
+
+          // The last chunk might be incomplete, so save it for the next iteration
+          buffer = chunks.pop() || '';
+
+          // Process complete chunks
+          for (const chunk of chunks) {
+            if (chunk) {
+              // Only process non-empty chunks
+              if (typeof chunk === 'string') {
+                try {
+                  const parsedChunk = JSON.parse(chunk);
+                  yield parsedChunk;
+                } catch {
+                  // Silently ignore parsing errors to maintain stream processing
+                  // This allows the stream to continue even if one record is malformed
+                }
+              }
+            }
+          }
+        } catch {
+          // Silently ignore parsing errors to maintain stream processing
+          // This allows the stream to continue even if one record is malformed
+        }
+      }
+
+      // Process any remaining data in the buffer after stream is done
+      if (buffer) {
+        try {
+          yield JSON.parse(buffer);
+        } catch {
+          // Ignore parsing error for final chunk
+        }
+      }
+    } finally {
+      // Always ensure we clean up the reader
+      reader.cancel().catch(() => {
+        // Ignore cancel errors
+      });
+    }
+  }
+
   /**
    * Retrieves details about the workflow
    * @returns Promise containing workflow details including steps and graphs
@@ -56,22 +130,10 @@ export class Workflow extends BaseResource {
     }
   }
 
-  /**
-   * @deprecated Use `startAsync` instead
-   * Executes the workflow with the provided parameters
-   * @param params - Parameters required for workflow execution
-   * @returns Promise containing the workflow execution results
-   */
-  execute(params: Record<string, any>): Promise<WorkflowRunResult> {
-    return this.request(`/api/workflows/${this.workflowId}/execute`, {
-      method: 'POST',
-      body: params,
-    });
-  }
-
   /**
    * Creates a new workflow run
-   * @returns Promise containing the generated run ID
+   * @param params - Optional object containing the optional runId
+   * @returns Promise containing the runId of the created run
    */
   createRun(params?: { runId?: string }): Promise<{ runId: string }> {
     const searchParams = new URLSearchParams();
@@ -80,150 +142,162 @@ export class Workflow extends BaseResource {
       searchParams.set('runId', params.runId);
     }
 
-    return this.request(`/api/workflows/${this.workflowId}/createRun?${searchParams.toString()}`, {
+    return this.request(`/api/workflows/${this.workflowId}/create-run?${searchParams.toString()}`, {
       method: 'POST',
     });
   }
 
   /**
    * Starts a workflow run synchronously without waiting for the workflow to complete
-   * @param params - Object containing the runId and triggerData
+   * @param params - Object containing the runId, inputData and runtimeContext
    * @returns Promise containing success message
    */
-  start(params: { runId: string; triggerData: Record<string, any> }): Promise<{ message: string }> {
+  start(params: {
+    runId: string;
+    inputData: Record<string, any>;
+    runtimeContext?: RuntimeContext | Record<string, any>;
+  }): Promise<{ message: string }> {
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/start?runId=${params.runId}`, {
       method: 'POST',
-      body: params?.triggerData,
+      body: { inputData: params?.inputData, runtimeContext },
     });
   }
 
   /**
    * Resumes a suspended workflow step synchronously without waiting for the workflow to complete
-   * @param stepId - ID of the step to resume
-   * @param runId - ID of the workflow run
-   * @param context - Context to resume the workflow with
-   * @returns Promise containing the workflow resume results
+   * @param params - Object containing the runId, step, resumeData and runtimeContext
+   * @returns Promise containing success message
    */
   resume({
-    stepId,
+    step,
     runId,
-    context,
+    resumeData,
+    ...rest
   }: {
-    stepId: string;
+    step: string | string[];
     runId: string;
-    context: Record<string, any>;
+    resumeData?: Record<string, any>;
+    runtimeContext?: RuntimeContext | Record<string, any>;
   }): Promise<{ message: string }> {
+    const runtimeContext = parseClientRuntimeContext(rest.runtimeContext);
     return this.request(`/api/workflows/${this.workflowId}/resume?runId=${runId}`, {
       method: 'POST',
+      stream: true,
       body: {
-        stepId,
-        context,
+        step,
+        resumeData,
+        runtimeContext,
       },
     });
   }
 
   /**
    * Starts a workflow run asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the optional runId and triggerData
+   * @param params - Object containing the optional runId, inputData and runtimeContext
    * @returns Promise containing the workflow execution results
    */
-  startAsync(params: { runId?: string; triggerData: Record<string, any> }): Promise<WorkflowRunResult> {
+  startAsync(params: {
+    runId?: string;
+    inputData: Record<string, any>;
+    runtimeContext?: RuntimeContext | Record<string, any>;
+  }): Promise<WorkflowRunResult> {
     const searchParams = new URLSearchParams();
 
     if (!!params?.runId) {
       searchParams.set('runId', params.runId);
     }
 
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+
     return this.request(`/api/workflows/${this.workflowId}/start-async?${searchParams.toString()}`, {
       method: 'POST',
-      body: params?.triggerData,
+      body: { inputData: params.inputData, runtimeContext },
     });
   }
 
   /**
-   * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
-   * @param params - Object containing the runId, stepId, and context
-   * @returns Promise containing the workflow resume results
+   * Starts a vNext workflow run and returns a stream
+   * @param params - Object containing the optional runId, inputData and runtimeContext
+   * @returns Promise containing the vNext workflow execution results
    */
-  resumeAsync(params: { runId: string; stepId: string; context: Record<string, any> }): Promise<WorkflowRunResult> {
-    return this.request(`/api/workflows/${this.workflowId}/resume-async?runId=${params.runId}`, {
-      method: 'POST',
-      body: {
-        stepId: params.stepId,
-        context: params.context,
-      },
-    });
-  }
+  async stream(params: { runId?: string; inputData: Record<string, any>; runtimeContext?: RuntimeContext }) {
+    const searchParams = new URLSearchParams();
 
-  /**
-   * Creates an async generator that processes a readable stream and yields records
-   * separated by the Record Separator character (\x1E)
-   *
-   * @param stream - The readable stream to process
-   * @returns An async generator that yields parsed records
-   */
-  private async *streamProcessor(stream: ReadableStream): AsyncGenerator<WorkflowRunResult, void, unknown> {
-    const reader = stream.getReader();
+    if (!!params?.runId) {
+      searchParams.set('runId', params.runId);
+    }
 
-    // Track if we've finished reading from the stream
-    let doneReading = false;
-    // Buffer to accumulate partial chunks
-    let buffer = '';
+    const runtimeContext = params.runtimeContext ? Object.fromEntries(params.runtimeContext.entries()) : undefined;
+    const response: Response = await this.request(
+      `/api/workflows/${this.workflowId}/stream?${searchParams.toString()}`,
+      {
+        method: 'POST',
+        body: { inputData: params.inputData, runtimeContext },
+        stream: true,
+      },
+    );
 
-    try {
-      while (!doneReading) {
-        // Read the next chunk from the stream
-        const { done, value } = await reader.read();
-        doneReading = done;
+    if (!response.ok) {
+      throw new Error(`Failed to stream vNext workflow: ${response.statusText}`);
+    }
 
-        // Skip processing if we're done and there's no value
-        if (done && !value) continue;
+    if (!response.body) {
+      throw new Error('Response body is null');
+    }
 
+    // Create a transform stream that processes the response body
+    const transformStream = new TransformStream<ArrayBuffer, WorkflowWatchResult>({
+      start() {},
+      async transform(chunk, controller) {
         try {
           // Decode binary data to text
-          const decoded = value ? new TextDecoder().decode(value) : '';
-
-          // Split the combined buffer and new data by record separator
-          const chunks = (buffer + decoded).split(RECORD_SEPARATOR);
+          const decoded = new TextDecoder().decode(chunk);
 
-          // The last chunk might be incomplete, so save it for the next iteration
-          buffer = chunks.pop() || '';
+          // Split by record separator
+          const chunks = decoded.split(RECORD_SEPARATOR);
 
-          // Process complete chunks
+          // Process each chunk
           for (const chunk of chunks) {
             if (chunk) {
-              // Only process non-empty chunks
-              if (typeof chunk === 'string') {
-                try {
-                  const parsedChunk = JSON.parse(chunk);
-                  yield parsedChunk;
-                } catch {
-                  // Silently ignore parsing errors to maintain stream processing
-                  // This allows the stream to continue even if one record is malformed
-                }
+              try {
+                const parsedChunk = JSON.parse(chunk);
+                controller.enqueue(parsedChunk);
+              } catch {
+                // Silently ignore parsing errors
               }
             }
           }
         } catch {
-          // Silently ignore parsing errors to maintain stream processing
-          // This allows the stream to continue even if one record is malformed
+          // Silently ignore processing errors
         }
-      }
+      },
+    });
 
-      // Process any remaining data in the buffer after stream is done
-      if (buffer) {
-        try {
-          yield JSON.parse(buffer);
-        } catch {
-          // Ignore parsing error for final chunk
-        }
-      }
-    } finally {
-      // Always ensure we clean up the reader
-      reader.cancel().catch(() => {
-        // Ignore cancel errors
-      });
-    }
+    // Pipe the response body through the transform stream
+    return response.body.pipeThrough(transformStream);
+  }
+
+  /**
+   * Resumes a suspended workflow step asynchronously and returns a promise that resolves when the workflow is complete
+   * @param params - Object containing the runId, step, resumeData and runtimeContext
+   * @returns Promise containing the workflow resume results
+   */
+  resumeAsync(params: {
+    runId: string;
+    step: string | string[];
+    resumeData?: Record<string, any>;
+    runtimeContext?: RuntimeContext | Record<string, any>;
+  }): Promise<WorkflowRunResult> {
+    const runtimeContext = parseClientRuntimeContext(params.runtimeContext);
+    return this.request(`/api/workflows/${this.workflowId}/resume-async?runId=${params.runId}`, {
+      method: 'POST',
+      body: {
+        step: params.step,
+        resumeData: params.resumeData,
+        runtimeContext,
+      },
+    });
   }
 
   /**
@@ -231,7 +305,7 @@ export class Workflow extends BaseResource {
    * @param runId - Optional run ID to filter the watch stream
    * @returns AsyncGenerator that yields parsed records from the workflow watch stream
    */
-  async watch({ runId }: { runId?: string }, onRecord: (record: WorkflowRunResult) => void) {
+  async watch({ runId }: { runId?: string }, onRecord: (record: WorkflowWatchResult) => void) {
     const response: Response = await this.request(`/api/workflows/${this.workflowId}/watch?runId=${runId}`, {
       stream: true,
     });
@@ -245,7 +319,35 @@ export class Workflow extends BaseResource {
     }
 
     for await (const record of this.streamProcessor(response.body)) {
-      onRecord(record);
+      if (typeof record === 'string') {
+        onRecord(JSON.parse(record));
+      } else {
+        onRecord(record);
+      }
     }
   }
+
+  /**
+   * Creates a new ReadableStream from an iterable or async iterable of objects,
+   * serializing each as JSON and separating them with the record separator (\x1E).
+   *
+   * @param records - An iterable or async iterable of objects to stream
+   * @returns A ReadableStream emitting the records as JSON strings separated by the record separator
+   */
+  static createRecordStream(records: Iterable<any> | AsyncIterable<any>): ReadableStream {
+    const encoder = new TextEncoder();
+    return new ReadableStream({
+      async start(controller) {
+        try {
+          for await (const record of records as AsyncIterable<any>) {
+            const json = JSON.stringify(record) + RECORD_SEPARATOR;
+            controller.enqueue(encoder.encode(json));
+          }
+          controller.close();
+        } catch (err) {
+          controller.error(err);
+        }
+      },
+    });
+  }
 }
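
For orientation, here is a minimal usage sketch of the reworked client surface above. It is hedged: the MastraClient entry point, the getWorkflow() accessor, the baseUrl option, the server URL and the 'weatherWorkflow' id are assumptions for illustration and are not taken from this diff. The key changes it exercises: execute() is gone, inputData replaces triggerData, the create-run endpoint replaces createRun, and an optional runtimeContext (a RuntimeContext instance or a plain object) is serialized by parseClientRuntimeContext before being sent.

import { MastraClient } from '@mastra/client-js';

// Assumed client setup and workflow id, for illustration only.
const client = new MastraClient({ baseUrl: 'http://localhost:4111' });
const workflow = client.getWorkflow('weatherWorkflow');

// POSTs to /api/workflows/weatherWorkflow/create-run (previously /createRun).
const { runId } = await workflow.createRun();

// Subscribe to watch events first (do not await; the stream stays open while the run progresses).
const watcher = workflow.watch({ runId }, record => {
  // record is a WorkflowWatchResult parsed from the \x1E-separated stream.
  console.log(record.runId, record);
});

// `inputData` replaces the old `triggerData`; runtimeContext may be a plain object
// or a RuntimeContext instance (see parseClientRuntimeContext in src/utils.ts below).
await workflow.start({
  runId,
  inputData: { city: 'Paris' },
  runtimeContext: { tenantId: 'acme' },
});

await watcher;
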
package/src/types.ts CHANGED
@@ -3,16 +3,21 @@ import type {
   AiMessageType,
   CoreMessage,
   QueryResult,
-  StepAction,
-  StepGraph,
   StorageThreadType,
-  BaseLogMessage,
-  WorkflowRunResult as CoreWorkflowRunResult,
   WorkflowRuns,
+  LegacyWorkflowRuns,
 } from '@mastra/core';
-
 import type { AgentGenerateOptions, AgentStreamOptions } from '@mastra/core/agent';
-import type { NewWorkflow, WatchEvent, WorkflowResult as VNextWorkflowResult } from '@mastra/core/workflows/vNext';
+import type { BaseLogMessage } from '@mastra/core/logger';
+
+import type { MCPToolType, ServerInfo } from '@mastra/core/mcp';
+import type { RuntimeContext } from '@mastra/core/runtime-context';
+import type { Workflow, WatchEvent, WorkflowResult } from '@mastra/core/workflows';
+import type {
+  StepAction,
+  StepGraph,
+  LegacyWorkflowRunResult as CoreLegacyWorkflowRunResult,
+} from '@mastra/core/workflows/legacy';
 import type { JSONSchema7 } from 'json-schema';
 import type { ZodSchema } from 'zod';
 
@@ -38,21 +43,40 @@ export interface RequestOptions {
   signal?: AbortSignal;
 }
 
+type WithoutMethods<T> = {
+  [K in keyof T as T[K] extends (...args: any[]) => any
+    ? never
+    : T[K] extends { (): any }
+      ? never
+      : T[K] extends undefined | ((...args: any[]) => any)
+        ? never
+        : K]: T[K];
+};
+
 export interface GetAgentResponse {
   name: string;
   instructions: string;
   tools: Record<string, GetToolResponse>;
+  workflows: Record<string, GetWorkflowResponse>;
   provider: string;
   modelId: string;
+  defaultGenerateOptions: WithoutMethods<AgentGenerateOptions>;
+  defaultStreamOptions: WithoutMethods<AgentStreamOptions>;
 }
 
 export type GenerateParams<T extends JSONSchema7 | ZodSchema | undefined = undefined> = {
   messages: string | string[] | CoreMessage[] | AiMessageType[];
-} & Partial<Omit<AgentGenerateOptions<T>, 'experimental_generateMessageId'>>;
+  output?: T;
+  experimental_output?: T;
+  runtimeContext?: RuntimeContext | Record<string, any>;
+} & WithoutMethods<Omit<AgentGenerateOptions<T>, 'output' | 'experimental_output' | 'runtimeContext'>>;
 
 export type StreamParams<T extends JSONSchema7 | ZodSchema | undefined = undefined> = {
   messages: string | string[] | CoreMessage[] | AiMessageType[];
-} & Omit<AgentStreamOptions<T>, 'onFinish' | 'onStepFinish' | 'telemetry' | 'experimental_generateMessageId'>;
+  output?: T;
+  experimental_output?: T;
+  runtimeContext?: RuntimeContext | Record<string, any>;
+} & WithoutMethods<Omit<AgentStreamOptions<T>, 'output' | 'experimental_output' | 'runtimeContext'>>;
 
 export interface GetEvalsByAgentIdResponse extends GetAgentResponse {
   evals: any[];
@@ -68,7 +92,7 @@ export interface GetToolResponse {
   outputSchema: string;
 }
 
-export interface GetWorkflowResponse {
+export interface GetLegacyWorkflowResponse {
   name: string;
   triggerSchema: string;
   steps: Record<string, StepAction<any, any, any, any>>;
@@ -85,17 +109,20 @@ export interface GetWorkflowRunsParams {
   resourceId?: string;
 }
 
+export type GetLegacyWorkflowRunsResponse = LegacyWorkflowRuns;
+
 export type GetWorkflowRunsResponse = WorkflowRuns;
 
-export type WorkflowRunResult = {
+export type LegacyWorkflowRunResult = {
   activePaths: Record<string, { status: string; suspendPayload?: any; stepPath: string[] }>;
-  results: CoreWorkflowRunResult<any, any, any>['results'];
+  results: CoreLegacyWorkflowRunResult<any, any, any>['results'];
   timestamp: number;
   runId: string;
 };
 
-export interface GetVNextWorkflowResponse {
+export interface GetWorkflowResponse {
   name: string;
+  description?: string;
   steps: {
     [key: string]: {
       id: string;
@@ -106,14 +133,14 @@ export interface GetVNextWorkflowResponse {
       suspendSchema: string;
     };
   };
-  stepGraph: NewWorkflow['serializedStepGraph'];
+  stepGraph: Workflow['serializedStepGraph'];
   inputSchema: string;
   outputSchema: string;
 }
 
-export type VNextWorkflowWatchResult = WatchEvent & { runId: string };
+export type WorkflowWatchResult = WatchEvent & { runId: string };
 
-export type VNextWorkflowRunResult = VNextWorkflowResult<any, any>;
+export type WorkflowRunResult = WorkflowResult<any, any>;
 export interface UpsertVectorParams {
   indexName: string;
   vectors: number[][];
@@ -152,10 +179,10 @@ export interface SaveMessageToMemoryParams {
 export type SaveMessageToMemoryResponse = MessageType[];
 
 export interface CreateMemoryThreadParams {
-  title: string;
-  metadata: Record<string, any>;
+  title?: string;
+  metadata?: Record<string, any>;
   resourceId: string;
-  threadId: string;
+  threadId?: string;
   agentId: string;
 }
 
@@ -174,6 +201,13 @@ export interface UpdateMemoryThreadParams {
   resourceId: string;
 }
 
+export interface GetMemoryThreadMessagesParams {
+  /**
+   * Limit the number of messages to retrieve (default: 40)
+   */
+  limit?: number;
+}
+
 export interface GetMemoryThreadMessagesResponse {
   messages: CoreMessage[];
   uiMessages: AiMessageType[];
@@ -260,3 +294,21 @@ export interface GetNetworkResponse {
   };
   state?: Record<string, any>;
 }
+
+export interface McpServerListResponse {
+  servers: ServerInfo[];
+  next: string | null;
+  total_count: number;
+}
+
+export interface McpToolInfo {
+  id: string;
+  name: string;
+  description?: string;
+  inputSchema: string;
+  toolType?: MCPToolType;
+}
+
+export interface McpServerToolListResponse {
+  tools: McpToolInfo[];
+}
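
To make the intent of the new WithoutMethods helper above concrete, here is a small type-level sketch; ExampleOptions is hypothetical and only illustrates the mapping, it is not part of the package. Properties whose values are functions, bare call signatures, or optional callbacks are remapped to never (i.e. dropped), which is what keeps GenerateParams, StreamParams and the agent default options serializable for transport.

// Copy of the mapped type added in types.ts, repeated so this sketch is self-contained.
type WithoutMethods<T> = {
  [K in keyof T as T[K] extends (...args: any[]) => any
    ? never
    : T[K] extends { (): any }
      ? never
      : T[K] extends undefined | ((...args: any[]) => any)
        ? never
        : K]: T[K];
};

// Hypothetical options shape, for illustration only.
type ExampleOptions = {
  temperature: number;
  maxSteps?: number;
  onFinish?: (result: unknown) => void; // optional callback -> dropped
  toJSON(): string; // method -> dropped
};

// Resolves to { temperature: number; maxSteps?: number }.
type SerializableOptions = WithoutMethods<ExampleOptions>;
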
@@ -0,0 +1,11 @@
+import { RuntimeContext } from '@mastra/core/runtime-context';
+
+export function parseClientRuntimeContext(runtimeContext?: RuntimeContext | Record<string, any>) {
+  if (runtimeContext) {
+    if (runtimeContext instanceof RuntimeContext) {
+      return Object.fromEntries(runtimeContext.entries());
+    }
+    return runtimeContext;
+  }
+  return undefined;
+}
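
A short behavioral sketch of the new helper, assuming RuntimeContext exposes a Map-like set()/entries() API (entries() is used by the helper itself; set() and the key below are illustrative). This is what lets every workflow method accept either form of runtimeContext.

import { RuntimeContext } from '@mastra/core/runtime-context';
// parseClientRuntimeContext is the internal helper defined above (src/utils.ts); it is not a public export.

const ctx = new RuntimeContext();
ctx.set('userId', 'user-123');

// A RuntimeContext instance is flattened to a plain object before it goes on the wire:
parseClientRuntimeContext(ctx); // => { userId: 'user-123' }

// A plain object passes through unchanged:
parseClientRuntimeContext({ userId: 'user-123' }); // => { userId: 'user-123' }

// And no context yields undefined, so no runtimeContext value is sent:
parseClientRuntimeContext(undefined); // => undefined
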