@mastra/voice-openai-realtime 0.0.0-vnext-inngest-20250508131921 → 0.0.0-vnextAgentNetwork-20250602134426

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,15 +1,181 @@
  # @mastra/voice-openai-realtime
 
- ## 0.0.0-vnext-inngest-20250508131921
+ ## 0.0.0-vnextAgentNetwork-20250602134426
 
  ### Patch Changes
 
+ - f0d559f: Fix peerdeps for alpha channel
+ - Updated dependencies [ee77e78]
+ - Updated dependencies [592a2db]
+ - Updated dependencies [e5dc18d]
+ - Updated dependencies [ab5adbe]
+ - Updated dependencies [1e8bb40]
+ - Updated dependencies [195c428]
+ - Updated dependencies [f73e11b]
+ - Updated dependencies [c5bf1ce]
+ - Updated dependencies [12b7002]
+ - Updated dependencies [2901125]
+   - @mastra/core@0.0.0-vnextAgentNetwork-20250602134426
+
+ ## 0.10.1-alpha.0
+
+ ### Patch Changes
+
+ - f0d559f: Fix peerdeps for alpha channel
+ - Updated dependencies [1e8bb40]
+   - @mastra/core@0.10.2-alpha.2
+
+ ## 0.10.0
+
+ ### Minor Changes
+
+ - 83da932: Move @mastra/core to peerdeps
+
+ ### Patch Changes
+
+ - d3628af: Add realtime context for openai realtime voice
+ - Updated dependencies [b3a3d63]
+ - Updated dependencies [344f453]
+ - Updated dependencies [0a3ae6d]
+ - Updated dependencies [95911be]
+ - Updated dependencies [f53a6ac]
+ - Updated dependencies [5eb5a99]
+ - Updated dependencies [7e632c5]
+ - Updated dependencies [1e9fbfa]
+ - Updated dependencies [eabdcd9]
+ - Updated dependencies [90be034]
+ - Updated dependencies [99f050a]
+ - Updated dependencies [d0ee3c6]
+ - Updated dependencies [b2ae5aa]
+ - Updated dependencies [23f258c]
+ - Updated dependencies [a7292b0]
+ - Updated dependencies [0dcb9f0]
+ - Updated dependencies [2672a05]
+   - @mastra/core@0.10.0
+
+ ## 0.3.0-alpha.1
+
+ ### Minor Changes
+
+ - 83da932: Move @mastra/core to peerdeps
+
+ ### Patch Changes
+
+ - Updated dependencies [b3a3d63]
+ - Updated dependencies [344f453]
+ - Updated dependencies [0a3ae6d]
+ - Updated dependencies [95911be]
+ - Updated dependencies [5eb5a99]
+ - Updated dependencies [7e632c5]
+ - Updated dependencies [1e9fbfa]
+ - Updated dependencies [b2ae5aa]
+ - Updated dependencies [a7292b0]
+ - Updated dependencies [0dcb9f0]
+   - @mastra/core@0.10.0-alpha.1
+
+ ## 0.2.5-alpha.0
+
+ ### Patch Changes
+
+ - d3628af: Add realtime context for openai realtime voice
+ - Updated dependencies [f53a6ac]
+ - Updated dependencies [eabdcd9]
+ - Updated dependencies [90be034]
+ - Updated dependencies [99f050a]
+ - Updated dependencies [d0ee3c6]
+ - Updated dependencies [23f258c]
+ - Updated dependencies [2672a05]
+   - @mastra/core@0.9.5-alpha.0
+
+ ## 0.2.4
+
+ ### Patch Changes
+
+ - Updated dependencies [396be50]
+ - Updated dependencies [ab80e7e]
+ - Updated dependencies [c3bd795]
+ - Updated dependencies [da082f8]
+ - Updated dependencies [a5810ce]
+ - Updated dependencies [3e9c131]
+ - Updated dependencies [3171b5b]
+ - Updated dependencies [973e5ac]
+ - Updated dependencies [daf942f]
+ - Updated dependencies [0b8b868]
+ - Updated dependencies [9e1eff5]
+ - Updated dependencies [6fa1ad1]
+ - Updated dependencies [c28d7a0]
+ - Updated dependencies [edf1e88]
+   - @mastra/core@0.9.4
+
+ ## 0.2.4-alpha.4
+
+ ### Patch Changes
+
+ - Updated dependencies [3e9c131]
+   - @mastra/core@0.9.4-alpha.4
+
+ ## 0.2.4-alpha.3
+
+ ### Patch Changes
+
+ - Updated dependencies [396be50]
+ - Updated dependencies [c3bd795]
+ - Updated dependencies [da082f8]
+ - Updated dependencies [a5810ce]
+   - @mastra/core@0.9.4-alpha.3
+
+ ## 0.2.4-alpha.2
+
+ ### Patch Changes
+
+ - Updated dependencies [3171b5b]
+ - Updated dependencies [973e5ac]
+ - Updated dependencies [9e1eff5]
+   - @mastra/core@0.9.4-alpha.2
+
+ ## 0.2.4-alpha.1
+
+ ### Patch Changes
+
+ - Updated dependencies [ab80e7e]
+ - Updated dependencies [6fa1ad1]
+ - Updated dependencies [c28d7a0]
+ - Updated dependencies [edf1e88]
+   - @mastra/core@0.9.4-alpha.1
+
+ ## 0.2.4-alpha.0
+
+ ### Patch Changes
+
+ - Updated dependencies [daf942f]
+ - Updated dependencies [0b8b868]
+   - @mastra/core@0.9.4-alpha.0
+
+ ## 0.2.3
+
+ ### Patch Changes
+
+ - Updated dependencies [e450778]
+ - Updated dependencies [8902157]
+ - Updated dependencies [ca0dc88]
  - Updated dependencies [526c570]
+ - Updated dependencies [d7a6a33]
  - Updated dependencies [9cd1a46]
  - Updated dependencies [b5d2de0]
  - Updated dependencies [644f8ad]
  - Updated dependencies [70dbf51]
-   - @mastra/core@0.0.0-vnext-inngest-20250508131921
+   - @mastra/core@0.9.3
+
+ ## 0.2.3-alpha.1
+
+ ### Patch Changes
+
+ - Updated dependencies [e450778]
+ - Updated dependencies [8902157]
+ - Updated dependencies [ca0dc88]
+ - Updated dependencies [9cd1a46]
+ - Updated dependencies [70dbf51]
+   - @mastra/core@0.9.3-alpha.1
 
  ## 0.2.3-alpha.0
 
package/README.md CHANGED
@@ -27,19 +27,15 @@ const voice = new OpenAIRealtimeVoice();
 
  // Create a voice instance with configuration
  const voice = new OpenAIRealtimeVoice({
-   chatModel: {
-     apiKey: 'your-api-key', // Optional, can use OPENAI_API_KEY env var
-     model: 'gpt-4o-mini-realtime', // Optional, uses latest model by default
-     options: {
-       sessionConfig: {
-         voice: 'alloy', // Default voice
-         turn_detection: {
-           type: 'server_vad',
-           threshold: 0.5,
-           silence_duration_ms: 1000,
-         },
-       },
-     },
+   apiKey: 'your-api-key', // Optional, can use OPENAI_API_KEY env var
+   model: 'gpt-4o-mini-realtime', // Optional, uses latest model by default
+ });
+
+ voice.updateSession({
+   turn_detection: {
+     type: 'server_vad',
+     threshold: 0.5,
+     silence_duration_ms: 1000,
    },
  });
@@ -1,5 +1,6 @@
  import { MastraVoice } from '@mastra/core/voice';
  import type { Realtime } from 'openai-realtime-api';
+ import type { RuntimeContext } from '@mastra/core/runtime-context';
  import type { ToolsInput } from '@mastra/core/agent';
 
  /**
@@ -28,10 +29,8 @@ export declare type OpenAIExecuteFunction = (args: any) => Promise<any>;
   * @example
   * ```typescript
   * const voice = new OpenAIRealtimeVoice({
-  *   chatModel: {
-  *     apiKey: process.env.OPENAI_API_KEY,
-  *     model: 'gpt-4o-mini-realtime'
-  *   }
+  *   apiKey: process.env.OPENAI_API_KEY,
+  *   model: 'gpt-4o-mini-realtime'
   * });
   *
   * await voice.open();
@@ -53,6 +52,7 @@ export declare class OpenAIRealtimeVoice extends MastraVoice {
    private debug;
    private queue;
    private transcriber;
+   private runtimeContext?;
    /**
     * Creates a new instance of OpenAIRealtimeVoice.
     *
@@ -66,10 +66,8 @@ export declare class OpenAIRealtimeVoice extends MastraVoice {
     * @example
     * ```typescript
     * const voice = new OpenAIRealtimeVoice({
-    *   chatModel: {
-    *     apiKey: 'your-api-key',
-    *     model: 'gpt-4o-mini-realtime',
-    *   },
+    *   apiKey: 'your-api-key',
+    *   model: 'gpt-4o-mini-realtime',
     *   speaker: 'alloy'
     * });
     * ```
@@ -182,6 +180,14 @@ export declare class OpenAIRealtimeVoice extends MastraVoice {
     * ```
     */
    updateConfig(sessionConfig: unknown): void;
+   /**
+    * Checks if listening capabilities are enabled.
+    *
+    * @returns {Promise<{ enabled: boolean }>}
+    */
+   getListener(): Promise<{
+     enabled: boolean;
+   }>;
    /**
     * Processes audio input for speech recognition.
     * Takes a readable stream of audio data and emits a writing event.
@@ -220,7 +226,9 @@ export declare class OpenAIRealtimeVoice extends MastraVoice {
     * // Now ready for voice interactions
     * ```
     */
-   connect(): Promise<void>;
+   connect({ runtimeContext }?: {
+     runtimeContext?: RuntimeContext;
+   }): Promise<void>;
    disconnect(): void;
    /**
     * Streams audio data in real-time to the OpenAI service.
@@ -319,16 +327,18 @@ export declare class OpenAIRealtimeVoice extends MastraVoice {
    private sendEvent;
  }
 
- export declare const transformTools: (tools?: TTools_2) => {
-   openaiTool: {
-     type: string;
-     name: string;
-     description: string;
-     parameters: {
-       [key: string]: any;
-     };
+ declare type ToolDefinition = {
+   type: 'function';
+   name: string;
+   description: string;
+   parameters: {
+     [key: string]: any;
    };
-   execute: (args: any) => Promise<any>;
+ };
+
+ export declare const transformTools: (tools?: TTools_2) => {
+   openaiTool: ToolDefinition;
+   execute: OpenAIExecuteFunction;
  }[];
 
  declare type TTools = ToolsInput;
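The widened `connect()` signature above is the public plumbing for the "Add realtime context" changelog entry: a `RuntimeContext` passed at connect time is stored on the instance and forwarded to every tool invocation as `{ context, runtimeContext }` (see the `tool?.execute?.` hunks in dist/ and src/ below). A sketch of how a caller might use it, continuing from the instance above (the `user-id` key and the tool body are illustrative, not part of the package):

```typescript
import { RuntimeContext } from '@mastra/core/runtime-context';

// Hypothetical per-connection state; use whatever keys your tools expect.
const runtimeContext = new RuntimeContext();
runtimeContext.set('user-id', 'user_123');

await voice.connect({ runtimeContext });

// A tool executed during this session can now read that state:
//   execute: async ({ context, runtimeContext }) => {
//     const userId = runtimeContext?.get('user-id');
//     return lookupPreferences(userId, context); // illustrative helper
//   }
```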
package/dist/index.cjs CHANGED
@@ -89,10 +89,8 @@ var OpenAIRealtimeVoice = class extends voice.MastraVoice {
   * @example
   * ```typescript
   * const voice = new OpenAIRealtimeVoice({
-  *   chatModel: {
-  *     apiKey: 'your-api-key',
-  *     model: 'gpt-4o-mini-realtime',
-  *   },
+  *   apiKey: 'your-api-key',
+  *   model: 'gpt-4o-mini-realtime',
   *   speaker: 'alloy'
   * });
   * ```
@@ -116,6 +114,7 @@ var OpenAIRealtimeVoice = class extends voice.MastraVoice {
   debug;
   queue = [];
   transcriber;
+  runtimeContext;
   /**
    * Returns a list of available voice speakers.
    *
@@ -240,6 +239,14 @@ var OpenAIRealtimeVoice = class extends voice.MastraVoice {
   updateConfig(sessionConfig) {
     this.sendEvent("session.update", { session: sessionConfig });
   }
+  /**
+   * Checks if listening capabilities are enabled.
+   *
+   * @returns {Promise<{ enabled: boolean }>}
+   */
+  async getListener() {
+    return { enabled: true };
+  }
   /**
    * Processes audio input for speech recognition.
    * Takes a readable stream of audio data and emits a writing event.
@@ -312,9 +319,10 @@ var OpenAIRealtimeVoice = class extends voice.MastraVoice {
   * // Now ready for voice interactions
   * ```
   */
-  async connect() {
+  async connect({ runtimeContext } = {}) {
    const url = `${this.options.url || DEFAULT_URL}?model=${this.options.model || DEFAULT_MODEL}`;
    const apiKey = this.options.apiKey || process.env.OPENAI_API_KEY;
+   this.runtimeContext = runtimeContext;
    this.ws = new ws.WebSocket(url, void 0, {
      headers: {
        Authorization: "Bearer " + apiKey,
@@ -557,7 +565,7 @@ var OpenAIRealtimeVoice = class extends voice.MastraVoice {
      });
    }
    const result = await tool?.execute?.(
-     { context },
+     { context, runtimeContext: this.runtimeContext },
      {
        toolCallId: output.call_id,
        messages: []
package/dist/index.js CHANGED
@@ -87,10 +87,8 @@ var OpenAIRealtimeVoice = class extends MastraVoice {
   * @example
   * ```typescript
   * const voice = new OpenAIRealtimeVoice({
-  *   chatModel: {
-  *     apiKey: 'your-api-key',
-  *     model: 'gpt-4o-mini-realtime',
-  *   },
+  *   apiKey: 'your-api-key',
+  *   model: 'gpt-4o-mini-realtime',
   *   speaker: 'alloy'
   * });
   * ```
@@ -114,6 +112,7 @@ var OpenAIRealtimeVoice = class extends MastraVoice {
   debug;
   queue = [];
   transcriber;
+  runtimeContext;
   /**
    * Returns a list of available voice speakers.
    *
@@ -238,6 +237,14 @@ var OpenAIRealtimeVoice = class extends MastraVoice {
   updateConfig(sessionConfig) {
     this.sendEvent("session.update", { session: sessionConfig });
   }
+  /**
+   * Checks if listening capabilities are enabled.
+   *
+   * @returns {Promise<{ enabled: boolean }>}
+   */
+  async getListener() {
+    return { enabled: true };
+  }
   /**
    * Processes audio input for speech recognition.
    * Takes a readable stream of audio data and emits a writing event.
@@ -310,9 +317,10 @@ var OpenAIRealtimeVoice = class extends MastraVoice {
   * // Now ready for voice interactions
   * ```
   */
-  async connect() {
+  async connect({ runtimeContext } = {}) {
    const url = `${this.options.url || DEFAULT_URL}?model=${this.options.model || DEFAULT_MODEL}`;
    const apiKey = this.options.apiKey || process.env.OPENAI_API_KEY;
+   this.runtimeContext = runtimeContext;
    this.ws = new WebSocket(url, void 0, {
      headers: {
        Authorization: "Bearer " + apiKey,
@@ -555,7 +563,7 @@ var OpenAIRealtimeVoice = class extends MastraVoice {
      });
    }
    const result = await tool?.execute?.(
-     { context },
+     { context, runtimeContext: this.runtimeContext },
      {
        toolCallId: output.call_id,
        messages: []
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@mastra/voice-openai-realtime",
-   "version": "0.0.0-vnext-inngest-20250508131921",
+   "version": "0.0.0-vnextAgentNetwork-20250602134426",
    "description": "Mastra OpenAI Realtime API integration",
    "type": "module",
    "main": "dist/index.js",
@@ -22,8 +22,7 @@
    "dependencies": {
      "openai-realtime-api": "^1.0.7",
      "ws": "^8.18.1",
-     "zod-to-json-schema": "^3.24.5",
-     "@mastra/core": "0.0.0-vnext-inngest-20250508131921"
+     "zod-to-json-schema": "^3.24.5"
    },
    "devDependencies": {
      "@microsoft/api-extractor": "^7.52.5",
@@ -33,7 +32,13 @@
      "tsup": "^8.4.0",
      "typescript": "^5.8.3",
      "vitest": "^2.1.9",
-     "@internal/lint": "0.0.0-vnext-inngest-20250508131921"
+     "zod": "^3.24.3",
+     "@internal/lint": "0.0.0-vnextAgentNetwork-20250602134426",
+     "@mastra/core": "0.0.0-vnextAgentNetwork-20250602134426"
+   },
+   "peerDependencies": {
+     "@mastra/core": "^0.10.0-alpha.0",
+     "zod": "^3.0.0"
    },
    "scripts": {
      "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake",
package/src/index.test.ts CHANGED
@@ -26,9 +26,7 @@ describe('OpenAIRealtimeVoice', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    voice = new OpenAIRealtimeVoice({
-     chatModel: {
-       apiKey: 'test-api-key',
-     },
+     apiKey: 'test-api-key',
    });
    mockClient = (voice as any).client;
  });
package/src/index.ts CHANGED
@@ -1,6 +1,7 @@
  import { EventEmitter } from 'events';
  import { PassThrough } from 'stream';
  import type { ToolsInput } from '@mastra/core/agent';
+ import type { RuntimeContext } from '@mastra/core/runtime-context';
  import { MastraVoice } from '@mastra/core/voice';
  import type { Realtime, RealtimeServerEvents } from 'openai-realtime-api';
  import { WebSocket } from 'ws';
@@ -91,10 +92,8 @@ type RealtimeClientServerEventMap = {
   * @example
   * ```typescript
   * const voice = new OpenAIRealtimeVoice({
-  *   chatModel: {
-  *     apiKey: process.env.OPENAI_API_KEY,
-  *     model: 'gpt-4o-mini-realtime'
-  *   }
+  *   apiKey: process.env.OPENAI_API_KEY,
+  *   model: 'gpt-4o-mini-realtime'
   * });
   *
   * await voice.open();
@@ -115,7 +114,7 @@ export class OpenAIRealtimeVoice extends MastraVoice {
   private debug: boolean;
   private queue: unknown[] = [];
   private transcriber: Realtime.AudioTranscriptionModel;
-
+  private runtimeContext?: RuntimeContext;
   /**
    * Creates a new instance of OpenAIRealtimeVoice.
    *
@@ -129,10 +128,8 @@ export class OpenAIRealtimeVoice extends MastraVoice {
   * @example
   * ```typescript
   * const voice = new OpenAIRealtimeVoice({
-  *   chatModel: {
-  *     apiKey: 'your-api-key',
-  *     model: 'gpt-4o-mini-realtime',
-  *   },
+  *   apiKey: 'your-api-key',
+  *   model: 'gpt-4o-mini-realtime',
   *   speaker: 'alloy'
   * });
   * ```
@@ -289,6 +286,15 @@ export class OpenAIRealtimeVoice extends MastraVoice {
    this.sendEvent('session.update', { session: sessionConfig });
  }
 
+  /**
+   * Checks if listening capabilities are enabled.
+   *
+   * @returns {Promise<{ enabled: boolean }>}
+   */
+  async getListener() {
+    return { enabled: true };
+  }
+
  /**
   * Processes audio input for speech recognition.
   * Takes a readable stream of audio data and emits a writing event.
@@ -367,9 +373,11 @@ export class OpenAIRealtimeVoice extends MastraVoice {
   * // Now ready for voice interactions
   * ```
   */
-  async connect() {
+  async connect({ runtimeContext }: { runtimeContext?: RuntimeContext } = {}) {
    const url = `${this.options.url || DEFAULT_URL}?model=${this.options.model || DEFAULT_MODEL}`;
    const apiKey = this.options.apiKey || process.env.OPENAI_API_KEY;
+   this.runtimeContext = runtimeContext;
+
    this.ws = new WebSocket(url, undefined, {
      headers: {
        Authorization: 'Bearer ' + apiKey,
@@ -638,7 +646,7 @@ export class OpenAIRealtimeVoice extends MastraVoice {
  }
 
  const result = await tool?.execute?.(
-   { context },
+   { context, runtimeContext: this.runtimeContext },
    {
      toolCallId: output.call_id,
      messages: [],
@@ -0,0 +1,119 @@
+ import { createTool } from '@mastra/core/tools';
+ import { describe, it, expect } from 'vitest';
+ import { z } from 'zod';
+ import { transformTools } from './utils';
+
+ // Vitest provides these globals automatically, but we can import them explicitly for clarity
+
+ describe('transformTools', () => {
+   describe('Basic Tool Transformation', () => {
+     it('should transform a tool with Zod inputSchema to OpenAI format', () => {
+       // Create a test tool with Zod schema
+       const tool = createTool({
+         id: 'zodTool',
+         description: 'A tool with Zod schema',
+         inputSchema: z.object({
+           name: z.string(),
+           age: z.number().optional(),
+         }),
+         outputSchema: z.string(),
+         execute: async ({ context }) => {
+           return `Hello, ${context.name}`;
+         },
+       });
+
+       // Transform the tool
+       const transformedTools = transformTools({
+         zodTool: tool,
+       });
+
+       // Assert the transformation results
+       expect(transformedTools).toHaveLength(1);
+       const { openaiTool } = transformedTools[0];
+
+       expect(openaiTool).toMatchObject({
+         type: 'function',
+         name: 'zodTool',
+         description: 'A tool with Zod schema',
+         parameters: expect.objectContaining({
+           type: 'object',
+           properties: expect.objectContaining({
+             name: expect.objectContaining({ type: 'string' }),
+             age: expect.objectContaining({ type: 'number' }),
+           }),
+           required: ['name'],
+         }),
+       });
+     });
+
+     it('should transform a tool with JSON schema parameters to OpenAI format', () => {
+       // Create a test tool with direct JSON schema
+       const tool = {
+         id: 'jsonTool',
+         description: 'A tool with JSON schema',
+         parameters: {
+           type: 'object',
+           properties: {
+             query: { type: 'string' },
+             limit: { type: 'integer' },
+           },
+           required: ['query'],
+         },
+         execute: async (args: { query: string; limit?: number }) => {
+           return `Searched for: ${args.query}`;
+         },
+       };
+
+       // Transform the tool
+       const transformedTools = transformTools({
+         jsonTool: tool,
+       });
+
+       // Assert the transformation results
+       expect(transformedTools).toHaveLength(1);
+       const { openaiTool } = transformedTools[0];
+
+       expect(openaiTool).toMatchObject({
+         type: 'function',
+         name: 'jsonTool',
+         description: 'A tool with JSON schema',
+         parameters: expect.objectContaining({
+           type: 'object',
+           properties: expect.objectContaining({
+             query: expect.objectContaining({ type: 'string' }),
+             limit: expect.objectContaining({ type: 'integer' }),
+           }),
+           required: ['query'],
+         }),
+       });
+     });
+   });
+
+   describe('Tool Execution Tests', () => {
+     it('should create an adapter function for tool execution', async () => {
+       // Create a tool that expects context
+       const tool = createTool({
+         id: 'messageTool',
+         description: 'A tool that processes a message',
+         inputSchema: z.object({
+           message: z.string(),
+         }),
+         outputSchema: z.string(),
+         execute: async ({ context }) => {
+           return `Processed: ${context.message}`;
+         },
+       });
+
+       // Transform the tool
+       const transformedTools = transformTools({
+         messageTool: tool,
+       });
+
+       // Execute the transformed tool
+       const result = await transformedTools[0].execute({ message: 'Hello' });
+
+       // Verify the adapter correctly passes the context
+       expect(result).toBe('Processed: Hello');
+     });
+   });
+ });
package/src/utils.ts CHANGED
@@ -3,10 +3,18 @@ import type { ToolsInput } from '@mastra/core/agent';
  import { zodToJsonSchema } from 'zod-to-json-schema';
 
  export type OpenAIExecuteFunction = (args: any) => Promise<any>;
+ type ToolDefinition = {
+   type: 'function';
+   name: string;
+   description: string;
+   parameters: {
+     [key: string]: any;
+   };
+ };
 
  type TTools = ToolsInput;
  export const transformTools = (tools?: TTools) => {
-   const openaiTools = [];
+   const openaiTools: { openaiTool: ToolDefinition; execute: OpenAIExecuteFunction }[] = [];
    for (const [name, tool] of Object.entries(tools || {})) {
      let parameters: { [key: string]: any };
 
@@ -28,7 +36,7 @@ export const transformTools = (tools?: TTools) => {
      console.warn(`Tool ${name} has neither inputSchema nor parameters, skipping`);
      continue;
    }
-   const openaiTool = {
+   const openaiTool: ToolDefinition = {
      type: 'function',
      name,
      description: tool.description || `Tool: ${name}`,
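The utils change is purely type-level: `transformTools` behaves as before, but its return shape is now the named `ToolDefinition` pair rather than an inferred anonymous type. A small usage sketch; the tool name and schema are illustrative, and the behavior of the `execute` adapter (forwarding raw args to the tool as `context`) is taken from the new test file above:

```typescript
import { createTool } from '@mastra/core/tools';
import { z } from 'zod';
import { transformTools } from '@mastra/voice-openai-realtime'; // exported per the declaration diff

// Illustrative tool; Zod inputSchemas are converted to JSON Schema internally
// via zod-to-json-schema.
const echoTool = createTool({
  id: 'echo',
  description: 'Echoes a message back',
  inputSchema: z.object({ message: z.string() }),
  outputSchema: z.string(),
  execute: async ({ context }) => `Echo: ${context.message}`,
});

const [{ openaiTool, execute }] = transformTools({ echo: echoTool });
// openaiTool: ToolDefinition, i.e. { type: 'function', name: 'echo', description, parameters }
// execute: OpenAIExecuteFunction, the adapter wrapping the original tool
const result = await execute({ message: 'hi' }); // 'Echo: hi'
```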